def GetJoinTable(self, searchInfo):
    """Extend the parent join with currently-equipped sensor information.

    Left-joins the MonitoredSiteEquipment association restricted to rows
    still active (EndDate NULL or in the future), then Sensor and
    SensorType, and exposes the sensor identifier, type and model.
    """
    equipment = Base.metadata.tables['MonitoredSiteEquipment']
    joined = super().GetJoinTable(searchInfo)

    # Equipment counts as current when it has no end date or ends later.
    still_equipped = or_(equipment.c['EndDate'] == None,
                         equipment.c['EndDate'] >= func.now())
    joined = outerjoin(
        joined, equipment,
        and_(MonitoredSite.ID == equipment.c['FK_MonitoredSite'],
             still_equipped))
    joined = outerjoin(joined, Sensor,
                       Sensor.ID == equipment.c['FK_Sensor'])
    joined = outerjoin(joined, SensorType,
                       Sensor.FK_SensorType == SensorType.ID)

    self.selectable.append(Sensor.UnicIdentifier.label('FK_Sensor'))
    self.selectable.append(SensorType.Name.label('FK_SensorType'))
    self.selectable.append(Sensor.Model.label('FK_SensorModel'))
    return joined
def fetch_factor_range(self,
                       universe: Universe,
                       factors: Union[Transformer, Iterable[object]],
                       start_date: str = None,
                       end_date: str = None,
                       dates: Iterable[str] = None,
                       external_data: pd.DataFrame = None,
                       used_factor_tables=None) -> pd.DataFrame:
    """Fetch and transform factor values for a universe over a date window.

    The window is either ``dates`` (explicit trade dates, takes precedence)
    or the ``start_date``/``end_date`` pair.

    Args:
        universe: universe object used to resolve the eligible codes.
        factors: a prepared ``Transformer``, or raw factor expressions from
            which one is built.
        start_date / end_date: range bounds, used when ``dates`` is None.
        dates: explicit trade dates.
        external_data: optional extra columns merged on (trade_date, code)
            before transforming; rows with NaN after the merge are dropped.
        used_factor_tables: optional table set overriding the module-level
            ``factor_tables`` lookup.

    Returns:
        DataFrame of transformed factor values plus ``chgPct``,
        deduplicated on (trade_date, code).
    """
    if isinstance(factors, Transformer):
        transformer = factors
    else:
        transformer = Transformer(factors)
    dependency = transformer.dependency
    # Resolve which table provides each dependent factor column.
    if used_factor_tables:
        factor_cols = _map_factors(dependency, used_factor_tables)
    else:
        factor_cols = _map_factors(dependency, factor_tables)

    # LEFT JOIN every distinct factor table onto Market exactly once,
    # matching on (trade_date, code) restricted to the requested window.
    big_table = Market
    joined_tables = set()
    joined_tables.add(Market.__table__.name)
    for t in set(factor_cols.values()):
        if t.__table__.name not in joined_tables:
            if dates is not None:
                big_table = outerjoin(
                    big_table, t,
                    and_(Market.trade_date == t.trade_date,
                         Market.code == t.code,
                         Market.trade_date.in_(dates)))
            else:
                big_table = outerjoin(
                    big_table, t,
                    and_(Market.trade_date == t.trade_date,
                         Market.code == t.code,
                         Market.trade_date.between(start_date, end_date)))
            joined_tables.add(t.__table__.name)

    universe_df = universe.query(self, start_date, end_date, dates)
    query = select(
        [Market.trade_date, Market.code, Market.chgPct]
        + list(factor_cols.keys())) \
        .select_from(big_table).where(
            and_(
                Market.code.in_(universe_df.code.unique().tolist()),
                Market.trade_date.in_(dates) if dates is not None
                else Market.trade_date.between(start_date, end_date)
            )
        ).distinct()

    # Normalize infinities coming back from raw factor values to NaN.
    df = pd.read_sql(query, self.engine).replace([-np.inf, np.inf], np.nan)
    if external_data is not None:
        df = pd.merge(df, external_data, on=['trade_date', 'code']).dropna()
    df.sort_values(['trade_date', 'code'], inplace=True)
    df.set_index('trade_date', inplace=True)

    # Apply the transformer per code, then re-attach the return column.
    res = transformer.transform('code', df).replace([-np.inf, np.inf], np.nan)
    res['chgPct'] = df.chgPct
    res = res.reset_index()
    # Keep only universe rows, one per (trade_date, code).
    return pd.merge(res, universe_df[['trade_date', 'code']],
                    how='inner').drop_duplicates(['trade_date', 'code'])
def GetJoinTable(self, searchInfo):
    """Extend the parent join with individual status and active equipment.

    Left-joins IndividualStatus, then the IndividualEquipment rows active
    at the reference date (``self.startDate`` when set, otherwise now),
    then Sensor/SensorType so the equipped sensor's identifier, type and
    model can be selected.
    """
    reference_date = self.startDate if self.startDate else datetime.now()
    status_table = Base.metadata.tables['IndividualStatus']
    equipment_table = Base.metadata.tables['IndividualEquipment']

    joined = super().GetJoinTable(searchInfo)
    joined = outerjoin(joined, status_table,
                       status_table.c['FK_Individual'] == Individual.ID)
    self.selectable.append(status_table.c['Status_'].label('Status_'))

    # Active equipment: started on/before the reference date and either
    # open-ended (EndDate NULL) or ending on/after it.
    active_equipment = and_(
        or_(equipment_table.c['EndDate'] >= reference_date,
            equipment_table.c['EndDate'] == None),
        equipment_table.c['StartDate'] <= reference_date)
    joined = outerjoin(
        joined, equipment_table,
        and_(Individual.ID == equipment_table.c['FK_Individual'],
             active_equipment))
    joined = outerjoin(joined, Sensor,
                       Sensor.ID == equipment_table.c['FK_Sensor'])
    joined = outerjoin(joined, SensorType,
                       Sensor.FK_SensorType == SensorType.ID)

    self.selectable.append(Sensor.UnicIdentifier.label('FK_Sensor'))
    self.selectable.append(SensorType.Name.label('FK_SensorType'))
    self.selectable.append(Sensor.Model.label('FK_SensorModel'))
    return joined
def GetJoinTable(self, searchInfo):
    """Extend the parent join with status and active equipment, unless the
    search filters on LastImported.

    When a 'LastImported' criterion is present the parent join is returned
    unchanged; otherwise IndividualStatus, the IndividualEquipment rows
    active at the reference date, and Sensor/SensorType are left-joined.
    """
    reference_date = self.startDate if self.startDate else datetime.now()
    status_table = Base.metadata.tables['IndividualStatus']
    equipment_table = Base.metadata.tables['IndividualEquipment']
    joined = super().GetJoinTable(searchInfo)

    # Filtering on LastImported skips all the extra joins.
    if any(c['Column'] == 'LastImported' for c in searchInfo['criteria']):
        return joined

    joined = outerjoin(joined, status_table,
                       status_table.c['FK_Individual'] == Individual.ID)
    self.selectable.append(status_table.c['Status_'].label('Status_'))

    # Active equipment: started on/before the reference date and either
    # open-ended (EndDate NULL) or ending on/after it.
    active_equipment = and_(
        or_(equipment_table.c['EndDate'] >= reference_date,
            equipment_table.c['EndDate'] == None),
        equipment_table.c['StartDate'] <= reference_date)
    joined = outerjoin(
        joined, equipment_table,
        and_(Individual.ID == equipment_table.c['FK_Individual'],
             active_equipment))
    joined = outerjoin(joined, Sensor,
                       Sensor.ID == equipment_table.c['FK_Sensor'])
    joined = outerjoin(joined, SensorType,
                       Sensor.FK_SensorType == SensorType.ID)

    self.selectable.append(Sensor.UnicIdentifier.label('FK_Sensor'))
    self.selectable.append(SensorType.Name.label('FK_SensorType'))
    self.selectable.append(Sensor.Model.label('FK_SensorModel'))
    return joined
def load_top_40(post):
    """Build a self-join expression over the post table.

    NOTE(review): the ``post`` parameter is never used — confirm whether
    callers rely on this signature or it is a leftover.
    """
    a = sa_post.alias()
    # Alias LEFT JOIN base table on parent_id -> id.
    expr = outerjoin(a, sa_post, a.c.parent_id == sa_post.c.id)
    # NOTE(review): this loop runs exactly once; given the function name it
    # presumably was meant to walk more nesting levels — TODO confirm.
    # It also relies on the auto-generated 'v0_post_id' join column name.
    for i in range(1):
        new_a = sa_post.alias()
        expr = outerjoin(expr, new_a, expr.c.v0_post_id == new_a.c.parent_id)
    return expr
def extend_from(self, _from):
    """Left-join the latest-position view and active equipment onto _from.

    Exposes LAT/LON/ELE/StartDate from the MonitoredSitePositionsNow view
    plus the equipped sensor's identifier, type and model.
    """
    position_view = Base.metadata.tables['MonitoredSitePositionsNow']
    equipment_table = Base.metadata.tables['MonitoredSiteEquipment']

    joined = outerjoin(
        _from, position_view,
        MonitoredSite.ID == position_view.c['FK_MonitoredSite'])
    # Equipment counts as current when it has no end date or ends later.
    joined = outerjoin(
        joined, equipment_table,
        and_(MonitoredSite.ID == equipment_table.c['FK_MonitoredSite'],
             or_(equipment_table.c['EndDate'] == None,
                 equipment_table.c['EndDate'] >= func.now())))
    joined = outerjoin(joined, Sensor,
                       Sensor.ID == equipment_table.c['FK_Sensor'])
    joined = outerjoin(joined, SensorType,
                       Sensor._type_id == SensorType.ID)

    for name in ('LAT', 'LON', 'ELE', 'StartDate'):
        self.selectable.append(position_view.c[name].label(name))
    self.selectable.append(Sensor.UnicIdentifier.label('FK_Sensor'))
    self.selectable.append(SensorType.Name.label('FK_SensorType'))
    self.selectable.append(Sensor.Model.label('FK_SensorModel'))
    return joined
def extend_from(self, _from):
    """Left-join station/user/project/client context onto the observations.

    Exposes the station coordinates, date and creator, the creator's name,
    the client name and the observation id.

    Fixes: removed a leftover debug ``print(self.selectable)`` that wrote
    to stdout on every call, and dropped the unused ``Taxref`` table
    lookup plus the dead commented-out join that went with it.
    """
    station_columns = [
        Station.LAT,
        Station.LON,
        Station.StationDate,
        Station.creator,
    ]
    observation_columns = [Observation.ID.label('observation_id')]

    join_table = outerjoin(_from, Station,
                           Observation.FK_Station == Station.ID)
    join_table = outerjoin(join_table, User, Station.creator == User.id)
    join_table = outerjoin(join_table, Project,
                           Station.FK_Project == Project.ID)
    join_table = outerjoin(join_table, Client,
                           Project.FK_Client == Client.ID)

    # NOTE(review): presumably records Station as already joined so it is
    # not joined again downstream — confirm against fk_join_list consumers.
    self.fk_join_list.append(Station.__table__)

    self.selectable.extend(station_columns)
    self.selectable.extend(observation_columns)
    self.selectable.extend(
        [User.Lastname, User.Firstname, Client.Name.label('ClientName')])
    return join_table
def test_nested_joins(self):
    """Map a class onto a nested join and verify column resolution.

    Exercises corresponding_column() accuracy and how the mapper sets up
    column properties when the mapped selectable joins a table with an
    aliased aggregate subquery.

    Fix: removed the unused local ``tsk_cnt_join`` (an outer join that was
    built but never referenced).
    """
    task, Task_Type, Joined, prj, task_type, msg = (self.tables.task,
                                                    self.classes.Task_Type,
                                                    self.classes.Joined,
                                                    self.tables.prj,
                                                    self.tables.task_type,
                                                    self.tables.msg)

    mapper(Task_Type, task_type)

    # Per-task message count aggregated from task LEFT JOIN msg.
    j = sa.outerjoin(task, msg, task.c.id == msg.c.task_id)
    jj = sa.select(
        [task.c.id.label('task_id'),
         sa.func.count(msg.c.id).label('props_cnt')],
        from_obj=[j],
        group_by=[task.c.id]).alias('prop_c_s')
    jjj = sa.join(task, jj, task.c.id == jj.c.task_id)
    mapper(Joined, jjj,
           properties=dict(type=relationship(Task_Type, lazy='joined')))

    session = create_session()
    eq_(session.query(Joined).limit(10).offset(0).one(),
        Joined(id=1, title='task 1', props_cnt=0))
def GetJoinTable(self, searchInfo):
    """Extend the parent join with individual status and active equipment.

    Left-joins IndividualStatus, then IndividualEquipment rows active at
    the reference date (self.startDate when set, otherwise now), then
    Sensor/SensorType so identifier, type and model can be selected.
    """
    # Reference date for "currently equipped".
    startDate = datetime.now()
    if self.startDate:
        startDate = self.startDate
    StatusTable = Base.metadata.tables['IndividualStatus']
    EquipmentTable = Base.metadata.tables['IndividualEquipment']
    joinTable = super().GetJoinTable(searchInfo)
    joinTable = outerjoin(joinTable, StatusTable, StatusTable.c[
        'FK_Individual'] == Individual.ID)
    self.selectable.append(StatusTable.c['Status_'].label('Status_'))
    # Active equipment: StartDate <= reference date, and EndDate either
    # open (NULL) or on/after the reference date.
    joinTable = outerjoin(
        joinTable, EquipmentTable,
        and_(Individual.ID == EquipmentTable.c['FK_Individual'],
             and_(or_(EquipmentTable.c['EndDate'] >= startDate,
                      EquipmentTable.c['EndDate'] == None),
                  EquipmentTable.c['StartDate'] <= startDate)))
    joinTable = outerjoin(joinTable, Sensor,
                          Sensor.ID == EquipmentTable.c['FK_Sensor'])
    joinTable = outerjoin(joinTable, SensorType,
                          Sensor.FK_SensorType == SensorType.ID)
    self.selectable.append(Sensor.UnicIdentifier.label('FK_Sensor'))
    self.selectable.append(SensorType.Name.label('FK_SensorType'))
    self.selectable.append(Sensor.Model.label('FK_SensorModel'))
    return joinTable
def test_nested_joins(self): task, Task_Type, Joined, prj, task_type, msg = (self.tables.task, self.classes.Task_Type, self.classes.Joined, self.tables.prj, self.tables.task_type, self.tables.msg) # this is testing some subtle column resolution stuff, # concerning corresponding_column() being extremely accurate # as well as how mapper sets up its column properties mapper(Task_Type, task_type) tsk_cnt_join = sa.outerjoin(prj, task, task.c.prj_id == prj.c.id) j = sa.outerjoin(task, msg, task.c.id == msg.c.task_id) jj = sa.select([ task.c.id.label('task_id'), sa.func.count(msg.c.id).label('props_cnt') ], from_obj=[j], group_by=[task.c.id]).alias('prop_c_s') jjj = sa.join(task, jj, task.c.id == jj.c.task_id) mapper(Joined, jjj, properties=dict(type=relationship(Task_Type, lazy='joined'))) session = create_session() eq_( session.query(Joined).limit(10).offset(0).one(), Joined(id=1, title='task 1', props_cnt=0))
def _baseline_7_multiview(self):
    """Benchmark: repeated multi-table SELECTs with inner and outer joins."""
    Zoo = self.metadata.tables['Zoo']
    Animal = self.metadata.tables['Animal']
    engine = self.metadata.bind

    def fulltable(select):
        """Iterate over the full result table."""
        return [list(row) for row in engine.execute(select).fetchall()]

    for _ in range(ITERATIONS):
        # All animal columns for one zoo.
        fulltable(select([Zoo.c.ID] + list(Animal.c),
                         Zoo.c.Name == 'San Diego Zoo',
                         from_obj=[join(Zoo, Animal)]))
        # Statement built but not executed (as in the original benchmark).
        Zoo.select(Zoo.c.Name == 'San Diego Zoo')
        fulltable(select([Zoo.c.ID, Animal.c.ID],
                         and_(Zoo.c.Name == 'San Diego Zoo',
                              Animal.c.Species == 'Leopard'),
                         from_obj=[join(Zoo, Animal)]))
        # Same projection via INNER, LEFT and RIGHT joins.
        fulltable(select([Zoo.c.Name, Animal.c.Species],
                         from_obj=[join(Zoo, Animal)]))
        fulltable(select([Zoo.c.Name, Animal.c.Species],
                         from_obj=[outerjoin(Zoo, Animal)]))
        fulltable(select([Zoo.c.Name, Animal.c.Species],
                         from_obj=[outerjoin(Animal, Zoo)]))
def GetJoinTable(self, searchInfo):
    """Extend the parent join with observations and aggregated taxon names.

    Left-joins Observation rows per station and the dynamic-property
    value view restricted to the 'nom_vernaculaire' property, then
    selects comma-separated aggregates of vernacular and latin names.
    """
    ObservationTable = Base.metadata.tables['Observation']
    obsValTable = Base.metadata.tables['observationdynpropvaluenow']
    joinTable = super().GetJoinTable(searchInfo)
    joinTable = outerjoin(joinTable, ObservationTable,
                          ObservationTable.c['FK_Station'] == Station.ID)
    # Only the dynamic-property rows holding the vernacular name.
    joinTable = outerjoin(
        joinTable, obsValTable,
        and_(ObservationTable.c['ID'] == obsValTable.c['FK_Observation'],
             obsValTable.c['Name'] == 'nom_vernaculaire'))
    # PostgreSQL string_agg with SQLAlchemy's aggregate_order_by idiom:
    # renders as string_agg(value, ',' ORDER BY Station.ID).
    self.selectable.append(
        func.string_agg(
            obsValTable.c['ValueString'],
            aggregate_order_by(literal_column("','"),
                               Station.ID)).label('nom_vernaculaire'))
    self.selectable.append(
        func.string_agg(
            ObservationTable.c['taxon'],
            aggregate_order_by(literal_column("','"),
                               Station.ID)).label('nom_latin'))
    return joinTable
def GetJoinTable(self, searchInfo):
    '''Build the join table and select statement over all dynamic
    properties and foreign keys referenced by the filter query.'''
    joinTable = self.ObjWithDynProp
    view = self.GetDynPropValueView()
    selectable = [self.ObjWithDynProp.ID]
    objTable = self.ObjWithDynProp.__table__
    self.firstStartDate = None
    ##### get all foreign keys #####
    self.fk_list = {
        fk.parent.name: fk
        for fk in self.ObjWithDynProp.__table__.foreign_keys}
    self.searchInFK = {}
    for objConf in self.GetAllPropNameInConf():
        curDynProp = self.GetDynProp(objConf.Name)
        if objConf.Name in self.fk_list and objConf.QueryName is not None and objConf.QueryName != 'Forced':
            # Configured foreign key: join the referenced table and expose
            # its configured display column as "<fk>_<queryname>".
            tableRef = self.fk_list[objConf.Name].column.table
            nameRef = self.fk_list[objConf.Name].column.name
            self.searchInFK[objConf.Name] = {
                'nameProp': objConf.QueryName,
                'table': tableRef,
                'nameFK': nameRef}
            joinTable = outerjoin(joinTable, tableRef, objTable.c[
                objConf.Name] == tableRef.c[nameRef])
            selectable.append(tableRef.c[objConf.QueryName].label(
                objConf.Name + '_' + objConf.QueryName))
        elif curDynProp != None:
            # Dynamic property: one alias of the value view per property;
            # select its typed "Value<TypeProp>" column under the
            # property's name.
            v = view.alias('v' + curDynProp['Name'])
            self.vAliasList['v' + curDynProp['Name']] = v
            if self.history is False or self.firstStartDate is None:
                # original note: "firstDate depricated ?"
                joinTable = outerjoin(
                    joinTable, v,
                    and_(self.ObjWithDynProp.ID == v.c[self.ObjWithDynProp().GetSelfFKNameInValueTable()],
                         v.c[self.ObjWithDynProp().GetDynPropFKName()] == curDynProp['ID'])
                )
                selectable.append(
                    v.c['Value' + curDynProp['TypeProp']].label(curDynProp['Name']))
            else:
                # History mode: additionally align this alias on the first
                # alias's StartDate — presumably so every value belongs to
                # the same snapshot; confirm against the value view.
                tmpV = self.vAliasList[self.firstStartDate]
                joinTable = outerjoin(
                    joinTable, v,
                    and_(v.c['StartDate'] == tmpV.c['StartDate'],
                         and_(self.ObjWithDynProp.ID == v.c[self.ObjWithDynProp(
                         ).GetSelfFKNameInValueTable()],
                             v.c[self.ObjWithDynProp().GetDynPropFKName()] == curDynProp['ID']))
                )
                selectable.append(
                    v.c['Value' + curDynProp['TypeProp']].label(curDynProp['Name']))
        elif self.optionView is not None and objConf.Name in self.optionView.c:
            # Column provided by the optional view: join the view once,
            # then select each matching column from it.
            if self.optionView.name not in self.vAliasList:
                joinTable = outerjoin(joinTable, self.optionView,
                                      self.ObjWithDynProp.ID == self.optionView.c[
                                          'FK_' + self.ObjWithDynProp.__tablename__])
                self.vAliasList[self.optionView.name] = self.optionView
            selectable.append(self.optionView.c[objConf.Name])
        elif hasattr(self.ObjWithDynProp, objConf.Name):
            # Plain column on the object's own table.
            selectable.append(objTable.c[objConf.Name])
    self.selectable = selectable
    return joinTable
def GetJoinTable(self, searchInfo):
    '''Build the join table and select statement over all dynamic
    properties and foreign keys referenced by the filter query.'''
    joinTable = self.ObjWithDynProp
    view = self.GetDynPropValueView()
    selectable = [self.ObjWithDynProp.ID]
    objTable = self.ObjWithDynProp.__table__
    self.firstStartDate = None
    ##### get all foreign keys #####
    self.fk_list = {fk.parent.name: fk
                    for fk in self.ObjWithDynProp.__table__.foreign_keys}
    self.searchInFK = {}
    for objConf in self.GetAllPropNameInConf():
        curDynProp = self.GetDynProp(objConf.Name)
        if objConf.Name in self.fk_list and objConf.QueryName is not None and objConf.QueryName != 'Forced':
            # Configured foreign key: join the referenced table and expose
            # its configured display column as "<fk>_<queryname>".
            tableRef = self.fk_list[objConf.Name].column.table
            nameRef = self.fk_list[objConf.Name].column.name
            self.searchInFK[objConf.Name] = {'nameProp': objConf.QueryName,
                                             'table': tableRef,
                                             'nameFK': nameRef}
            joinTable = outerjoin(joinTable, tableRef,
                                  objTable.c[objConf.Name] == tableRef.c[nameRef])
            selectable.append(tableRef.c[objConf.QueryName].label(
                objConf.Name + '_' + objConf.QueryName))
        elif curDynProp != None:
            # Dynamic property: one alias of the value view per property;
            # select its typed "Value<TypeProp>" column under the
            # property's name.
            v = view.alias('v' + curDynProp['Name'])
            self.vAliasList['v' + curDynProp['Name']] = v
            if self.history is False or self.firstStartDate is None:
                joinTable = outerjoin(
                    joinTable, v,
                    and_(self.ObjWithDynProp.ID == v.c[self.ObjWithDynProp().GetSelfFKNameInValueTable()],
                         v.c[self.ObjWithDynProp().GetDynPropFKName()] == curDynProp['ID'])
                )
                selectable.append(
                    v.c['Value' + curDynProp['TypeProp']].label(curDynProp['Name']))
                if self.history:
                    # Remember the first alias: later aliases align on its
                    # StartDate (see the else branch) and the date itself
                    # is exposed in the select.
                    selectable.append(v.c['StartDate'])
                    self.firstStartDate = 'v' + curDynProp['Name']
            else:
                # History mode: align this alias on the first alias's
                # StartDate so values come from the same snapshot date.
                tmpV = self.vAliasList[self.firstStartDate]
                joinTable = outerjoin(
                    joinTable, v,
                    and_(v.c['StartDate'] == tmpV.c['StartDate'],
                         and_(self.ObjWithDynProp.ID == v.c[self.ObjWithDynProp().GetSelfFKNameInValueTable()],
                              v.c[self.ObjWithDynProp().GetDynPropFKName()] == curDynProp['ID']))
                )
                selectable.append(
                    v.c['Value' + curDynProp['TypeProp']].label(curDynProp['Name']))
        elif self.optionView is not None and objConf.Name in self.optionView.c:
            # Column provided by the optional view: join the view once,
            # then select each matching column from it.
            if self.optionView.name not in self.vAliasList:
                joinTable = outerjoin(joinTable, self.optionView,
                                      self.ObjWithDynProp.ID == self.optionView.c['FK_' + self.ObjWithDynProp.__tablename__])
                self.vAliasList[self.optionView.name] = self.optionView
            selectable.append(self.optionView.c[objConf.Name])
        elif hasattr(self.ObjWithDynProp, objConf.Name):
            # Plain column on the object's own table.
            selectable.append(objTable.c[objConf.Name])
    self.selectable = selectable
    return joinTable
def get_query(self):
    """Build the export SELECT over the movie tables.

    Fields may be plain column names (assumed on ``movies``) or
    ``table.column`` references; unknown tables/columns are logged and
    skipped. Each referenced auxiliary table is LEFT JOINed to ``movies``.
    """
    t = db.metadata.tables
    tables = set()
    columns = []
    for field in self.fields_to_export:
        parts = field.split('.')
        if len(parts) > 1:
            table, column = parts[0], parts[1]
            if table not in t:
                log.warning("Wrong table name: %s", table)
                continue
            tables.add(table)  # drives JOIN generation below
        else:
            table, column = 'movies', parts[0]
        if column in t[table].columns:
            columns.append(t[table].columns[column])
        else:
            log.warning("Wrong field name: %s", field)

    # One LEFT JOIN per referenced auxiliary table, chained off movies.
    joins = []
    for name, fk in (('media', 'medium_id'),
                     ('collections', 'collection_id'),
                     ('volumes', 'volume_id'),
                     ('vcodecs', 'vcodec_id')):
        if name in tables:
            joins.append((t[name], t['movies'].c[fk] == t[name].c[fk]))

    if joins:
        from_obj = [outerjoin(t['movies'], *joins[0])]
        for extra in joins[1:]:
            from_obj.append(outerjoin(from_obj[-1], *extra))
        query = select(columns=columns, bind=self.db.session.bind,
                       from_obj=from_obj, use_labels=True)
    else:
        query = select(columns=columns, bind=self.db.session.bind)
    query = update_whereclause(query, self.search_conditions)
    # Save column names ('movies_title' vs 'title' depending on labeling).
    self.exported_columns = query.columns
    return query
def fetch_factor_range_forward(self,
                               universe: Universe,
                               factors: Union[Transformer, object],
                               start_date: str = None,
                               end_date: str = None,
                               dates: Iterable[str] = None):
    """Fetch the first dependent factor shifted one period forward ('dx').

    The window is ``dates`` when given, otherwise the
    ``start_date``/``end_date`` range. Returns a DataFrame with
    trade_date, code, chgPct and the forward-shifted factor column 'dx',
    restricted to the universe and deduplicated on (trade_date, code).
    """
    if isinstance(factors, Transformer):
        transformer = factors
    else:
        transformer = Transformer(factors)
    dependency = transformer.dependency
    factor_cols = _map_factors(dependency, factor_tables)

    codes = universe.query(self, start_date, end_date, dates)
    total_codes = codes.code.unique().tolist()
    total_dates = codes.trade_date.astype(str).unique().tolist()

    # LEFT JOIN each factor table onto Market once, on (trade_date, code)
    # restricted to the requested window.
    big_table = Market
    joined_tables = set()
    joined_tables.add(Market.__table__.name)
    for t in set(factor_cols.values()):
        if t.__table__.name not in joined_tables:
            if dates is not None:
                big_table = outerjoin(
                    big_table, t,
                    and_(Market.trade_date == t.trade_date,
                         Market.code == t.code,
                         Market.trade_date.in_(dates)))
            else:
                big_table = outerjoin(
                    big_table, t,
                    and_(Market.trade_date == t.trade_date,
                         Market.code == t.code,
                         Market.trade_date.between(start_date, end_date)))
            joined_tables.add(t.__table__.name)

    # lag(col, -1) over (partition by code order by trade_date): the NEXT
    # row's value per code, i.e. the one-period-forward factor, as 'dx'.
    stats = func.lag(list(factor_cols.keys())[0],
                     -1).over(partition_by=Market.code,
                              order_by=Market.trade_date).label('dx')

    query = select([Market.trade_date, Market.code, Market.chgPct,
                    stats]).select_from(big_table).where(
        and_(Market.trade_date.in_(total_dates),
             Market.code.in_(total_codes)))

    df = pd.read_sql(query, self.engine) \
        .replace([-np.inf, np.inf], np.nan) \
        .sort_values(['trade_date', 'code'])
    # Restrict to universe rows, one per (trade_date, code).
    return pd.merge(df, codes[['trade_date', 'code']],
                    how='inner').drop_duplicates(['trade_date', 'code'])
def whereInEquipement(self, fullQueryJoin, criteria):
    """Constrain the query by sensor equipment via an EXISTS subquery.

    Picks the first criterion targeting FK_Sensor or FK_SensorType,
    builds a correlated subquery over IndividualEquipment joined to
    Sensor/SensorType for equipment started before the reference date,
    and attaches it as EXISTS / NOT EXISTS depending on the operator.
    """
    sensorObj = list(
        filter(lambda x: x['Column'] in ['FK_Sensor', 'FK_SensorType'],
               criteria))[0]
    sensor = sensorObj['Value']
    criteria_column = sensorObj['Column']
    # Map the logical column names onto the actual ORM columns.
    if criteria_column == 'FK_Sensor':
        criteria_column = Sensor.UnicIdentifier
    if criteria_column == 'FK_SensorType':
        criteria_column = Sensor.FK_SensorType
    table = Base.metadata.tables['IndividualEquipment']
    joinTable = outerjoin(table, Sensor, table.c['FK_Sensor'] == Sensor.ID)
    joinTable = outerjoin(joinTable, SensorType,
                          Sensor.FK_SensorType == SensorType.ID)
    # Reference date: explicit start date when set, otherwise now.
    startDate = datetime.now()
    if self.startDate:
        startDate = self.startDate
    # Correlated subquery: equipment rows for the current Individual that
    # started on or before the reference date.
    subSelect = select(
        [table.c['FK_Individual']]).select_from(joinTable).where(
            Individual.ID == table.c['FK_Individual']).where(
                table.c['StartDate'] <= startDate)
    if sensorObj['Operator'].lower() in ['is null', 'is not null']:
        # Null checks need no value comparison; without history, restrict
        # to equipment still active (open-ended or ending later).
        if not self.history:
            subSelect = subSelect.where(
                or_(table.c['EndDate'] >= startDate,
                    table.c['EndDate'] == None))
    else:
        subSelect = subSelect.where(
            eval_.eval_binary_expr(criteria_column,
                                   sensorObj['Operator'],
                                   sensor))
        if not self.history:
            subSelect = subSelect.where(
                or_(table.c['EndDate'] >= startDate,
                    table.c['EndDate'] == None))
    # Negated operators flip to NOT EXISTS — except the literal
    # 'is not null', which still means "some equipment exists".
    if 'is not' in sensorObj['Operator'].lower():
        if sensorObj['Operator'].lower() == 'is not null':
            fullQueryJoin = fullQueryJoin.where(exists(subSelect))
        else:
            fullQueryJoin = fullQueryJoin.where(~exists(subSelect))
    else:
        if sensorObj['Operator'].lower() == 'is null':
            fullQueryJoin = fullQueryJoin.where(~exists(subSelect))
        else:
            fullQueryJoin = fullQueryJoin.where(exists(subSelect))
    return fullQueryJoin
def GetJoinTable(self, searchInfo):
    """Extend the parent join with the sensor's current equipment and site.

    Left-joins the CurrentlySensorEquiped view and the matching
    MonitoredSite, exposing the site name and the equipped individual id.
    """
    equipment = Base.metadata.tables['CurrentlySensorEquiped']
    site_table = Base.metadata.tables['MonitoredSite']

    joined = super().GetJoinTable(searchInfo)
    joined = outerjoin(joined, equipment,
                       equipment.c['FK_Sensor'] == Sensor.ID)
    joined = outerjoin(
        joined, MonitoredSite,
        site_table.c['ID'] == equipment.c['FK_MonitoredSite'])

    self.selectable.append(
        site_table.c['Name'].label('FK_MonitoredSiteName'))
    self.selectable.append(
        equipment.c['FK_Individual'].label('FK_Individual'))
    return joined
def get_query(self):
    """Build the export SELECT over the movie tables.

    Fields may be plain column names (assumed on ``movies``) or
    ``table.column`` references; unknown tables/columns are logged and
    skipped. Each referenced auxiliary table is LEFT JOINed to ``movies``.
    """
    tables = set()
    columns = []
    for i in self.fields_to_export:
        table = "movies"
        column = i.split(".")
        if len(column) > 1:
            table = column[0]
            column = column[1]
            if table not in db.tables:
                log.warning("Wrong table name: %s", table)
                continue
            tables.add(table)  # will be used to generate JOIN
        else:
            column = column[0]
        if column in db.tables[table].columns:
            columns.append(db.tables[table].columns[column])
        else:
            log.warning("Wrong field name: %s", i)
    # One LEFT JOIN per referenced auxiliary table, chained off movies.
    joins = []
    if "media" in tables:
        joins.append((db.tables["media"],
                      db.tables["movies"].c.medium_id == db.tables["media"].c.medium_id))
    if "collections" in tables:
        joins.append(
            (
                db.tables["collections"],
                db.tables["movies"].c.collection_id == db.tables["collections"].c.collection_id,
            )
        )
    if "volumes" in tables:
        joins.append((db.tables["volumes"],
                      db.tables["movies"].c.volume_id == db.tables["volumes"].c.volume_id))
    if "vcodecs" in tables:
        joins.append((db.tables["vcodecs"],
                      db.tables["movies"].c.vcodec_id == db.tables["vcodecs"].c.vcodec_id))
    if joins:
        from_obj = [outerjoin(db.tables["movies"], *(joins[0]))]
        for j in joins[1:]:
            from_obj.append(outerjoin(from_obj[-1], *j))
        query = select(columns=columns, bind=self.db.session.bind,
                       from_obj=from_obj, use_labels=True)
    else:
        query = select(columns=columns, bind=self.db.session.bind)
    query = update_whereclause(query, self.search_conditions)
    # save column names (will contain 'movies_title' or 'title'
    # depending on how many tables were requested)
    self.exported_columns = query.columns
    return query
def delete_datasets(app, cutoff_time, remove_from_disk, info_only=False, force_retry=False):
    # Marks datasets as deleted if associated items are all deleted.
    # NOTE: Python 2 module (print statements) — keep the syntax unless the
    # whole file is ported.
    start = time.time()
    if force_retry:
        # Retry mode: consider every dataset whose association was updated
        # before the cutoff, regardless of its current deleted state.
        history_dataset_ids_query = sa.select(
            (app.model.Dataset.table.c.id,
             app.model.Dataset.table.c.state),
            whereclause=app.model.HistoryDatasetAssociation.table.c.update_time < cutoff_time,
            from_obj=[sa.outerjoin(app.model.Dataset.table,
                                   app.model.HistoryDatasetAssociation.table)])
        library_dataset_ids_query = sa.select(
            (app.model.Dataset.table.c.id,
             app.model.Dataset.table.c.state),
            whereclause=app.model.LibraryDatasetDatasetAssociation.table.c.update_time < cutoff_time,
            from_obj=[sa.outerjoin(app.model.Dataset.table,
                                   app.model.LibraryDatasetDatasetAssociation.table)])
    else:
        # We really only need the id column here, but sqlalchemy barfs when
        # trying to select only 1 column
        history_dataset_ids_query = sa.select(
            (app.model.Dataset.table.c.id,
             app.model.Dataset.table.c.state),
            whereclause=sa.and_(app.model.Dataset.table.c.deleted == False,
                                app.model.HistoryDatasetAssociation.table.c.update_time < cutoff_time,
                                app.model.HistoryDatasetAssociation.table.c.deleted == True),
            from_obj=[sa.outerjoin(app.model.Dataset.table,
                                   app.model.HistoryDatasetAssociation.table)])
        library_dataset_ids_query = sa.select(
            (app.model.Dataset.table.c.id,
             app.model.Dataset.table.c.state),
            whereclause=sa.and_(app.model.Dataset.table.c.deleted == False,
                                app.model.LibraryDatasetDatasetAssociation.table.c.update_time < cutoff_time,
                                app.model.LibraryDatasetDatasetAssociation.table.c.deleted == True),
            from_obj=[sa.outerjoin(app.model.Dataset.table,
                                   app.model.LibraryDatasetDatasetAssociation.table)])
    history_dataset_ids = [row.id for row in history_dataset_ids_query.execute()]
    library_dataset_ids = [row.id for row in library_dataset_ids_query.execute()]
    dataset_ids = history_dataset_ids + library_dataset_ids
    skip = []
    deleted_dataset_count = 0
    deleted_instance_count = 0
    for dataset_id in dataset_ids:
        print "######### Processing dataset id:", dataset_id
        dataset = app.sa_session.query(app.model.Dataset).get(dataset_id)
        if dataset.id not in skip and _dataset_is_deletable(dataset):
            deleted_dataset_count += 1
            # Purge every associated instance (and children); honors the
            # info_only / remove_from_disk flags.
            for dataset_instance in dataset.history_associations + dataset.library_associations:
                print "Associated Dataset instance: ", dataset_instance.__class__.__name__, dataset_instance.id
                _purge_dataset_instance(dataset_instance, app, remove_from_disk,
                                        include_children=True,
                                        info_only=info_only, is_deletable=True)
                deleted_instance_count += 1
            skip.append(dataset.id)
    stop = time.time()
    print "Examined %d datasets, marked %d as deleted and purged %d dataset instances" % (len(skip), deleted_dataset_count, deleted_instance_count)
    print "Total elapsed time: ", stop - start
    print "##########################################"
def extend_from(self, _from):
    """Left-join current sensor equipment and its monitored site onto _from.

    Adds the equipped site's name and the equipped individual's id to the
    selectable columns.
    """
    equipment = Base.metadata.tables['CurrentlySensorEquiped']
    site_table = Base.metadata.tables['MonitoredSite']

    joined = outerjoin(_from, equipment,
                       equipment.c['FK_Sensor'] == Sensor.ID)
    joined = outerjoin(
        joined, MonitoredSite,
        site_table.c['ID'] == equipment.c['FK_MonitoredSite'])

    self.selectable.append(
        site_table.c['Name'].label('FK_MonitoredSiteName'))
    self.selectable.append(
        equipment.c['FK_Individual'].label('FK_Individual'))
    return joined
def per_month_all(self, trans, **kwd):
    """Report total vs monitor-user job counts per month, newest first."""
    params = util.Params(kwd)
    message = ""
    monitor_email = params.get("monitor_email", "*****@*****.**")

    month_bucket = sa.func.date_trunc(
        "month", sa.func.date(model.Job.table.c.create_time))
    # 1 for jobs owned by the monitor user, 0 otherwise; summed per month.
    monitor_flag = sa.case(
        [(model.User.table.c.email == monitor_email, 1)], else_=0)

    q = sa.select(
        (month_bucket.label("date"),
         sa.func.sum(monitor_flag).label("monitor_jobs"),
         sa.func.count(model.Job.table.c.id).label("total_jobs")),
        from_obj=[sa.outerjoin(model.Job.table, model.User.table)],
        group_by=[month_bucket],
        order_by=[sa.desc("date")],
    )

    jobs = [
        (row.date.strftime("%Y-%m"),
         row.total_jobs - row.monitor_jobs,
         row.monitor_jobs,
         row.total_jobs,
         row.date.strftime("%B"),
         row.date.strftime("%Y"))
        for row in q.execute()
    ]
    return trans.fill_template("/webapps/reports/jobs_per_month_all.mako",
                               jobs=jobs, message=message)
def whereInEquipement(self, fullQueryJoin, criteria):
    """Constrain the query by equipped sensor via an EXISTS subquery.

    Takes the first FK_Sensor criterion, builds a correlated subquery over
    IndividualEquipment joined to Sensor for equipment started before the
    reference date (self.startDate or now), and attaches it as
    EXISTS / NOT EXISTS depending on the operator.
    """
    sensorObj = list(filter(lambda x: 'FK_Sensor' == x['Column'], criteria))[0]
    sensor = sensorObj['Value']
    table = Base.metadata.tables['IndividualEquipment']
    joinTable = outerjoin(table, Sensor, table.c['FK_Sensor'] == Sensor.ID)
    # Reference date: explicit start date when set, otherwise now.
    startDate = datetime.now()
    if self.startDate:
        startDate = self.startDate
    # Correlated subquery: equipment rows for the current Individual that
    # started on or before the reference date.
    subSelect = select([table.c['FK_Individual']]
                       ).select_from(joinTable).where(
        Individual.ID == table.c['FK_Individual']).where(
        table.c['StartDate'] <= startDate)
    if sensorObj['Operator'].lower() in ['is null', 'is not null']:
        # Null checks need no value comparison; without history, restrict
        # to equipment still active (open-ended or ending later).
        if not self.history:
            subSelect = subSelect.where(
                or_(table.c['EndDate'] >= startDate,
                    table.c['EndDate'] == None))
    else:
        subSelect = subSelect.where(
            eval_.eval_binary_expr(Sensor.UnicIdentifier,
                                   sensorObj['Operator'], sensor))
        if not self.history:
            subSelect = subSelect.where(
                or_(table.c['EndDate'] >= startDate,
                    table.c['EndDate'] == None))
    # Negated operators flip to NOT EXISTS — except the literal
    # 'is not null', which still means "some equipment exists".
    if 'is not' in sensorObj['Operator'].lower():
        if sensorObj['Operator'].lower() == 'is not null':
            fullQueryJoin = fullQueryJoin.where(exists(subSelect))
        else:
            fullQueryJoin = fullQueryJoin.where(~exists(subSelect))
    else:
        if sensorObj['Operator'].lower() == 'is null':
            fullQueryJoin = fullQueryJoin.where(~exists(subSelect))
        else:
            fullQueryJoin = fullQueryJoin.where(exists(subSelect))
    return fullQueryJoin
async def posts(request):
    """Return all posts as JSON, each with author name and its comments."""
    posts = []
    comments_by_post_id = defaultdict(list)

    async with request.app.db.acquire() as conn:
        # Posts with their (possibly missing) author.
        author_join = sa.outerjoin(db.post, db.author)
        post_query = sa.select([
            db.post.c.id,
            db.post.c.content,
            db.author.c.name.label('author')
        ]).select_from(author_join)
        async for row in await conn.execute(post_query):
            posts.append({column: value for column, value in row.items()})

        post_ids = [p['id'] for p in posts]

        # Comments attached to any of the posts fetched above.
        comment_join = db.comment.join(db.comments_posts)
        comment_rows = await conn.execute(
            sa.select([
                db.comment.c.id,
                db.comment.c.content,
                db.comments_posts.c.post_id
            ]).select_from(comment_join).where(
                db.comments_posts.c.post_id.in_(post_ids)))
        async for comment in comment_rows:
            comments_by_post_id[comment.post_id].append(
                dict(content=comment.content))

    # Attach the grouped comments to their posts (empty list by default).
    for post in posts:
        post.setdefault('comments', [])
        matching = comments_by_post_id.get(post['id'])
        if matching:
            post['comments'].extend(matching)

    return response.json(dict(posts=posts))
def history_per_user(self, trans, **kwd):
    """Render the per-user history-count report.

    Counts histories per username (outer-joined against users), sorts by
    username or by count (ascending or descending per ``kwd``), truncates
    to ``user_cutoff`` entries and fills the report template.

    Fix: ``histories.sort(sorting_functions[sorting])`` passed a cmp-style
    comparator positionally, which raises TypeError on Python 3 (list.sort
    takes no positional arguments); the comparator is now wrapped with
    ``functools.cmp_to_key`` (also available on Python 2.7).
    """
    from functools import cmp_to_key

    message = escape(util.restore_text(kwd.get('message', '')))
    user_cutoff = int(kwd.get('user_cutoff', 60))
    sorting = 0 if kwd.get('sorting', 'User') == 'User' else 1
    descending = 1 if kwd.get('descending', 'desc') == 'desc' else -1
    # cmp-style comparators: index 0 orders by username, index 1 by count.
    sorting_functions = [
        lambda first, second: descending if first[0].lower() > second[0].lower() else -descending,
        lambda first, second: descending if first[1] < second[1] else -descending]

    req = sa.select(
        (sa.func.count(galaxy.model.History.table.c.id).label('history'),
         galaxy.model.User.table.c.username.label('username')),
        from_obj=[sa.outerjoin(galaxy.model.History.table,
                               galaxy.model.User.table)],
        whereclause=galaxy.model.History.table.c.user_id == galaxy.model.User.table.c.id,
        group_by=['username'],
        order_by=[sa.desc('username'), 'history'])

    histories = [(_.username if _.username is not None else "Unknown",
                  _.history) for _ in req.execute()]
    histories.sort(key=cmp_to_key(sorting_functions[sorting]))
    if user_cutoff != 0:
        histories = histories[:user_cutoff]
    return trans.fill_template('/webapps/reports/history_per_user.mako',
                               histories=histories,
                               user_cutoff=user_cutoff,
                               sorting=sorting,
                               descending=descending,
                               message=message)
def specified_month_in_error(self, trans, **kwd):
    """Report, per day, how many jobs errored during one month.

    ``specified_date`` (YYYY-MM-DD) selects the month; defaults to the
    current month when absent.
    """
    params = util.Params(kwd)
    message = ''

    # If specified_date is not received, default to the current month.
    specified_date = kwd.get('specified_date',
                             datetime.utcnow().strftime("%Y-%m-%d"))
    specified_month = specified_date[:7]
    year, month = map(int, specified_month.split("-"))
    start_date = date(year, month, 1)
    end_date = start_date + timedelta(days=calendar.monthrange(year, month)[1])
    month_label = start_date.strftime("%B")
    year_label = start_date.strftime("%Y")

    q = sa.select(
        (sa.func.date(model.Job.table.c.create_time).label('date'),
         sa.func.count(model.Job.table.c.id).label('total_jobs')),
        whereclause=sa.and_(model.Job.table.c.state == 'error',
                            model.Job.table.c.create_time >= start_date,
                            model.Job.table.c.create_time < end_date),
        from_obj=[sa.outerjoin(model.Job.table, model.User.table)],
        group_by=['date'],
        order_by=[sa.desc('date')])

    jobs = [
        (row.date.strftime("%A"),
         row.date,
         row.total_jobs,
         row.date.strftime("%d"))
        for row in q.execute()
    ]
    return trans.fill_template('/webapps/reports/jobs_specified_month_in_error.mako',
                               month_label=month_label,
                               year_label=year_label,
                               month=month,
                               jobs=jobs,
                               message=message)
def reorient_cdrs(self):
    """Swap the orientation of conductors, processing units in ascending
    fan-out order.

    First computes, per net unit, the number of conductors attached
    (nets LEFT JOIN conductors, so units with zero conductors still
    appear), ordered by that count ascending.  Then, unit by unit, swaps
    the orientation of every conductor whose b-side net is in the unit,
    inside a single ORM transaction.
    """
    conn = meta.bind.connect()
    try:
        # LEFT JOIN keeps units with no conductors (count 0).
        net_cdr_j = sa.outerjoin(net_table, conductor_table,
                                 onclause=net_to_cdr_join)
        unit_fan_order_q = sa.select(
            [net_table.c.unit,
             sa.func.count(conductor_table.c.cable).label('cdr_count')],
            from_obj=[net_cdr_j],
            group_by=[net_table.c.unit],
            # Least-connected units are processed first.
            order_by=[sa.asc('cdr_count'), net_table.c.unit])
        unit_fan_order = [r[0] for r in conn.execute(unit_fan_order_q)]
    finally:
        # Always release the raw connection before ORM work starts.
        conn.close()
    s = self._get_ses()
    s.begin()
    try:
        for unit in unit_fan_order:
            # Eager-load both net endpoints; swap_orientation presumably
            # reads/writes them (defined elsewhere -- confirm).
            b_cdrs = (s.query(Conductor)
                      .options(orm.joinedload('a_net'),
                               orm.joinedload('b_net'))
                      .filter(Conductor.b_net_unit == unit))
            for cdr in b_cdrs:
                cdr.swap_orientation()
            # Flush per unit so later queries see the swapped rows.
            s.flush()
        s.commit()
    except:
        # Roll back the whole transaction on any failure, then re-raise.
        s.rollback()
        raise
    finally:
        self._close_ses()
def getEquipment(self):
    """Return this sensor's equipment history as a list of ordered dicts.

    Each row carries the deployment period (StartDate/EndDate), the
    sensor identifier, and the monitored site or individual it was
    attached to, most recent deployment first.
    """
    _id = self.objectDB.ID
    table = Base.metadata.tables['SensorEquipment']
    joinTable = join(table, Sensor, table.c['FK_Sensor'] == Sensor.ID)
    # Outer join: an equipment row may reference an individual instead
    # of a monitored site, in which case the site columns are NULL.
    joinTable = outerjoin(joinTable, MonitoredSite,
                          table.c['FK_MonitoredSite'] == MonitoredSite.ID)
    query = select([table.c['StartDate'], table.c['EndDate'],
                    Sensor.UnicIdentifier, MonitoredSite.Name,
                    MonitoredSite.ID.label('MonitoredSiteID'),
                    table.c['FK_Individual']]
                   ).select_from(joinTable
                                 ).where(table.c['FK_Sensor'] == _id
                                         ).order_by(desc(table.c['StartDate']))
    result = self.session.execute(query).fetchall()
    response = []
    for row in result:
        curRow = OrderedDict(row)
        # StartDate is assumed non-null (a deployment always starts);
        # EndDate may be NULL for a still-active deployment -- confirm
        # against the schema.
        curRow['StartDate'] = curRow['StartDate'].strftime('%Y-%m-%d %H:%M:%S')
        curRow['EndDate'] = curRow['EndDate'].strftime(
            '%Y-%m-%d %H:%M:%S') if curRow['EndDate'] is not None else None
        # Tell the client how the date strings above are formatted.
        curRow['format'] = 'YYYY-MM-DD HH:mm:ss'
        response.append(curRow)
    return response
def create_incomplete_consumers(ctx, batch_size):
    """Finds all the consumer records that are missing for allocations and
    creates consumer records for them, using the "incomplete consumer"
    project and user CONF options.

    Returns a tuple containing two identical elements with the number of
    consumer records created, since this is the expected return format for
    data migration routines.
    """
    # Ensure the sentinel project/user rows exist and grab their ids.
    proj_id = project_obj.ensure_incomplete_project(ctx)
    user_id = user_obj.ensure_incomplete_user(ctx)
    # LEFT JOIN allocations -> consumers; a NULL consumer row marks an
    # allocation whose consumer record is missing.
    missing_join = sa.outerjoin(
        _ALLOC_TBL, CONSUMER_TBL,
        _ALLOC_TBL.c.consumer_id == CONSUMER_TBL.c.uuid)
    # Select the consumer uuid together with the literal project/user
    # ids so INSERT ... FROM SELECT fills all three target columns.
    sel = (sa.select([_ALLOC_TBL.c.consumer_id, proj_id, user_id])
           .select_from(missing_join)
           .where(CONSUMER_TBL.c.id.is_(None))
           .limit(batch_size))
    ins_stmt = CONSUMER_TBL.insert().from_select(
        ['uuid', 'project_id', 'user_id'], sel)
    created = ctx.session.execute(ins_stmt).rowcount
    return created, created
def test_nested_joins(self): task, Task_Type, Joined, task_type, msg = ( self.tables.task, self.classes.Task_Type, self.classes.Joined, self.tables.task_type, self.tables.msg, ) # this is testing some subtle column resolution stuff, # concerning corresponding_column() being extremely accurate # as well as how mapper sets up its column properties mapper(Task_Type, task_type) j = sa.outerjoin(task, msg, task.c.id == msg.c.task_id) jj = (sa.select( task.c.id.label("task_id"), sa.func.count(msg.c.id).label("props_cnt"), ).select_from(j).group_by(task.c.id).alias("prop_c_s")) jjj = sa.join(task, jj, task.c.id == jj.c.task_id) mapper( Joined, jjj, properties=dict(type=relationship(Task_Type, lazy="joined")), ) session = fixture_session() eq_( session.query(Joined).order_by( Joined.id).limit(10).offset(0).one(), Joined(id=1, title="task 1", props_cnt=0), )
def history_per_user(self, trans, **kwd):
    """Render the histories-per-user report, sorted by username or by
    history count, optionally cut off at ``user_cutoff`` rows.
    """
    message = escape(util.restore_text(kwd.get('message', '')))
    user_cutoff = int(kwd.get('user_cutoff', 60))
    # sorting: 0 => by username, 1 => by history count
    sorting = 0 if kwd.get('sorting', 'User') == 'User' else 1
    descending = 1 if kwd.get('descending', 'desc') == 'desc' else -1
    req = sa.select(
        (sa.func.count(galaxy.model.History.table.c.id).label('history'),
         galaxy.model.User.table.c.username.label('username')),
        from_obj=[
            sa.outerjoin(galaxy.model.History.table,
                         galaxy.model.User.table)
        ],
        whereclause=galaxy.model.History.table.c.user_id == galaxy.model.User.table.c.id,
        group_by=['username'],
        order_by=[sa.desc('username'), 'history'])
    histories = []
    for row in req.execute():
        name = "Unknown" if row.username is None else row.username
        histories.append((name, row.history))
    # Key 0 sorts case-insensitively by name, key 1 by history count.
    key_funcs = (lambda item: item[0].lower(), lambda item: item[1])
    histories.sort(key=key_funcs[sorting], reverse=descending == 1)
    if user_cutoff != 0:
        histories = histories[:user_cutoff]
    return trans.fill_template('/webapps/reports/history_per_user.mako',
                               histories=histories,
                               user_cutoff=user_cutoff,
                               sorting=sorting,
                               descending=descending,
                               message=message)
def per_user(self, trans, **kwd):
    """Report the number of jobs run by each user.

    Jobs with no associated user are listed as 'Anonymous'; jobs owned
    by the monitor account are omitted from the report.
    """
    opts = util.Params(kwd)
    message = ''
    monitor_email = opts.get('monitor_email', '*****@*****.**')
    spec = sorter('user_email', kwd)
    sort_id = spec.sort_id
    order = spec.order
    arrow = spec.arrow
    _order = spec.exc_order
    count_query = sa.select(
        (model.User.table.c.email.label('user_email'),
         sa.func.count(model.Job.table.c.id).label('total_jobs')),
        from_obj=[sa.outerjoin(model.Job.table, model.User.table)],
        group_by=['user_email'],
        order_by=[_order])
    jobs = []
    for row in count_query.execute():
        if row.user_email is None:
            jobs.append(('Anonymous', row.total_jobs))
        elif row.user_email != monitor_email:
            jobs.append((row.user_email, row.total_jobs))
    return trans.fill_template('/webapps/reports/jobs_per_user.mako',
                               order=order,
                               arrow=arrow,
                               sort_id=sort_id,
                               jobs=jobs,
                               message=message)
def per_month_all(self, trans, **kwd):
    """Report how many workflows were created during each month."""
    message = ''
    spec = sorter('date', kwd)
    sort_id = spec.sort_id
    order = spec.order
    arrow = spec.arrow
    _order = spec.exc_order
    # Bucket create_time by month via the class's month helpers.
    month_expr = self.select_month(model.StoredWorkflow.table.c.create_time)
    q = sa.select(
        (month_expr.label('date'),
         sa.func.count(model.StoredWorkflow.table.c.id).label('total_workflows')),
        from_obj=[sa.outerjoin(model.StoredWorkflow.table, model.User.table)],
        group_by=self.group_by_month(model.StoredWorkflow.table.c.create_time),
        order_by=[_order])
    workflows = [(row.date.strftime("%Y-%m"),
                  row.total_workflows,
                  row.date.strftime("%B"),
                  row.date.strftime("%Y")) for row in q.execute()]
    return trans.fill_template(
        '/webapps/reports/workflows_per_month_all.mako',
        order=order,
        arrow=arrow,
        sort_id=sort_id,
        workflows=workflows,
        message=message)
def per_user(self, trans, **kwd):
    """Report the number of stored workflows owned by each user."""
    message = ''
    spec = sorter('user_email', kwd)
    sort_id = spec.sort_id
    order = spec.order
    arrow = spec.arrow
    _order = spec.exc_order
    q = sa.select(
        (model.User.table.c.email.label('user_email'),
         sa.func.count(model.StoredWorkflow.table.c.id).label('total_workflows')),
        from_obj=[sa.outerjoin(model.StoredWorkflow.table, model.User.table)],
        group_by=['user_email'],
        order_by=[_order])
    workflows = [(row.user_email, row.total_workflows)
                 for row in q.execute()]
    return trans.fill_template('/webapps/reports/workflows_per_user.mako',
                               order=order,
                               arrow=arrow,
                               sort_id=sort_id,
                               workflows=workflows,
                               message=message)
def per_month_all(self, trans, **kwd):
    """Report per-month workflow creation totals, newest month first."""
    params = util.Params(kwd)
    message = ''
    # Truncate create_time to the first of its month so rows group cleanly.
    month_col = sa.func.date_trunc(
        'month', sa.func.date(model.StoredWorkflow.table.c.create_time))
    q = sa.select(
        (month_col.label('date'),
         sa.func.count(model.StoredWorkflow.table.c.id).label('total_workflows')),
        from_obj=[sa.outerjoin(model.StoredWorkflow.table, model.User.table)],
        group_by=[month_col],
        order_by=[sa.desc('date')])
    workflows = [(row.date.strftime("%Y-%m"),
                  row.total_workflows,
                  row.date.strftime("%B"),
                  row.date.strftime("%Y")) for row in q.execute()]
    return trans.fill_template(
        '/webapps/reports/workflows_per_month_all.mako',
        workflows=workflows,
        message=message)
def _get_provider_by_uuid(context, uuid):
    """Given a UUID, return a dict of information about the resource provider
    from the database.

    :raises: NotFound if no such provider was found
    :param uuid: The UUID to look up
    """
    rpt = sa.alias(_RP_TBL, name="rp")
    parent = sa.alias(_RP_TBL, name="parent")
    root = sa.alias(_RP_TBL, name="root")
    # Every provider has a root (inner join); a parent may be absent,
    # hence the outer join for the parent alias.
    joined = sa.outerjoin(
        sa.join(rpt, root, rpt.c.root_provider_id == root.c.id),
        parent,
        rpt.c.parent_provider_id == parent.c.id)
    sel = sa.select([
        rpt.c.id,
        rpt.c.uuid,
        rpt.c.name,
        rpt.c.generation,
        root.c.uuid.label("root_provider_uuid"),
        parent.c.uuid.label("parent_provider_uuid"),
        rpt.c.updated_at,
        rpt.c.created_at,
    ]).select_from(joined).where(rpt.c.uuid == uuid)
    res = context.session.execute(sel).fetchone()
    if not res:
        raise exception.NotFound(
            'No resource provider with uuid %s found' % uuid)
    return dict(res)
def create_incomplete_consumers(ctx, batch_size): """Finds all the consumer records that are missing for allocations and creates consumer records for them, using the "incomplete consumer" project and user CONF options. Returns a tuple containing two identical elements with the number of consumer records created, since this is the expected return format for data migration routines. """ # Create a record in the projects table for our incomplete project incomplete_proj_id = project_obj.ensure_incomplete_project(ctx) # Create a record in the users table for our incomplete user incomplete_user_id = user_obj.ensure_incomplete_user(ctx) # Create a consumer table record for all consumers where # allocations.consumer_id doesn't exist in the consumers table. Use the # incomplete consumer project and user ID. alloc_to_consumer = sa.outerjoin( rp_obj._ALLOC_TBL, CONSUMER_TBL, rp_obj._ALLOC_TBL.c.consumer_id == CONSUMER_TBL.c.uuid) cols = [ rp_obj._ALLOC_TBL.c.consumer_id, incomplete_proj_id, incomplete_user_id, ] sel = sa.select(cols) sel = sel.select_from(alloc_to_consumer) sel = sel.where(CONSUMER_TBL.c.id.is_(None)) sel = sel.limit(batch_size) target_cols = ['uuid', 'project_id', 'user_id'] ins_stmt = CONSUMER_TBL.insert().from_select(target_cols, sel) res = ctx.session.execute(ins_stmt) return res.rowcount, res.rowcount
def specified_month_in_error(self, trans, **kwd):
    """Report per-day totals of errored jobs for one calendar month.

    ``specified_date`` (YYYY-MM-DD) selects the month and defaults to
    the current UTC date.
    """
    params = util.Params(kwd)
    message = ''
    # Fall back to today's date, then keep only the YYYY-MM prefix.
    chosen = kwd.get('specified_date',
                     datetime.utcnow().strftime("%Y-%m-%d"))
    year, month = [int(part) for part in chosen[:7].split("-")]
    first_day = date(year, month, 1)
    next_month = first_day + timedelta(
        days=calendar.monthrange(year, month)[1])
    month_label = first_day.strftime("%B")
    year_label = first_day.strftime("%Y")
    q = sa.select(
        (sa.func.date(model.Job.table.c.create_time).label('date'),
         sa.func.count(model.Job.table.c.id).label('total_jobs')),
        whereclause=sa.and_(model.Job.table.c.state == 'error',
                            model.Job.table.c.create_time >= first_day,
                            model.Job.table.c.create_time < next_month),
        from_obj=[sa.outerjoin(model.Job.table, model.User.table)],
        group_by=['date'],
        order_by=[sa.desc('date')])
    jobs = []
    for row in q.execute():
        jobs.append((row.date.strftime("%A"), row.date,
                     row.total_jobs, row.date.strftime("%d")))
    return trans.fill_template(
        '/webapps/reports/jobs_specified_month_in_error.mako',
        month_label=month_label,
        year_label=year_label,
        month=month,
        jobs=jobs,
        message=message)
def per_month_all(self, trans, **kwd):
    """Report per-month job totals, splitting out the monitor user's jobs."""
    params = util.Params(kwd)
    message = ''
    monitor_email = params.get('monitor_email', '*****@*****.**')
    month_col = sa.func.date_trunc(
        'month', sa.func.date(model.Job.table.c.create_time))
    # Count monitor-user jobs separately so they can be subtracted out.
    monitor_count = sa.func.sum(
        sa.case([(model.User.table.c.email == monitor_email, 1)], else_=0))
    q = sa.select(
        (month_col.label('date'),
         monitor_count.label('monitor_jobs'),
         sa.func.count(model.Job.table.c.id).label('total_jobs')),
        from_obj=[sa.outerjoin(model.Job.table, model.User.table)],
        group_by=[month_col],
        order_by=[sa.desc('date')])
    jobs = []
    for row in q.execute():
        user_jobs = row.total_jobs - row.monitor_jobs
        jobs.append((row.date.strftime("%Y-%m"),
                     user_jobs,
                     row.monitor_jobs,
                     row.total_jobs,
                     row.date.strftime("%B"),
                     row.date.strftime("%Y")))
    return trans.fill_template('/webapps/reports/jobs_per_month_all.mako',
                               jobs=jobs,
                               message=message)
def iterdescendants(parent_level=None, child_level=None, bind=_backend.engine): """Yield pairs of (parent id, sorted list of their descendant ids).""" # TODO: implement ancestors/descendants as sa.orm.relationship() # see https://bitbucket.org/zzzeek/sqlalchemy/issues/4165 parent, child = (sa.orm.aliased(Languoid, name=n) for n in ('parent', 'child')) tree = Languoid.tree() select_pairs = sa.select([parent.id, child.id], bind=bind)\ .select_from( sa.outerjoin(parent, tree, tree.c.parent_id == parent.id)\ .outerjoin(child, tree.c.child_id == child.id))\ .order_by(parent.id, child.id) if parent_level is not None: if parent_level == 'top': cond = (parent.parent_id == sa.null()) elif parent_level in LEVEL: cond = (parent.level == parent_level) else: raise ValueError('invalid parent_level: %r' % parent_level) select_pairs = select_pairs.where(cond) if child_level is not None: if child_level not in LEVEL: raise ValueError('invalid child_level: %r' % child_level) select_pairs = select_pairs.where(child.level == child_level) grouped = itertools.groupby(select_pairs.execute(), operator.itemgetter(0)) for parent_id, grp in grouped: _, c = next(grp) if c is None: descendants = [] else: descendants = [c] + [c for _, c in grp] yield parent_id, descendants
def specified_month_in_error( self, trans, **kwd ):
    """Report per-day counts of errored jobs for the requested month.

    ``month`` is YYYY-MM and defaults to the current UTC month.
    """
    params = util.Params( kwd )
    msg = ''
    year, month = map( int, params.get( 'month', datetime.utcnow().strftime( "%Y-%m" ) ).split( "-" ) )
    # [start_date, end_date) spans exactly the chosen calendar month.
    start_date = date( year, month, 1 )
    end_date = start_date + timedelta( days=calendar.monthrange( year, month )[1] )
    month_label = start_date.strftime( "%B" )
    year_label = start_date.strftime( "%Y" )
    q = sa.select( ( sa.func.date( galaxy.model.Job.table.c.create_time ).label( 'date' ),
                     sa.func.count( galaxy.model.Job.table.c.id ).label( 'total_jobs' ) ),
                   whereclause = sa.and_( galaxy.model.Job.table.c.state == 'error',
                                          galaxy.model.Job.table.c.create_time >= start_date,
                                          galaxy.model.Job.table.c.create_time < end_date ),
                   # Jobs reach users via histories (Job -> History -> User).
                   from_obj = [ sa.outerjoin( galaxy.model.Job.table, galaxy.model.History.table ).outerjoin( galaxy.model.User.table ) ],
                   group_by = [ 'date' ],
                   order_by = [ sa.desc( 'date' ) ] )
    jobs = []
    for row in q.execute():
        jobs.append( ( row.date.strftime( "%A" ), row.date, row.total_jobs, row.date.strftime( "%d" ) ) )
    return trans.fill_template( 'jobs_specified_month_in_error.mako',
                                month_label=month_label,
                                year_label=year_label,
                                month=month,
                                jobs=jobs,
                                msg=msg )
def _create_incomplete_consumer(ctx, consumer_id):
    # TODO(jaypipes): Remove in Stein after a blocker migration is added.
    """Creates consumer record if consumer relationship between allocations ->
    consumers table is missing for the supplied consumer UUID, using the
    "incomplete consumer" project and user CONF options.
    """
    # LEFT JOIN allocations to consumers on UUID; a NULL consumer row
    # means the consumer record is missing.
    alloc_to_consumer = sa.outerjoin(
        _ALLOC_TBL, consumer_obj.CONSUMER_TBL,
        _ALLOC_TBL.c.consumer_id == consumer_obj.CONSUMER_TBL.c.uuid)
    sel = sa.select([_ALLOC_TBL.c.consumer_id])
    sel = sel.select_from(alloc_to_consumer)
    # Restrict to the one consumer we were asked about.
    sel = sel.where(
        sa.and_(_ALLOC_TBL.c.consumer_id == consumer_id,
                consumer_obj.CONSUMER_TBL.c.id.is_(None)))
    missing = ctx.session.execute(sel).fetchall()
    if missing:
        # Attach the orphaned allocations to the sentinel project/user.
        incomplete_proj_id = project_obj.ensure_incomplete_project(ctx)
        incomplete_user_id = user_obj.ensure_incomplete_user(ctx)
        ins_stmt = consumer_obj.CONSUMER_TBL.insert().values(
            uuid=consumer_id,
            project_id=incomplete_proj_id,
            user_id=incomplete_user_id)
        res = ctx.session.execute(ins_stmt)
        if res.rowcount > 0:
            LOG.info(
                "Online data migration to fix incomplete consumers "
                "for consumer %s has been run. Migrated %d incomplete "
                "consumer records on the fly.", consumer_id, res.rowcount)
def per_user(self, trans, **kwd):
    """Report per-user job totals.

    NULL e-mails are shown as 'Anonymous'; the monitor account's jobs
    are skipped.
    """
    params = util.Params(kwd)
    message = ''
    monitor_email = params.get('monitor_email', '*****@*****.**')
    specs = sorter('user_email', kwd)
    sort_id, order = specs.sort_id, specs.order
    arrow, _order = specs.arrow, specs.exc_order
    jobs_per_user = sa.select(
        (model.User.table.c.email.label('user_email'),
         sa.func.count(model.Job.table.c.id).label('total_jobs')),
        from_obj=[sa.outerjoin(model.Job.table, model.User.table)],
        group_by=['user_email'],
        order_by=[_order])
    jobs = []
    for row in jobs_per_user.execute():
        if row.user_email is None:
            jobs.append(('Anonymous', row.total_jobs))
        elif row.user_email != monitor_email:
            jobs.append((row.user_email, row.total_jobs))
    return trans.fill_template('/webapps/reports/jobs_per_user.mako',
                               order=order,
                               arrow=arrow,
                               sort_id=sort_id,
                               jobs=jobs,
                               message=message)
def today_all( self, trans, **kwd ):
    """Report today's job totals, split into monitor-user vs other jobs."""
    params = util.Params( kwd )
    msg = ''
    monitor_email = params.get( 'monitor_email', '*****@*****.**' )
    # [start_date, end_date) covers exactly today (UTC).
    year, month, day = map( int, datetime.utcnow().strftime( "%Y-%m-%d" ).split( "-" ) )
    start_date = date( year, month, day )
    end_date = start_date + timedelta( days=1 )
    day_label = start_date.strftime( "%A" )
    month_label = start_date.strftime( "%B" )
    year_label = start_date.strftime( "%Y" )
    day_of_month = start_date.strftime( "%d" )
    q = sa.select( ( sa.func.date( galaxy.model.Job.table.c.create_time ).label( 'date' ),
                     # CASE sum counts only the monitor user's jobs.
                     sa.func.sum( sa.case( [( galaxy.model.User.table.c.email == monitor_email, 1 )], else_=0 ) ).label( 'monitor_jobs' ),
                     sa.func.count( galaxy.model.Job.table.c.id ).label( 'total_jobs' ) ),
                   whereclause = sa.and_( galaxy.model.Job.table.c.create_time >= start_date,
                                          galaxy.model.Job.table.c.create_time < end_date ),
                   # Jobs reach users via histories (Job -> History -> User).
                   from_obj = [ sa.outerjoin( galaxy.model.Job.table, galaxy.model.History.table ).outerjoin( galaxy.model.User.table ) ],
                   group_by = [ 'date' ] )
    jobs = []
    for row in q.execute():
        # (weekday, date, non-monitor jobs, monitor jobs, total, day-of-month)
        jobs.append( ( row.date.strftime( "%A" ),
                       row.date,
                       row.total_jobs - row.monitor_jobs,
                       row.monitor_jobs,
                       row.total_jobs,
                       row.date.strftime( "%d" ) ) )
    return trans.fill_template( 'jobs_today_all.mako',
                                day_label=day_label,
                                month_label=month_label,
                                year_label=year_label,
                                day_of_month=day_of_month,
                                month=month,
                                jobs=jobs,
                                msg=msg )
def whereInEquipement(self, fullQueryJoin, criteria):
    """Apply the FK_Sensor criterion to a monitored-site query.

    An 'is'/'is not' operator with value 'null' tests whether the site
    has any currently equipped sensor; any other operator is evaluated
    against the sensor's UnicIdentifier on current equipment rows.
    """
    sensorObj = [c for c in criteria if c['Column'] == 'FK_Sensor'][0]
    sensor = sensorObj['Value']
    table = Base.metadata.tables['MonitoredSiteEquipment']
    joinTable = outerjoin(table, Sensor,
                          table.c['FK_Sensor'] == Sensor.ID)
    operator = sensorObj['Operator'].lower()
    # Equipment rows are "current" when EndDate is in the future or open.
    stillEquipped = or_(table.c['EndDate'] >= func.now(),
                        table.c['EndDate'] == None)
    if operator in ['is', 'is not'] and sensorObj['Value'].lower() == 'null':
        subSelect = select([table.c['FK_MonitoredSite']]
                           ).select_from(joinTable).where(
            and_(MonitoredSite.ID == table.c['FK_MonitoredSite'],
                 stillEquipped))
        if operator == 'is':
            # "sensor is null" -> keep sites with NO current equipment.
            fullQueryJoin = fullQueryJoin.where(~exists(subSelect))
        else:
            fullQueryJoin = fullQueryJoin.where(exists(subSelect))
    else:
        subSelect = select([table.c['FK_MonitoredSite']]
                           ).select_from(joinTable).where(
            and_(MonitoredSite.ID == table.c['FK_MonitoredSite'],
                 and_(eval_.eval_binary_expr(Sensor.UnicIdentifier,
                                             sensorObj['Operator'],
                                             sensor),
                      stillEquipped)))
        fullQueryJoin = fullQueryJoin.where(exists(subSelect))
    return fullQueryJoin
def whereInEquipement(self, fullQueryJoin, criteria):
    """Apply the FK_Sensor criterion to a monitored-site query.

    'is null' / 'is not null' test whether a site currently has any
    equipped sensor; any other operator is matched against the sensor's
    UnicIdentifier on current equipment rows (EndDate open or future).
    """
    sensorObj = list(
        filter(lambda x: 'FK_Sensor' == x['Column'], criteria))[0]
    sensor = sensorObj['Value']
    table = Base.metadata.tables['MonitoredSiteEquipment']
    joinTable = outerjoin(table, Sensor, table.c['FK_Sensor'] == Sensor.ID)
    if (sensorObj['Operator'].lower() in ['is', 'is not']
            and sensorObj['Value'].lower() == 'null'):
        # Correlated subquery: current equipment rows for the site.
        subSelect = select([table.c['FK_MonitoredSite']]
                           ).select_from(joinTable).where(
            and_(MonitoredSite.ID == table.c['FK_MonitoredSite'],
                 or_(table.c['EndDate'] >= func.now(),
                     table.c['EndDate'] == None)
                 ))
        if sensorObj['Operator'].lower() == 'is':
            # "is null": keep only sites with no current equipment.
            fullQueryJoin = fullQueryJoin.where(~exists(subSelect))
        else:
            fullQueryJoin = fullQueryJoin.where(exists(subSelect))
    else:
        # Match the operator/value against currently equipped sensors.
        subSelect = select([table.c['FK_MonitoredSite']]
                           ).select_from(joinTable).where(
            and_(MonitoredSite.ID == table.c['FK_MonitoredSite'],
                 and_(eval_.eval_binary_expr(Sensor.UnicIdentifier,
                                             sensorObj['Operator'], sensor),
                      or_(table.c['EndDate'] >= func.now(),
                          table.c['EndDate'] == None))
                 ))
        fullQueryJoin = fullQueryJoin.where(exists(subSelect))
    return fullQueryJoin
def test_outer_join_one(self):
    """Outer join rendered with Oracle's legacy ``(+)`` operator.

    With ``use_ansi=False`` the dialect must emit the outer join as a
    WHERE-clause ``(+)`` condition instead of ANSI LEFT OUTER JOIN
    syntax, while leaving the raw-text EXISTS clause untouched.
    """
    table1, table2, table3 = self._test_outer_join_fixture()
    query = select(
        [table1, table2],
        or_(
            table1.c.name == "fred",
            table1.c.myid == 10,
            table2.c.othername != "jack",
            # Raw text must pass through the compiler verbatim.
            text("EXISTS (select yay from foo where boo = lar)"),
        ),
        from_obj=[
            outerjoin(table1, table2, table1.c.myid == table2.c.otherid)
        ],
    )
    self.assert_compile(
        query,
        "SELECT mytable.myid, mytable.name, "
        "mytable.description, myothertable.otherid,"
        " myothertable.othername FROM mytable, "
        "myothertable WHERE (mytable.name = "
        ":name_1 OR mytable.myid = :myid_1 OR "
        "myothertable.othername != :othername_1 OR "
        "EXISTS (select yay from foo where boo = "
        "lar)) AND mytable.myid = "
        "myothertable.otherid(+)",
        dialect=oracle.OracleDialect(use_ansi=False),
    )
def user_for_month(self, trans, **kwd):
    """List one user's jobs for a given month, newest first.

    ``email`` arrives with '@' encoded as 'X'; ``month`` is YYYY-MM and
    defaults to the current month.  For each job the remote host is
    resolved from the Galaxy session's remote address when possible.
    """
    params = util.Params(kwd)
    msg = ''
    email = params.get('email', None)
    if email is not None:
        # The @ char has been converted to an 'X'
        email = email.replace('X', '@')
    year, month = map(int, params.get('month', datetime.utcnow().strftime("%Y-%m")).split("-"))
    # [start_date, end_date) spans exactly the chosen calendar month.
    start_date = date(year, month, 1)
    end_date = start_date + timedelta(days=calendar.monthrange(year, month)[1])
    month_label = start_date.strftime("%B")
    year_label = start_date.strftime("%Y")
    jobs = []
    q = sa.select(
        (galaxy.model.Job.table.c.id,
         galaxy.model.Job.table.c.state,
         galaxy.model.Job.table.c.create_time,
         galaxy.model.Job.table.c.update_time,
         galaxy.model.Job.table.c.tool_id,
         galaxy.model.Job.table.c.command_line,
         galaxy.model.Job.table.c.stderr,
         galaxy.model.Job.table.c.session_id,
         (galaxy.model.Job.table.c.traceback).label('stack_trace'),
         galaxy.model.Job.table.c.info,
         (galaxy.model.User.table.c.email).label('user_email'),
         galaxy.model.GalaxySession.table.c.remote_addr),
        whereclause=sa.and_(galaxy.model.User.table.c.email == email,
                            galaxy.model.Job.table.c.create_time >= start_date,
                            galaxy.model.Job.table.c.create_time < end_date),
        # Jobs reach users via histories; sessions supply remote_addr.
        from_obj=[sa.outerjoin(galaxy.model.Job.table, galaxy.model.History.table)
                  .outerjoin(galaxy.model.User.table)
                  .outerjoin(galaxy.model.GalaxySession.table,
                             galaxy.model.Job.table.c.session_id == galaxy.model.GalaxySession.table.c.id)],
        order_by=[sa.desc(galaxy.model.Job.table.c.id)])
    for row in q.execute():
        remote_host = row.remote_addr
        if row.remote_addr:
            # Best-effort reverse DNS; keep the raw address on failure.
            # BUG FIX: bare "except: pass" also swallowed SystemExit and
            # KeyboardInterrupt; only resolver (OSError) failures,
            # including socket.herror/gaierror, should be ignored.
            try:
                remote_host = socket.gethostbyaddr(row.remote_addr)[0]
            except OSError:
                pass
        jobs.append((row.state, row.id, row.create_time, row.update_time,
                     row.session_id, row.tool_id, row.user_email,
                     remote_host, row.command_line, row.stderr,
                     row.stack_trace, row.info))
    return trans.fill_template('jobs_user_for_month.mako',
                               email=email,
                               month=month,
                               month_label=month_label,
                               year_label=year_label,
                               jobs=jobs,
                               msg=msg)
def specified_date_all(self, trans, **kwd):
    """List every job created on the specified date (default: today, UTC).

    For each job the remote host is resolved from the Galaxy session's
    remote address when reverse DNS succeeds.
    """
    params = util.Params(kwd)
    msg = ''
    monitor_email = params.get('monitor_email', '*****@*****.**')
    # [start_date, end_date) covers exactly the requested day.
    year, month, day = map(int, params.get('specified_date', datetime.utcnow().strftime("%Y-%m-%d")).split("-"))
    start_date = date(year, month, day)
    end_date = start_date + timedelta(days=1)
    day_label = start_date.strftime("%A")
    month_label = start_date.strftime("%B")
    year_label = start_date.strftime("%Y")
    day_of_month = start_date.strftime("%d")
    q = sa.select(
        (galaxy.model.Job.table.c.id,
         galaxy.model.Job.table.c.state,
         galaxy.model.Job.table.c.create_time,
         galaxy.model.Job.table.c.update_time,
         galaxy.model.Job.table.c.tool_id,
         galaxy.model.Job.table.c.command_line,
         galaxy.model.Job.table.c.stderr,
         galaxy.model.Job.table.c.session_id,
         (galaxy.model.Job.table.c.traceback).label('stack_trace'),
         galaxy.model.Job.table.c.info,
         (galaxy.model.User.table.c.email).label('user_email'),
         galaxy.model.GalaxySession.table.c.remote_addr),
        whereclause=sa.and_(galaxy.model.Job.table.c.create_time >= start_date,
                            galaxy.model.Job.table.c.create_time < end_date),
        # Jobs reach users via histories; sessions supply remote_addr.
        from_obj=[sa.outerjoin(galaxy.model.Job.table, galaxy.model.History.table)
                  .outerjoin(galaxy.model.User.table)
                  .outerjoin(galaxy.model.GalaxySession.table,
                             galaxy.model.Job.table.c.session_id == galaxy.model.GalaxySession.table.c.id)],
        order_by=[sa.desc(galaxy.model.Job.table.c.id)])
    jobs = []
    for row in q.execute():
        remote_host = row.remote_addr
        if row.remote_addr:
            # Best-effort reverse DNS; keep the raw address on failure.
            # BUG FIX: bare "except: pass" also swallowed SystemExit and
            # KeyboardInterrupt; only resolver (OSError) failures,
            # including socket.herror/gaierror, should be ignored.
            try:
                remote_host = socket.gethostbyaddr(row.remote_addr)[0]
            except OSError:
                pass
        jobs.append((row.state, row.id, row.create_time, row.update_time,
                     row.session_id, row.tool_id, row.user_email,
                     remote_host, row.command_line, row.stderr,
                     row.stack_trace, row.info))
    return trans.fill_template('jobs_specified_date_all.mako',
                               specified_date=start_date,
                               day_label=day_label,
                               month_label=month_label,
                               year_label=year_label,
                               day_of_month=day_of_month,
                               jobs=jobs,
                               msg=msg)
def _get_allocs_with_no_consumer_relationship(ctx):
    """Return consumer_id rows for allocations lacking a consumer record.

    LEFT JOINs allocations to consumers on UUID; rows where the consumer
    side is NULL identify the orphaned allocations.
    """
    missing_join = sa.outerjoin(
        ALLOC_TBL, CONSUMER_TBL,
        ALLOC_TBL.c.consumer_id == CONSUMER_TBL.c.uuid)
    sel = (sa.select([ALLOC_TBL.c.consumer_id])
           .select_from(missing_join)
           .where(CONSUMER_TBL.c.id.is_(None)))
    return ctx.session.execute(sel).fetchall()
async def get(self):
    """Return the full chat list as JSON, annotated with the requesting
    user's membership.

    The LEFT JOIN keeps chats the user has not joined; for those rows
    the ``UserChat.user`` column comes back NULL.
    """
    join = sa.outerjoin(Chat, UserChat, sa.and_(
        UserChat.chat == Chat.id,
        # Restrict the join (not the result set) to this user's rows.
        UserChat.user == self.request['user'].id,
    ))
    query = sa.select([Chat, UserChat.user]).select_from(
        join).order_by(Chat.id)
    chat_list = await (await self.db.execute(query)).fetchall()
    # ``dumps`` presumably knows how to serialize row objects -- confirm.
    return web.json_response(chat_list, dumps=dumps)
def countQuery(self, criteria=None):
    """Extend the base count query with equipment-related criteria.

    Handles three families of filters on sensors:
      * 'available' columns -> delegated to WhereInJoinTable;
      * FK_MonitoredSiteName / FK_Individual with a value operator
        -> EXISTS over current-equipment rows matching the value;
      * the same columns with 'is null' / 'is not null'
        -> EXISTS / NOT EXISTS on whether the link column is set.
    """
    query = super().countQuery(criteria)
    curEquipmentTable = Base.metadata.tables['CurrentlySensorEquiped']
    MonitoredSiteTable = Base.metadata.tables['MonitoredSite']
    # NOTE(review): the join mixes the ORM class MonitoredSite with the
    # raw MonitoredSiteTable columns -- presumably both map the same
    # 'MonitoredSite' table; confirm.
    joinTable = outerjoin(
        curEquipmentTable, MonitoredSite,
        MonitoredSiteTable.c['ID'] == curEquipmentTable.c['FK_MonitoredSite'])
    for obj in criteria:
        if 'available' in obj['Column']:
            query = self.WhereInJoinTable(query, obj)
        if (obj['Column'] in ['FK_MonitoredSiteName', 'FK_Individual']
                and obj['Operator'] not in ['is null', 'is not null']):
            # Correlated EXISTS: current equipment rows for this sensor.
            queryExist = select(curEquipmentTable.c
                                ).select_from(joinTable
                                              ).where(Sensor.ID == curEquipmentTable.c['FK_Sensor'])
            if obj['Column'] == 'FK_MonitoredSiteName':
                queryExist = queryExist.where(eval_.eval_binary_expr(
                    MonitoredSiteTable.c['Name'], obj['Operator'], obj['Value']))
            if obj['Column'] == 'FK_Individual':
                queryExist = queryExist.where(eval_.eval_binary_expr(
                    curEquipmentTable.c['FK_Individual'], obj['Operator'], obj['Value']))
            query = query.where(exists(queryExist))
        if (obj['Column'] in ['FK_MonitoredSiteName', 'FK_Individual']
                and obj['Operator'] in ['is null', 'is not null']):
            queryExist = select(curEquipmentTable.c
                                ).select_from(joinTable
                                              ).where(Sensor.ID == curEquipmentTable.c['FK_Sensor'])
            if obj['Column'] == 'FK_Individual':
                queryExist = queryExist.where(
                    and_(Sensor.ID == curEquipmentTable.c['FK_Sensor'],
                         curEquipmentTable.c['FK_Individual'] != None))
            if obj['Column'] == 'FK_MonitoredSiteName':
                queryExist = queryExist.where(
                    and_(Sensor.ID == curEquipmentTable.c['FK_Sensor'],
                         curEquipmentTable.c['FK_MonitoredSite'] != None))
            # 'is not null' keeps sensors where such a link exists;
            # 'is null' negates the EXISTS.
            if 'not' in obj['Operator']:
                query = query.where(exists(queryExist))
            else:
                query = query.where(not_(exists(queryExist)))
    return query
def get(run_id, query, with_stats=True):
    """Return a list of genotypes in a vcf conforming to the given query, as
    well as a dict of stats calculated on them.

    If a truth_vcf is associated with this VCF, stats include true/false,
    positive/negative stats, as well as precision, recall, and f1score.
    Stats also include the number of records, and the number of records
    once filters are applied.

    A query is a dictionary which specifies the range, filters, limit,
    offset and ordering which should be applied against genotypes before
    genotypes and stats are returned.

    It has structure:

    {range: {contig: "X", start: 0, end: 250000000},
     filters: [{columnName: 'info:DP', filterValue: '50', type: '<'}, ...],
     sortBy: [{columnName: 'contig', order: 'asc'},
              {columnName: 'position', order: 'asc'}, ...],
     page: 10,
     limit: 250
    }
    """
    query = _annotate_query_with_types(query, spec(run_id))
    compare_to_run_id = query.get('compareToVcfId')
    with tables(db.engine, 'genotypes') as (con, g):
        if compare_to_run_id:
            # We consider a genotype validated if a truth genotype exists at its
            # location (contig/position) with the same ref/alts. This isn't
            # entirely accurate: for example, it handles SVs very poorly.
            gt = g.alias()
            # Self LEFT JOIN against the truth VCF's genotypes on locus,
            # alleles and sample; unmatched calls come back with NULLs.
            joined_q = outerjoin(g, gt, and_(
                gt.c.vcf_id == compare_to_run_id,
                g.c.contig == gt.c.contig,
                g.c.position == gt.c.position,
                g.c.reference == gt.c.reference,
                g.c.alternates == gt.c.alternates,
                g.c.sample_name == gt.c.sample_name))
            # A non-NULL truth contig marks the call as a true positive.
            valid_column = label('tag:true-positive', gt.c.contig != None)
            q = (select(g.c + [valid_column])
                 .select_from(joined_q)
                 .where(g.c.vcf_id == run_id))
        else:
            q = select(g.c).where(g.c.vcf_id == run_id)
        # Apply the user-requested range/filters/ordering/paging, then a
        # fixed (contig, position) ordering for deterministic results.
        q = _add_range(q, g, query.get('range'))
        q = _add_filters(q, g, query.get('filters'))
        q = _add_orderings(q, g, query.get('sortBy'))
        q = _add_paging(q, g, query.get('limit'), query.get('page'))
        q = _add_ordering(q, g, 'String', 'contig', 'asc')
        q = _add_ordering(q, g, 'Integer', 'position', 'asc')
        genotypes = [dict(g) for g in con.execute(q).fetchall()]
    stats = calculate_stats(run_id, compare_to_run_id, query) if with_stats else {}
    return {'records': genotypes, 'stats': stats}
def byHierarchyUserLeft(headers, after, before, user, subject, hierarchy): """ SQL generated for reports where Users are in the first column. Used when a time hierarchy is selected. """ # Grab data neccessary for hiearchy type if hierarchy == "Yearly": cols = getYears(after, before) table_headers = cols sqlType = "YEAR" elif hierarchy == "Monthly": cols = getMonths() table_headers = getMonthHeaders() sqlType = "MONTH" else: cols = getWeeks() table_headers = getWeekHeaders() sqlType = "WEEK" # Build list of columns to select by columns = ["users.user_name as Name"] if singleSubject(subject): columns += ["SUM( CASE images.subject WHEN '{0}' THEN 1 ELSE 0 END ) AS Total".format(subject)] else: columns += ["COUNT( images.photo_id ) AS Total"] columns += [sqlType + "(images.timing) As Col"] # Build select statement j = outerjoin(User, Image) statement = select(columns, from_obj=User) \ .where(getUserFilter(user)) \ .where(getDateRangeFilter(after, before)) \ .select_from(j) \ .group_by("Name, Col") # Save as sqlalchemy subquery subquery = db.session.query("Username", "Total", "Col").from_statement(statement).subquery() # Build list of columns to select by for outer select columns2 = ["Name As Name"] columns2 += ["SUM( CASE Col WHEN {0} THEN Total ELSE 0 END ) AS '{0}'".format(col) for col in cols] columns2 += ["SUM(Total) AS Total"] # Build outer select statement2 = select(columns2).group_by("Name WITH ROLLUP").select_from(subquery) select_columns = ["Name"] + cols + ["Total"] headers += ["User"] + table_headers + ["Total"] # Save outer select as sql alchemy query query = db.session.query(*select_columns).from_statement(statement2) return query
def per_user(self, trans, **kwd):
    """Report how many stored workflows each user owns, busiest first."""
    message = ''
    q = sa.select(
        (model.User.table.c.email.label('user_email'),
         sa.func.count(model.StoredWorkflow.table.c.id).label('total_workflows')),
        from_obj=[sa.outerjoin(model.StoredWorkflow.table, model.User.table)],
        group_by=['user_email'],
        order_by=[sa.desc('total_workflows'), 'user_email'])
    workflows = [(row.user_email, row.total_workflows)
                 for row in q.execute()]
    return trans.fill_template('/webapps/reports/workflows_per_user.mako',
                               workflows=workflows,
                               message=message)
def per_user(self, trans, **kwd):
    """Report the number of requests submitted by each user, busiest first."""
    params = util.Params(kwd)
    message = ''
    q = sa.select(
        (model.User.table.c.email.label('user_email'),
         sa.func.count(model.Request.table.c.id).label('total')),
        from_obj=[sa.outerjoin(model.Request.table, model.User.table)],
        group_by=['user_email'],
        order_by=[sa.desc('total'), 'user_email'])
    requests = [(row.user_email, row.total) for row in q.execute()]
    return trans.fill_template('/webapps/reports/requests_per_user.mako',
                               requests=requests,
                               message=message)
def per_user(self, trans, **kwd):
    """Report total jobs per user e-mail, highest count first."""
    params = util.Params(kwd)
    message = ""
    totals_q = sa.select(
        (model.User.table.c.email.label("user_email"),
         sa.func.count(model.Job.table.c.id).label("total_jobs")),
        from_obj=[sa.outerjoin(model.Job.table, model.User.table)],
        group_by=["user_email"],
        order_by=[sa.desc("total_jobs"), "user_email"],
    )
    jobs = [(row.user_email, row.total_jobs) for row in totals_q.execute()]
    return trans.fill_template("/webapps/reports/jobs_per_user.mako",
                               jobs=jobs,
                               message=message)