def test_underscore_update_many_query(self, mock_clean):
    """_update_many with a Query input delegates to the cleaned query's update()."""
    from sqlalchemy.orm.query import Query
    query_in = Query('asd')
    cleaned = Query("ASD")
    cleaned.all = Mock(return_value=[1, 2, 3])
    cleaned.update = Mock()
    mock_clean.return_value = cleaned
    result = docs.BaseMixin._update_many(query_in, {'foo': 'bar'})
    # The raw query must be passed through the cleaner exactly once.
    mock_clean.assert_called_once_with(query_in)
    cleaned.update.assert_called_once_with(
        {'foo': 'bar'}, synchronize_session='fetch')
    # The return value is whatever the bulk update() produced.
    assert result == cleaned.update()
def test_underscore_update_many_query(self, mock_clean):
    """Verify _update_many routes a Query through the cleaner and bulk-updates it."""
    from sqlalchemy.orm.query import Query
    raw = Query('asd')
    sanitized = Query("ASD")
    sanitized.all = Mock(return_value=[1, 2, 3])
    sanitized.update = Mock()
    mock_clean.return_value = sanitized
    updated_count = docs.BaseMixin._update_many(raw, {'foo': 'bar'})
    mock_clean.assert_called_once_with(raw)
    # Bulk update must use the 'fetch' synchronization strategy.
    sanitized.update.assert_called_once_with(
        {'foo': 'bar'}, synchronize_session='fetch')
    assert updated_count == sanitized.update()
def test_underscore_delete_many_query(self, mock_clean, mock_on_bulk):
    """_delete_many on a Query cleans it, bulk-deletes, and fires the bulk hook."""
    from sqlalchemy.orm.query import Query
    source = Query('asd')
    cleaned = Query("ASD")
    cleaned.all = Mock(return_value=[1, 2, 3])
    cleaned.delete = Mock()
    mock_clean.return_value = cleaned
    deleted = docs.BaseMixin._delete_many(source)
    mock_clean.assert_called_once_with(source)
    # Deletion must skip session synchronization.
    cleaned.delete.assert_called_once_with(synchronize_session=False)
    # The on-bulk hook receives the materialized rows and a None request.
    mock_on_bulk.assert_called_once_with(docs.BaseMixin, [1, 2, 3], None)
    assert deleted == cleaned.delete()
def add_coin(self, data):
    """Insert any coins from *data* whose trade symbols are not stored yet.

    :param data: iterable of trade-symbol strings (may carry surrounding
        whitespace, which is stripped before comparison/insert).
    """
    query = Query(CoinDimRow, session=self.DBSession())
    # NOTE(review): Query.add_columns is generative -- it returns a new Query
    # and the result is discarded here, so this call is likely a no-op;
    # kept for parity with the original, verify intent.
    query.add_columns(CoinDimRow.trade_symbol)
    # Build a set once so membership tests in the loop are O(1) instead of
    # scanning a list per coin (original used list(set([...])) then `in` list).
    existing = {coin.trade_symbol for coin in query.all()}
    for coin in data:
        symbol = coin.strip()
        # Guard clause replaces the original `if ...: pass / else:` pattern.
        if symbol in existing:
            continue
        row = CoinDimRow()
        row.trade_symbol = symbol
        self.DBSession.add(row)
        self.DBSession.commit()
def test_underscore_delete_many_query(self, mock_clean, mock_on_bulk):
    """Bulk delete of a Query: clean first, delete without sync, notify hook."""
    from sqlalchemy.orm.query import Query
    incoming = Query('asd')
    filtered = Query("ASD")
    filtered.all = Mock(return_value=[1, 2, 3])
    filtered.delete = Mock()
    mock_clean.return_value = filtered
    removed = docs.BaseMixin._delete_many(incoming)
    mock_clean.assert_called_once_with(incoming)
    filtered.delete.assert_called_once_with(synchronize_session=False)
    mock_on_bulk.assert_called_once_with(docs.BaseMixin, [1, 2, 3], None)
    # _delete_many reports whatever the bulk delete() returned.
    assert removed == filtered.delete()
def yield_rows(query: Query) -> Generator[Row, None, None]:
    """Yield rows from the database."""
    # Buffered iteration (yield_per) is only worth it for very large result
    # sets; sqlalchemy suggests >= 10k rows, and in testing even an RPi3
    # needs more than that, so we only switch over for > 1 day of data.
    #
    # The day comparison deliberately uses .days, not .total_seconds(),
    # because the UI makes it easy to overshoot one day by a few hours and
    # we don't want that to flip the strategy.
    if not self.limited_select and (end_day - start_day).days > 1:
        # Buffer rows to reduce memory pressure on huge result sets.
        return query.yield_per(1024)  # type: ignore[no-any-return]
    return query.all()  # type: ignore[no-any-return]
def execute(
    qry: Query, to_native: bool = False, validate_entity_ids: bool = True
) -> list:
    """Query the database and convert the objects to HA native form.

    This method also retries a few times in the case of stale connections.
    """
    for attempt in range(RETRIES):
        try:
            started = time.perf_counter()
            if to_native:
                # Convert each row, dropping any that fail conversion (None).
                rows = []
                for db_row in qry:
                    native = db_row.to_native(validate_entity_id=validate_entity_ids)
                    if native is not None:
                        rows.append(native)
            else:
                rows = qry.all()
            if _LOGGER.isEnabledFor(logging.DEBUG):
                elapsed = time.perf_counter() - started
                if to_native:
                    _LOGGER.debug(
                        "converting %d rows to native objects took %fs",
                        len(rows),
                        elapsed,
                    )
                else:
                    _LOGGER.debug(
                        "querying %d rows took %fs",
                        len(rows),
                        elapsed,
                    )
            return rows
        except SQLAlchemyError as err:
            _LOGGER.error("Error executing query: %s", err)
            # Re-raise on the final attempt; otherwise back off and retry.
            if attempt == RETRIES - 1:
                raise
            time.sleep(QUERY_RETRY_WAIT)
    assert False  # unreachable
def findByFieldsValues(cls,fields,values,session=None,onlyOne=True,notNoneFields=None,orderByFields=None,ascending=True,operators=None,groupByFields=None,selectFieldsAndFunctions=None,nestedOperators=None,returnDataframe=False,distinct=False,printQuery=False): if operators is None: operators = [SQLOperator.equalOperator] * 1000 close = False if session is None: session = cls.database.Session() close = True o = None if selectFieldsAndFunctions is None: o = session.query(cls) else: o = Query(selectFieldsAndFunctions, session=session) for field,value,operator in zip(fields,values,operators): operator.field = field operator.value = value o = operator.filter(o) if nestedOperators is not None: for operator in nestedOperators: o = operator.filter(o) if notNoneFields is not None: for f in notNoneFields: o = o.filter(f != None) if groupByFields is not None: for f in groupByFields: o = o.group_by(f) if orderByFields is not None: for f in orderByFields: if ascending: o = o.order_by(f) else: o = o.order_by(f.desc()) if printQuery: print(o) if onlyOne: o = o.first() else: if distinct: o = o.distinct() if returnDataframe: o = cls.queryToDataframe(o) else: o = o.all() if close: session.close() return o
def select(self, table, params):
    """
    Uses params to build conditional SQL statement ( WHERE ... ).

    :Parameters:
      **params** - `dict`
        arguments for the mysql query ( must match table columns ! ).

    :return: S_OK() || S_ERROR()
    """
    # NOTE: Python 2 code (iteritems/basestring).
    session = self.sessionMaker_o()
    # finding the table: try each registered extension module first.
    found = False
    for ext in self.extensions:
        try:
            table_c = getattr(
                __import__(ext + __name__, globals(), locals(), [table]), table)
            found = True
            break
        except (ImportError, AttributeError):
            continue
    # If not found in extensions, import it from DIRAC base (this same module).
    if not found:
        table_c = getattr(
            __import__(__name__, globals(), locals(), [table]), table)
    # handling query conditions found in 'Meta'
    columnNames = [
        column.lower() for column in params.get('Meta', {}).get('columns', [])]
    older = params.get('Meta', {}).get('older', None)
    newer = params.get('Meta', {}).get('newer', None)
    order = params.get('Meta', {}).get('order', None)
    limit = params.get('Meta', {}).get('limit', None)
    # 'Meta' is control data, not a column filter -- drop it before iterating.
    params.pop('Meta', None)
    try:
        # setting up the select query
        if not columnNames:
            # query on the whole table
            wholeTable = True
            columns = table_c.__table__.columns
            # retrieve the column names ("table.column" -> "column")
            columnNames = [str(column).split('.')[1] for column in columns]
            select = Query(table_c, session=session)
        else:
            # query only the selected columns
            wholeTable = False
            columns = [getattr(table_c, column) for column in columnNames]
            select = Query(columns, session=session)
        # query conditions: each remaining param maps to a column filter.
        for columnName, columnValue in params.iteritems():
            if not columnValue:
                continue
            column_a = getattr(table_c, columnName.lower())
            if isinstance(columnValue, (list, tuple)):
                select = select.filter(column_a.in_(list(columnValue)))
            elif isinstance(columnValue, (basestring, datetime.datetime, bool)):
                select = select.filter(column_a == columnValue)
            else:
                # Unsupported value type: logged and skipped, not fatal.
                self.log.error("type(columnValue) == %s" % type(columnValue))
        # older/newer are (columnName, threshold) pairs for range filtering.
        if older:
            column_a = getattr(table_c, older[0].lower())
            select = select.filter(column_a < older[1])
        if newer:
            column_a = getattr(table_c, newer[0].lower())
            select = select.filter(column_a > newer[1])
        if order:
            # order is either "col" or ("col", "asc"/"desc")
            order = [order] if isinstance(order, basestring) else list(order)
            column_a = getattr(table_c, order[0].lower())
            if len(order) == 2 and order[1].lower() == 'desc':
                select = select.order_by(desc(column_a))
            else:
                select = select.order_by(column_a)
        if limit:
            select = select.limit(int(limit))
        # querying
        selectionRes = select.all()
        # handling the results
        if wholeTable:
            selectionResToList = [res.toList() for res in selectionRes]
        else:
            selectionResToList = [[
                getattr(res, col) for col in columnNames
            ] for res in selectionRes]
        finalResult = S_OK(selectionResToList)
        finalResult['Columns'] = columnNames
        return finalResult
    except exc.SQLAlchemyError as e:
        session.rollback()
        self.log.exception("select: unexpected exception", lException=e)
        return S_ERROR("select: unexpected exception %s" % e)
    finally:
        session.close()
def select( self, table, params ):
    """
    Uses params to build conditional SQL statement ( WHERE ... ).

    :Parameters:
      **params** - `dict`
        arguments for the mysql query ( must match table columns ! ).

    :return: S_OK() || S_ERROR()
    """
    # NOTE: Python 2 code (iteritems/basestring).
    session = self.sessionMaker_o()
    # finding the table: try each registered extension module first.
    found = False
    for ext in self.extensions:
        try:
            table_c = getattr(__import__(ext + __name__, globals(), locals(), [table]), table)
            found = True
            break
        except (ImportError, AttributeError):
            continue
    # If not found in extensions, import it from DIRAC base (this same module).
    if not found:
        table_c = getattr(__import__(__name__, globals(), locals(), [table]), table)
    # handling query conditions found in 'Meta'
    columnNames = [column.lower() for column in params.get('Meta', {}).get('columns', [])]
    older = params.get('Meta', {}).get('older', None)
    newer = params.get('Meta', {}).get('newer', None)
    order = params.get('Meta', {}).get('order', None)
    limit = params.get('Meta', {}).get('limit', None)
    # 'Meta' is control data, not a column filter -- drop it before iterating.
    params.pop('Meta', None)
    try:
        # setting up the select query
        if not columnNames:
            # query on the whole table
            wholeTable = True
            columns = table_c.__table__.columns
            # retrieve the column names ("table.column" -> "column")
            columnNames = [str(column).split('.')[1] for column in columns]
            select = Query(table_c, session = session)
        else:
            # query only the selected columns
            wholeTable = False
            columns = [getattr(table_c, column) for column in columnNames]
            select = Query(columns, session = session)
        # query conditions: each remaining param maps to a column filter.
        for columnName, columnValue in params.iteritems():
            if not columnValue:
                continue
            column_a = getattr(table_c, columnName.lower())
            if isinstance(columnValue, (list, tuple)):
                select = select.filter(column_a.in_(list(columnValue)))
            elif isinstance(columnValue, (basestring, datetime.datetime, bool) ):
                select = select.filter(column_a == columnValue)
            else:
                # Unsupported value type: logged and skipped, not fatal.
                self.log.error("type(columnValue) == %s" %type(columnValue))
        # older/newer are (columnName, threshold) pairs for range filtering.
        if older:
            column_a = getattr(table_c, older[0].lower())
            select = select.filter(column_a < older[1])
        if newer:
            column_a = getattr(table_c, newer[0].lower())
            select = select.filter(column_a > newer[1])
        if order:
            # order is either "col" or ("col", "asc"/"desc")
            order = [order] if isinstance(order, basestring) else list(order)
            column_a = getattr(table_c, order[0].lower())
            if len(order) == 2 and order[1].lower() == 'desc':
                select = select.order_by(desc(column_a))
            else:
                select = select.order_by(column_a)
        if limit:
            select = select.limit(int(limit))
        # querying
        selectionRes = select.all()
        # handling the results
        if wholeTable:
            selectionResToList = [res.toList() for res in selectionRes]
        else:
            selectionResToList = [[getattr(res, col) for col in columnNames] for res in selectionRes]
        finalResult = S_OK(selectionResToList)
        finalResult['Columns'] = columnNames
        return finalResult
    except exc.SQLAlchemyError as e:
        session.rollback()
        self.log.exception( "select: unexpected exception", lException = e )
        return S_ERROR( "select: unexpected exception %s" % e )
    finally:
        session.close()
def get_coins(self):
    """Return a list of [trade_symbol, id] pairs for every coin-dimension row."""
    query = Query(CoinDimRow, session=self.DBSession())
    # NOTE(review): Query.add_columns is generative -- it returns a new Query
    # and the return value is discarded here, so this call appears to be a
    # no-op; the .all() below still yields full CoinDimRow entities. Verify.
    query.add_columns(CoinDimRow.trade_symbol, CoinDimRow.id)
    res = query.all()
    return [[coin.trade_symbol, coin.id] for coin in res]
def get_all(query: Query) -> List[BaseModel]:
    """Return every model instance matched by *query*."""
    results = query.all()
    return results