def test_sql_expression_pk_autoinc_lastinserted(self):
    # test that postfetch isn't invoked for a SQL expression
    # in a primary key column. the DB either needs to support a lastrowid
    # that can return it, or RETURNING.  [ticket:3133]
    metadata = MetaData()
    table = Table(
        "sometable",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("data", String),
    )

    # PK value supplied as a SQL expression, plus return_defaults():
    # the compiler must not fall back to postfetch/RETURNING for it.
    stmt = table.insert().return_defaults().values(id=func.foobar())
    compiled = stmt.compile(dialect=sqlite.dialect(), column_keys=["data"])
    eq_(compiled.postfetch, [])
    eq_(compiled.returning, [])

    self.assert_compile(
        stmt,
        "INSERT INTO sometable (id, data) VALUES " "(foobar(), ?)",
        checkparams={"data": "foo"},
        params={"data": "foo"},
        dialect=sqlite.dialect(),
    )
def get_ips(ids=None, deg=1):
    """
    Return a pandas DataFrame with ionization energies for a set of elements.

    Args:
        ids: list, str or int
            A list of atomic numbers, symbols, element names or a combination
            of the above. If nothing is specified all elements are selected.
        deg: int or list of int
            Degree of ionization, either as int or a list of ints. If a list
            is passed then the output will contain ionization energies
            corresponding to particular degrees in columns.

    Returns:
        df: DataFrame
            Pandas DataFrame with atomic numbers, symbols and ionization
            energies

    Raises:
        ValueError: if ``deg`` is neither an int nor a list/tuple of ints.
    """
    session = get_session()
    engine = get_engine()

    if ids is None:
        atns = list(range(1, 119))
    else:
        atns = ids_to_attr(ids, attr="atomic_number")

    query = session.query(Element.atomic_number, Element.symbol).filter(
        Element.atomic_number.in_(atns))
    df = pd.read_sql_query(
        query.statement.compile(dialect=sqlite.dialect()), engine)

    # Normalize ``deg`` to a list of ints so one merge loop handles both the
    # scalar and the collection case (the two branches were duplicated before).
    if isinstance(deg, (list, tuple)):
        if not all(isinstance(d, int) for d in deg):
            raise ValueError("deg should be a list of ints")
        degrees = list(deg)
    elif isinstance(deg, int):
        degrees = [deg]
    else:
        raise ValueError("deg should be an int or a list or tuple of ints")

    for d in degrees:
        query = (session.query(IonizationEnergy)
                 .filter(IonizationEnergy.degree == d)
                 .filter(IonizationEnergy.atomic_number.in_(atns)))
        out = pd.read_sql_query(
            query.statement.compile(dialect=sqlite.dialect()), engine)
        out = out[["atomic_number", "energy"]]
        # One column per requested ionization degree.
        out.columns = ["atomic_number", "IP{0:d}".format(d)]
        df = pd.merge(df, out, on="atomic_number", how="left")
    return df
def fetch_ionization_energies(degree: Union[List[int], int] = 1) -> pd.DataFrame:
    """
    Fetch a :py:class:`pandas.DataFrame` with ionization energies for all
    elements indexed by atomic number.

    Args:
        degree: Degree of ionization, either as int or a list of ints. If
            a list is passed then the output will contain ionization energies
            corresponding to particular degrees in columns.

    Returns:
        df (pandas.DataFrame): ionization energies, indexed by atomic number

    Raises:
        ValueError: when ``degree`` is not a positive int or a collection
            of positive ints.
    """
    # validate degree
    if isinstance(degree, (list, tuple, set)):
        if not all(isinstance(d, int) for d in degree) or any(d <= 0 for d in degree):
            raise ValueError("degree should be a list of positive ints")
    elif isinstance(degree, int):
        if degree <= 0:
            # plain string: the old f-string had no placeholders (F541)
            raise ValueError("degree should be positive")
        degree = [degree]
    else:
        raise ValueError(
            f"degree should be either a positive int or a collection of positive ints, got: {degree}"
        )

    session = get_session()
    engine = get_engine()

    # Base frame: one row per element, ordered by atomic number.
    query = session.query(Element.atomic_number).order_by("atomic_number")
    df = pd.read_sql_query(query.statement.compile(dialect=sqlite.dialect()), engine)

    # Left-merge one IE column per requested degree; elements without data
    # for a given degree get NaN.
    for d in degree:
        query = session.query(IonizationEnergy).filter(IonizationEnergy.degree == d)
        energies = pd.read_sql_query(
            query.statement.compile(dialect=sqlite.dialect()), engine
        )
        df = pd.merge(
            df,
            energies.loc[:, ["atomic_number", "energy"]].rename(
                columns={"energy": "IE{0:d}".format(d)}
            ),
            on="atomic_number",
            how="left",
        )
    return df.set_index("atomic_number")
class SQLite(Resource):
    """In-memory SQLite implementation of ``Resource``."""

    def autocommit_off(self) -> None:
        # default is off
        pass

    def create_column_metadata_set(self, cursor: Cursor) -> List[ColumnMetadata]:
        raise NotImplementedError

    # SQLite supports the 'named' (:name) parameter style.
    DIALECT = sqlite.dialect(paramstyle='named')

    @classmethod
    def create_connection_maker(
            cls,
            host: Optional[str] = None,
            port: Optional[int] = None,
            user_name: Optional[str] = None,
            password: Optional[str] = None,
            # was annotated Dict[str, Any] with a None default; Optional is
            # the correct type for a defaulted-to-None parameter
            engine_kwargs: Optional[Dict[str, Any]] = None,
    ) -> ConnectionMaker:
        """Return a callable producing in-memory sqlite3 connections.

        All parameters exist for interface compatibility and are ignored.
        """
        def connect(_: Optional[str] = None):  # type: ignore
            return sqlite3.connect(':memory:')

        return connect

    def get_field_from_value(self, value: Any) -> Field:
        return super().get_field_from_value(value)
def get(self):
    """Handle GET: return events filtered by offset/limit/type/src/reverse
    query args, plus the next offset for pagination."""
    t_i = time.time()
    offset = 0
    limit = 1000
    args = parser.parse_args()
    logger.debug(args)
    # PEP 8: compare against None with identity, not equality.
    if args['offset'] is not None:
        offset = args['offset']
    if args['limit'] is not None and args['limit'] < 1000:
        limit = args['limit']  # TODO: make max limit a config option
    with orm.get_session() as sess:
        q = sess.query(Event)
        if args['type'] is not None:
            q = q.filter_by(type=args['type'])
        if args['src'] is not None:
            q = q.filter_by(src_abbr=args['src'])
        if args['reverse'] is None:
            # cant offset when reversed
            q = q.filter(Event.id >= offset)
        else:
            q = q.order_by(Event.id.desc())
        q = q.limit(limit)
        result = default_ok_result.copy()
        qstring = str(q.statement.compile(
            dialect=sqlite.dialect(),
            compile_kwargs={"literal_binds": True})).replace('\n', '')
        logger.debug("DB Query: {}".format(qstring))
        es = q.all()
        logger.debug('queried db in {} seconds.'.format(time.time() - t_i))
        result.update({
            'offset': offset,
            # next page starts just past the last returned id
            'next_offset': (es[-1].id + 1 if len(es) > 0 else offset),
            'results': [e.getDict() for e in es],
        })
    return result
def get_sql(self, query):
    """Compile a query-language string into literal SQLite SQL.

    Parses ``query``, resolves it, walks the resulting tree to build a
    WHERE clause, attaches joins/columns/having/group-by accumulated on
    ``self``, and returns the fully rendered SQL string.

    NOTE(review): the print() calls look like leftover debugging output.
    """
    print("QUERY:", query)
    self._set_fields()
    tree = parser.parse(query)
    rtree = resolver(tree)
    print("REPR:", repr(rtree))
    # visit() converts the resolved tree into a SQLAlchemy boolean clause
    visited = (self.visit(rtree))
    print("VISITED:", visited)
    self.make_joins()
    columns = self.get_columns()
    print("COLUMNS:", columns)
    s = select(columns)
    for join in self.joins:
        s = s.select_from(join)
    if self.where:
        print("WHERE:", self.where)
        # extra WHERE conditions accumulated during visiting, AND-ed together
        for where in self.where:
            s = s.where(where)
    # the main condition derived from the parsed query
    s = s.where(visited)
    if self.having is not None:
        s = s.having(self.having)
    if self.group_by is not None:
        s = s.group_by(self.group_by)
    print(s)
    print(s.compile().params)
    # literal_binds inlines parameter values into the returned SQL text
    return str(
        s.compile(compile_kwargs={"literal_binds": True},
                  dialect=sqlite.dialect()))
async def test_pagination(database, notes, notes_data):
    """End-to-end cursor pagination: create, insert, page, search, filter, drop."""
    await database.connect()

    # Create table
    dialect = sqlite.dialect()
    create_query = str(CreateTable(notes).compile(dialect=dialect))
    await database.execute(query=create_query)

    # Insert
    query = notes.insert()
    await database.execute_many(query=query, values=notes_data)

    # Fetch multiple rows
    query = notes.select()
    pagination = CursorPagination(database, query)

    paginated = await pagination.page(PAGE_PARAMS)
    assert len(paginated.edges) == PAGE_PARAMS.first
    # truthiness asserts instead of `== True` / `== False` (E712)
    assert paginated.page_info.has_next_page
    assert not paginated.page_info.has_previous_page

    searched = await pagination.page(SEARCH_PARAMS)
    assert len(searched.edges) == 1

    filtered = await pagination.page(FILTER_PARAMS)
    actual = [e.node["completed"] for e in filtered.edges]
    expected = [True] * FILTER_PARAMS.first
    assert actual == expected

    drop_query = str(DropTable(notes).compile(dialect=dialect))
    await drop_db(database, drop_query)
def find_by_pkg_name(session, package_name, version=None):
    """
    Find a package by name.

    If version is `None`, returns all versions.

    Parameters
    ----------
    session : :class:`sqlalchemy.orm.session.Session`
    package_name : str
        The NuGet name of the package - the "id" tag in the NuSpec file.
    version : str
        The version of the package to download. If `None`, then return
        all versions.

    Returns
    -------
    results : list of :class:`Version`
        Ordered by descending version.
    """
    logger.debug("db.find_by_pkg_name('%s', version='%s')"
                 % (package_name, version))
    query = (session.query(Version).join(Package)
             .filter(Package.name == package_name))
    stmt = query.statement.compile(dialect=sqlite.dialect(),
                                   compile_kwargs={"literal_binds": True})
    logger.debug(stmt)
    if version:
        query = query.filter(Version.version == version)
    # BUG FIX: Query.order_by returns a *new* query object; the original code
    # discarded it, so the ordering was never applied.
    query = query.order_by(desc(Version.version))
    results = query.all()
    logger.info("Found %d results." % len(results))
    logger.debug(results)
    return results
def index():
    """Bookmark list view: all bookmarks on GET, title search on POST,
    paginated 10 per page."""
    conn = db.session()
    if request.method == 'GET':
        bookmarks = conn.query(Bookmark).join(
            User, Bookmark.user_id == User.id).order_by(Bookmark.id).all()
    if request.method == 'POST':
        query = request.form['search']
        bookmarks = conn.query(Bookmark).join(
            User, Bookmark.user_id == User.id).filter(
                Bookmark.title.like(f"%{query}%"),
                Bookmark.private == 0).all()
        # Debug output must stay inside the POST branch: ``query`` is only
        # bound here, so running it on GET raised NameError.
        from sqlalchemy.dialects import sqlite
        print(
            conn.query(Bookmark).join(
                User, Bookmark.user_id == User.id).filter(
                    Bookmark.title.like(f"%{query}%")).statement.compile(
                        dialect=sqlite.dialect()))
    page = request.args.get(get_page_parameter(), type=int, default=1)
    # slice out the requested page (10 items per page)
    res = bookmarks[(page - 1) * 10:page * 10]
    pagination = Pagination(page=page, total=len(bookmarks), per_page=10,
                            css_framework='bootstrap4')
    return render_template('bookmark/index.html', bookmarks=res,
                           pagination=pagination)
def create_index(table: Table):
    """Build ``CREATE INDEX IF NOT EXISTS`` DDL for every index on *table*.

    :param table: SQLAlchemy Table whose indexes are rendered
    :return: dict mapping index name -> DDL string
    """
    creates = {}
    for index in table.indexes:
        index: Index
        stmt = str(CreateIndex(index).compile(dialect=sqlite.dialect()))
        # Replace only the leading keyword once: a blanket
        # .replace('INDEX', ...) would also corrupt any index or column
        # name that happens to contain the substring "INDEX".
        stmt = stmt.replace('CREATE INDEX', 'CREATE INDEX IF NOT EXISTS', 1).strip()
        creates[index.name] = stmt
    return creates
def test_process_bind_param_none(self):
    """Assert a None value binds as None (docstring previously copy-pasted
    from the hex-encoding test)."""
    guid = models.GUID()
    dialect = sqlite.dialect()
    result = guid.process_bind_param(None, dialect)
    # assertIsNone gives a clearer failure message than assertTrue(x is None)
    self.assertIsNone(result)
def test_load_dialect_impl_other(self):
    """Assert with dialects other than PostgreSQL, a CHAR type is used."""
    guid = models.GUID()
    dialect = sqlite.dialect()
    result = guid.load_dialect_impl(dialect)
    # assertIsInstance reports the actual type on failure,
    # unlike assertTrue(isinstance(...))
    self.assertIsInstance(result, CHAR)
def pprint(query_or_sql, engine):
    """Execute *query_or_sql* against *engine* and print the rows as a table."""
    if isinstance(query_or_sql, Query):
        # ORM queries must first be rendered to a SQL statement.
        sql = query_or_sql.statement.compile(dialect=sqlite.dialect())
    else:
        sql = query_or_sql
    cursor = engine.execute(sql).cursor
    print(from_db_cursor(cursor))
def test_generic_now(self):
    """func.now() is DateTime-typed and renders per-dialect."""
    assert isinstance(func.now().type, sqltypes.DateTime)

    expectations = [
        ('CURRENT_TIMESTAMP', sqlite.dialect()),
        ('now()', postgresql.dialect()),
        ('now()', mysql.dialect()),
        ('CURRENT_TIMESTAMP', oracle.dialect()),
    ]
    for expected_sql, dialect in expectations:
        self.assert_compile(func.now(), expected_sql, dialect=dialect)
def log_query(self, q):
    """
    Log Query
    :param q: Query
    """
    compiled = q.statement.compile(dialect=sqlite.dialect())
    log.debug(str(compiled))
def test_process_result_string(self):
    """Assert when the result value is a string, a native UUID is returned."""
    guid = models.GUID()
    uuid = uuid4()
    result = guid.process_result_value(str(uuid), sqlite.dialect())
    # assertIsInstance reports the actual type on failure
    self.assertIsInstance(result, UUID)
    self.assertEqual(uuid, result)
def test_process_result_short_string(self):
    """Assert when the result value is a short string, a native UUID is returned."""
    guid = models.GUID()
    uuid = uuid4()
    # 32-char hex form (dashes stripped) must also round-trip to a UUID
    result = guid.process_result_value(str(uuid).replace("-", ""), sqlite.dialect())
    self.assertIsInstance(result, UUID)
    self.assertEqual(uuid, result)
def test_generic_random(self):
    """func.random() is untyped by default and renders per-dialect."""
    assert func.random().type == sqltypes.NULLTYPE
    assert isinstance(func.random(type_=Integer).type, Integer)

    cases = [
        ('random()', sqlite.dialect()),
        ('random()', postgresql.dialect()),
        ('rand()', mysql.dialect()),
        ('random()', oracle.dialect()),
    ]
    for rendered, dialect in cases:
        self.assert_compile(func.random(), rendered, dialect=dialect)
def test_process_bind_param_str_other(self):
    """Assert UUIDs with other dialects are hex-encoded strings of length 32."""
    guid = models.GUID()
    uuid = uuid4()
    bound = guid.process_bind_param(str(uuid), sqlite.dialect())
    self.assertEqual(32, len(bound))
    # uuid.hex is exactly str(uuid) with the dashes removed
    self.assertEqual(uuid.hex, bound)
def test_generic_now(self):
    """func.now() carries a DateTime type and compiles per-dialect."""
    assert isinstance(func.now().type, sqltypes.DateTime)

    cases = (
        (sqlite.dialect(), 'CURRENT_TIMESTAMP'),
        (postgresql.dialect(), 'now()'),
        (mysql.dialect(), 'now()'),
        (oracle.dialect(), 'CURRENT_TIMESTAMP'),
    )
    for dialect, expected in cases:
        self.assert_compile(func.now(), expected, dialect=dialect)
def log_query(q):
    """Log Query

    :param q: Query
    """
    compiled = q.statement.compile(dialect=sqlite.dialect())
    # include bound parameter values when there are any
    params = compiled.params
    message = '{0}\n{1}'.format(compiled, params) if params else str(compiled)
    log.debug(message)
def test_generic_random(self):
    """random() is NULL-typed unless given a type, and renders per-dialect."""
    assert func.random().type == sqltypes.NULLTYPE
    assert isinstance(func.random(type_=Integer).type, Integer)

    expected_by_dialect = (
        (sqlite.dialect(), 'random()'),
        (postgresql.dialect(), 'random()'),
        (mysql.dialect(), 'rand()'),
        (oracle.dialect(), 'random()'),
    )
    for dialect, expected in expected_by_dialect:
        self.assert_compile(func.random(), expected, dialect=dialect)
def test_nextval_unsupported(self):
    """next_value() on a non-sequence platform raises NotImplementedError."""
    seq = Sequence("my_seq")
    assert_raises_message(
        NotImplementedError,
        "Dialect 'sqlite' does not support sequence increments.",
        seq.next_value().compile,
        dialect=sqlite.dialect())
def stringify_query(query, kwargs):
    """Render *query* as a debug SQL string with ``kwargs`` values inlined.

    Sample usage:
        print stringify_query(session.query(model).filter_by(**kwargs), kwargs)

    NOTE: substitution is positional and assumes the dict iteration order of
    ``kwargs`` matches the order of the '?' placeholders. Values are not
    escaped — use for debugging only, never to build executable SQL.
    """
    from sqlalchemy.dialects import sqlite
    s = str(query.statement.compile(dialect=sqlite.dialect()))
    # replace one '?' per value, left to right (dead `pass` removed)
    for key in kwargs:
        s = s.replace('?', "'" + str(kwargs[key]) + "'", 1)
    return s
def test_nextval_unsupported(self):
    """test next_value() used on non-sequence platform
    raises NotImplementedError."""
    sequence = Sequence("my_seq")
    dialect = sqlite.dialect()
    assert_raises_message(
        NotImplementedError,
        "Dialect 'sqlite' does not support sequence increments.",
        sequence.next_value().compile,
        dialect=dialect,
    )
def retrieve_stations_data(self, station: models.Station, template_for_lines):
    """Load and pivot observation data for *station*.

    Returns a ``Station`` wrapper (string-cleaned DataFrame) plus a zip of
    the DataFrame attributes named in ``template_for_lines``.
    """
    session = self.Session()
    engine = self.engine
    # helper id -> observable id, for the helpers attached to this station
    dict_with_relevant_to_station_help_ids = {
        helper.id: helper.observable_id for helper in station.helper
    }
    sql = session.query(models.Observations). \
        filter(models.Observations.helper_observable_id.in_(
            list(dict_with_relevant_to_station_help_ids)))
    sql_literal = str(
        sql.statement.compile(dialect=sqlite.dialect(),
                              compile_kwargs={"literal_binds": True}))
    df = pd.read_sql_query(sql_literal, engine)
    # translate helper ids back to observable ids
    df['helper_observable_id'] = df['helper_observable_id']. \
        apply(lambda x: dict_with_relevant_to_station_help_ids[x])
    df.drop('id', axis=1, inplace=True)
    # one column per observable, first value wins on duplicates
    df = df.pivot_table(index=df.timestamp, columns='helper_observable_id',
                        values='value', aggfunc='first')
    df.reset_index(inplace=True)
    df["timestamp"] = df["timestamp"].apply(lambda x: pd.to_datetime(x))
    df.set_index(keys=["timestamp"], drop=False, inplace=True)
    station_df = copy.deepcopy(df)
    # Convert values to Measurement objects
    observables_list = list(df)
    observables_list.remove('timestamp')
    table = str.maketrans(dict.fromkeys('#*\"'))
    for observable in observables_list:
        df[observable] = df[observable].apply(Measurement)
        try:
            # strip markup characters for the display copy
            station_df[observable] = station_df[observable].apply(
                lambda x: str(x).translate(table))
        except Exception as e:
            print(observable, e.args)
    # TODO: This is soooooo dangerous. Please re-implement......
    station_df = station_df.replace('None', np.nan)
    # station_df = station_df.fillna('---')
    # Build the per-line iterator with attrgetter instead of eval():
    # equivalent to the old eval("zip(df.a, df.b, ...)") without executing
    # an arbitrary constructed string.
    zip_argument = zip(*(attrgetter(attr)(df) for attr in template_for_lines))
    return Station(station, df=station_df), zip_argument
def test_sqlite_in_with_multiple_columns():
    """Tuple IN compiles to the same SQLite SQL as the expanded OR-of-ANDs."""
    vals = (
        (1, '1'),
        (2, '2'),
    )
    cols = table.c.id, table.c.a

    def or_of_ands():
        # manual expansion of the tuple-IN semantics
        return sa.or_(
            sa.and_(col == value for col, value in zip(cols, vs))
            for vs in vals)

    def as_sqlite(stmt):
        return str(stmt.compile(dialect=sqlite.dialect()))

    tuple_in = sa.tuple_(*cols).in_(vals)

    assert as_sqlite(sa.select(cols).where(tuple_in)) == \
        as_sqlite(sa.select(cols).where(or_of_ands()))

    assert as_sqlite(table.update().where(tuple_in)) == \
        as_sqlite(table.update().where(or_of_ands()))
def test_sqlite_in_with_multiple_columns():
    """Tuple IN and its OR/AND expansion must render identically on SQLite."""
    values = (
        (1, '1'),
        (2, '2'),
    )
    columns = table.c.id, table.c.a
    dialect = sqlite.dialect()

    in_clause = sa.tuple_(*columns).in_(values)
    expanded_clause = sa.or_(
        sa.and_(c == v for c, v in zip(columns, row)) for row in values)

    select_via_in = sa.select(columns).where(in_clause)
    select_via_or = sa.select(columns).where(expanded_clause)
    assert str(select_via_in.compile(dialect=dialect)) == \
        str(select_via_or.compile(dialect=dialect))

    update_via_in = table.update().where(in_clause)
    update_via_or = table.update().where(expanded_clause)
    assert str(update_via_in.compile(dialect=dialect)) == \
        str(update_via_or.compile(dialect=dialect))
def schema():
    """CLI entry point: generate one of the project's schema documents
    (OpenAPI, DSL JSON schema, or SQL DDL) and write it to a file or stdout.
    """
    parser = argparse.ArgumentParser(
        description="Generate different schema documents")
    parser.add_argument(
        "schema",
        metavar="<SCHEMA>",
        type=str,
        choices=("openapi", "dsl", "sql"),
        default="openapi",
        help=
        ("The schema to generate: 'openapi' for the complete web OpenAPI/Swagger schema,"
         " 'dsl' for the JSON schema for the DSL, 'sql' for the database schema."
         ),
    )
    parser.add_argument(
        "outfile",
        metavar="<FILE>",
        nargs="?",
        type=argparse.FileType("w"),
        default=sys.stdout,
        help="The file to use. <stdout> if not specified.",
    )
    parser.add_argument(
        "-r",
        "--raw",
        action="store_true",
        help="Do not pretty print the generated JSON.",
    )
    args = parser.parse_args()
    if args.schema == "openapi":
        # Build the OpenAPI document from the FastAPI app's routes.
        json_schema = get_openapi(
            title=app.title,
            version=app.version,
            openapi_version=app.openapi_version,
            description=app.description,
            routes=app.routes,
        )
        _print_json(json_schema, args.outfile, args.raw)
    elif args.schema == "dsl":
        json_schema = JobModel.schema()
        _print_json(json_schema, args.outfile, args.raw)
    elif args.schema == "sql":
        # Emit CREATE TABLE DDL for every exported model, rendered for SQLite.
        for tbl in db.__all__:
            print(
                CreateTable(getattr(
                    db, tbl).__table__).compile(dialect=sqlite.dialect()),
                file=args.outfile,
            )
def ORM2SQL(self, statement):
    """Render a SQLAlchemy *statement* as raw SQL with parameter values inlined.

    :param statement: a SQLAlchemy statement (e.g. ``Query.statement``)
    :return: str — SQL where each '?' placeholder is replaced by the repr
        of the corresponding bound value

    NOTE(review): substitution relies on ``query.params`` iterating in the
    same order as the '?' placeholders, and breaks if the SQL itself contains
    a literal '%' — debugging use only, not safe for execution.
    """
    query = statement.compile(dialect=sqlite.dialect())
    query_str = str(query)
    query_paras: dict = query.params
    # '?' -> %r, then interpolate bound values in dict order
    query_raw_sql = query_str.replace('?', r"%r") % tuple(
        query_paras.values())
    # print("==query_raw_sql==↓:")
    # print(query_raw_sql)
    # print("==query_raw_sql==↑:")
    return query_raw_sql
def __init__(self, uri):
    """Create a sqlite-backed twisted ConnectionPool for *uri*.

    :param uri: e.g. ``sqlite:///path/to.db`` or ``sqlite://`` (in-memory)
    :raises ValueError: for URI schemes other than sqlite/sqlite3
    """
    parsed = urlparse.urlparse(uri)
    if parsed.scheme not in ("sqlite", "sqlite3"):
        # Previously an unsupported scheme produced an *empty* argument list
        # and a confusing failure inside ConnectionPool; fail fast instead.
        raise ValueError("unsupported database scheme: %r" % parsed.scheme)
    args = ["sqlite3"]
    if not parsed.path:
        args.append(":memory:")
    else:
        args.append(parsed.path)
    from sqlalchemy.dialects.sqlite import dialect
    self.dialect = dialect()
    # single connection: sqlite3 connections are not shared across threads
    self.pool = adbapi.ConnectionPool(*args, cp_min=1, cp_max=1)
class SQLite(Resource):
    """In-memory SQLite implementation of ``Resource``."""

    # SQLite supports the 'named' (:name) parameter style.
    DIALECT = sqlite.dialect(paramstyle='named')

    @classmethod
    def create_connection_maker(
            cls,
            host: Optional[str] = None,
            port: Optional[int] = None,
            user_name: Optional[str] = None,
            password: Optional[str] = None,
            # was annotated Dict[str, Any] with a None default; Optional is
            # the correct type for a defaulted-to-None parameter
            engine_kwargs: Optional[Dict[str, Any]] = None,
    ) -> ConnectionMaker:
        """Return a zero-argument callable producing in-memory connections.

        All parameters exist for interface compatibility and are ignored.
        """
        def connect():  # type: ignore
            return sqlite3.connect(':memory:')

        return connect
def artists():
    """Search endpoint: return artists matching the request's query
    parameters, scored by weighted sort criteria and sliced to one page.
    """
    # If the user not specifically define 'false' for force (which is the
    # default) value, then the attempt will be treated as if it were defined as
    # 'true'. Eg. force=yes or force=1 and even force=fasle (mistyped value)
    # will be treated as force=true
    force = request.args.get('force')
    force = True if force is not None and force != 'false' else False

    # Get and set each parameter's value
    asked = {}
    for parameter, getter in PARAMETERS.items():
        try:
            asked[parameter] = getter(request.args.get(parameter), force)
        except ParamError as error:
            return error.jsonify()

    # Store values more than once locally
    count = asked['count']
    start = (asked['start'] - START) * count
    weights = asked['sort']
    distance = Artist.distance(asked['latitude'],
                               asked['longitude']).label('distance')

    # Do the query: filter by gender/rate/age/radius, order by the weighted
    # combination of criteria, then slice out the requested page
    query = session.query(Artist, distance).filter(
        true() if asked['gender'] == 'both'
        else Artist.gender == asked['gender'],
        Artist.rate <= asked['rate'],
        Artist.age.between(asked['youngest'], asked['oldest']),
        literal_column(distance.name) <= asked['radius']).order_by(
            (asc if asked['order'] == 'asc' else desc
             )((weights['age'] * Artist.age) +
               (weights['gender'] * Artist.gender) +
               (weights['rate'] * Artist.rate) +
               (weights['distance'] * literal_column(distance.name)))).slice(
                   start, start + count)

    # If debugging mode is on print the compiled SQL(ite) query
    if app.debug:
        print('\n',
              str(query.statement.compile(dialect=sqlite.dialect())),
              'parameters:',
              asked,
              sep='\n',
              end='\n\n')

    # Return serialised and jsonified result
    return jsonify([artist.serialise(distance) for artist, distance in query])
class SQLite(Resource):
    """In-memory SQLite implementation of ``Resource``."""

    def create_column_metadata_set(self, cursor: Cursor) -> List[ColumnMetadata]:
        raise NotImplementedError

    # SQLite supports the 'named' (:name) parameter style.
    DIALECT = sqlite.dialect(paramstyle='named')

    @classmethod
    def create_connection_maker(
            cls,
            host: Optional[str] = None,
            port: Optional[int] = None,
            user_name: Optional[str] = None,
            password: Optional[str] = None,
            # was annotated Dict[str, Any] with a None default; Optional is
            # the correct type for a defaulted-to-None parameter
            engine_kwargs: Optional[Dict[str, Any]] = None,
    ) -> ConnectionMaker:
        """Return a callable producing in-memory sqlite3 connections.

        All parameters exist for interface compatibility and are ignored.
        """
        def connect(_: Optional[str] = None):  # type: ignore
            return sqlite3.connect(':memory:')

        return connect
def fetch_electronegativities(scales: List[str] = None) -> pd.DataFrame:
    """
    Fetch electronegativity scales for all elements as :py:class:`pandas.DataFrame`

    Args:
        scales: list of scale names, defaults to all available scales

    Returns:
        df (pandas.DataFrame): Pandas DataFrame with the contents of the table
    """
    session = get_session()
    engine = get_engine()

    query = session.query(Element.atomic_number).order_by("atomic_number")
    df = pd.read_sql_query(query.statement.compile(dialect=sqlite.dialect()), engine)

    # BUG FIX: honor the caller's ``scales`` argument — it was previously
    # unconditionally overwritten by the hard-coded full list.
    if scales is None:
        scales = [
            "allen",
            "allred-rochow",
            "cottrell-sutton",
            "ghosh",
            "gordy",
            "li-xue",
            "martynov-batsanov",
            "mulliken",
            "nagle",
            "pauling",
            "sanderson",
        ]

    for scale in scales:
        # column name is the Title-Cased scale, e.g. "Allred-Rochow"
        scale_name = "-".join(s.capitalize() for s in scale.split("-"))
        df.loc[:, scale_name] = [
            element(int(row.atomic_number)).electronegativity(scale=scale)
            for _, row in df.iterrows()
        ]

    return df.set_index("atomic_number")
def rlist(dbtype, tablename):
    """Render the resource list page for *tablename* in database *dbtype*.

    Three paths: unknown db -> error page; all_only tables -> full listing;
    otherwise an LS-form-driven query, cached in the session as compiled
    SQL + JSON params so the listing survives page reloads.
    """
    lkup, dbsess = obtain_lkup(dbtype)
    if (lkup is None):
        return render_template("errors/error.html",
                               error_title="Lookup Failure",
                               error_message="Unknown Database")
    display_tablename = lkup[tablename].model.rlist_dis
    if (lkup[tablename].lsform == rstruct.all_only):
        # Get all matches
        mobj = getMatch(lkup, tablename)
        columnHead = mobj[0]
        match = mobj[1]
        prikey_match = mobj[2]
        return render_template('res/datalist0.html', dbtype=dbtype,
                               colNum=len(columnHead), matches=match,
                               columnHead=columnHead, tablename=tablename,
                               prikeyMatch=prikey_match,
                               data_table_name=display_tablename)
    else:
        resls_form = lkup[tablename].lsform()  # creates an LS FORM
        resls_form = regenerateForm(lkup, resls_form, None)
        if resls_form.validate_on_submit():
            # fresh query submitted by the user
            q = resls_form.getrawquery()
        elif (session.get('query') is not None
              and session.get('param') is not None):
            # previous query is inplace
            q = session['query']
            qp = json.loads(session['param'])
            # THIS IS VERY DANGEROUS ! VULNERABLE TO SQLI -- ToraNova
            # But I have no choice to use this for the sake of convenience
            # Please look into
            # https://stackoverflow.com/questions/14845196/dynamically-constructing-filters-in-sqlalchemy
            q = dbsess.query(lkup[tablename].model).from_statement(
                text(q)).params(qp)
        else:
            # await user query
            # NOTE(review): the original literal contained a mangled line
            # break here — confirm the exact separator against the template.
            crawq = "No Queries. \nHit 'Query' to start"
            return render_template('res/datalist1.html', dbtype=dbtype,
                                   colNum=0, matches=[], columnHead=[],
                                   tablename=tablename, prikeyMatch=[],
                                   data_table_name=display_tablename,
                                   form=resls_form, dqstr=crawq)
        # session['query'] = str(q.with_labels().statement)
        # SPECIAL THANKS TO
        # https://stackoverflow.com/questions/4617291/how-do-i-get-a-raw-compiled-sql-query-from-a-sqlalchemy-expression
        # crawq = str(q.statement.compile(compile_kwargs={"literal_binds": True}))
        crawq = q.statement.compile(dialect=sqlite.dialect(paramstyle="named"))
        crawq_str = str(crawq)
        # Thanks again to
        # https://stackoverflow.com/questions/11875770/how-to-overcome-datetime-datetime-not-json-serializable
        crawq_params = json.dumps(crawq.params, cls=ExtendedEncoder)
        # cache the compiled SQL + params for the next page load
        session['query'] = crawq_str
        session['param'] = crawq_params
        # FOR LS FORM SESSION/QUERY READY
        rawlist = q.all()
        try:
            mobj = getMatch(lkup, tablename, rawlist)
            columnHead = mobj[0]
            match = mobj[1]
            prikey_match = mobj[2]
        except Exception as e:
            # cached query no longer matches the schema/session state
            crawq = "Expired Query. Hit Query to start again"
            columnHead = []
            match = []
            prikey_match = []
        return render_template('res/datalist1.html', dbtype=dbtype,
                               colNum=len(columnHead), matches=match,
                               columnHead=columnHead, tablename=tablename,
                               prikeyMatch=prikey_match,
                               data_table_name=display_tablename,
                               form=resls_form, dqstr=crawq)
def test_match_1(self):
    """MATCH renders with a positional placeholder on SQLite."""
    expr = self.table1.c.myid.match('somstr')
    self.assert_compile(expr,
                        "mytable.myid MATCH ?",
                        dialect=sqlite.dialect())
expression2 = user_table.c.username.in_(["wendy", "mary", "ed"]) compiled=expression2.compile() print("expression2 : {}".format(expression2)) print("xpression2.compile().params : {}".format(compiled.params)) print("-" * 80) """ heading = "Building expressions using SQL Expression." print_output(number, code, heading) #Dialect differences print("Dialect Differences.") print("-" * 80) expression = user_table.c.username == 'ed' print(expression.compile(dialect=mysql.dialect())) print(expression.compile(dialect=postgresql.dialect())) print(expression.compile(dialect=sqlite.dialect())) print("-" * 80) #Expressions as an object - BinaryExpression object. print("Expression as an object - BinaryExpression object.") print("-" * 80) print("expression : {}".format(expression)) print("type(expression) : {}".format(type(expression))) print("expression.left : {}".format(expression.left)) print("expression.right : {}".format(expression.right)) print("expression.operator : {}".format(expression.operator)) print("-" * 80) #Compiling expressions print("Parameters of expressions") print("-" * 80) expression2 = user_table.c.username.in_(["wendy", "mary", "ed"]) compiled = expression2.compile()
def test_mac_load_dialect_impl(self):
    """The MAC type resolves to sqlite CHAR under the sqlite dialect."""
    impl = self.mac.load_dialect_impl(sqlite.dialect())
    # exact type check is intentional (no subclasses accepted)
    self.assertEqual(type(impl), sqlite.CHAR)
def test_process_result_value_with_value(self):
    """A float result value passes through unchanged."""
    value = self.inet.process_result_value(1.0, sqlite.dialect())
    self.assertEqual(value, 1.0)
def test_process_bind_param_with_value(self):
    """A string bind value passes through unchanged."""
    dialect = sqlite.dialect()
    self.assertEqual(self.inet.process_bind_param("foo", dialect), "foo")
def test_inet_load_dialect_impl_sqlite(self):
    """The INET type falls back to sqlite CHAR under the sqlite dialect."""
    impl = self.inet.load_dialect_impl(sqlite.dialect())
    # exact type check is intentional (no subclasses accepted)
    self.assertEqual(type(impl), sqlite.CHAR)
def test_process_result_value_none(self):
    """Assert when the result value is None, None is returned."""
    guid = models.GUID()
    # assertIsNone gives a clearer failure message than assertTrue(x is None)
    self.assertIsNone(guid.process_result_value(None, sqlite.dialect()))
from sqlalchemy.dialects import sqlite, mysql

__all__ = ['sqlite_dialect', 'mysql_dialect', 'create_table_sql', 'sqlite3']

# Shared dialect singletons used when rendering DDL.
sqlite_dialect = sqlite.dialect()
mysql_dialect = mysql.dialect()


def create_table_sql(table, dialect):
    """Return the CREATE TABLE DDL for *table* rendered with *dialect*."""
    from sqlalchemy.schema import CreateTable
    return str(CreateTable(table).compile(dialect=dialect)).strip()


def sqlite3(db_file, command):
    """Run *command* against *db_file* via the sqlite3 CLI, returning output.

    NOTE: deliberately shadows the stdlib ``sqlite3`` module name — it is
    exported via __all__; callers importing this module get the function.
    """
    import subprocess
    return subprocess.check_output(['sqlite3', db_file, command])