def testZS2SA(self):
    db = get_engine('mysql://root@localhost/alc2', echo=True)
    meta = rdb.BoundMetaData(db)
    table = transmute(ITestInterface, meta)
    meta.create_all()
    self.assertEqual(table.columns.ASCII.type.__class__, rdb.TEXT)
    self.assertEqual(table.columns.ASCIILine.type.__class__, rdb.TEXT)
    self.assertEqual(table.columns.Bool.type.__class__, rdb.BOOLEAN)
def make_meta():
    if sa_version == '0.3':
        if url.startswith('mysql') and not sa_opts:
            sa_opts['poolclass'] = pool.QueuePool
        engine = sa.create_engine(url, **sa_opts)
        meta = sa.BoundMetaData(engine)
    else:
        # SQLAlchemy pops the url, this ensures it sticks around later
        sa_opts['sa.url'] = url
        engine = sa.engine_from_config(sa_opts, 'sa.')
        meta = sa.MetaData()
        meta.bind = engine
    return meta
def _table():
    if not hasattr(self.q, 'meta'):
        self.q.meta = SA.BoundMetaData(self.q.engine)
    kw.setdefault('useexisting', True)
    table = SA.Table(name, self.q.meta, *cols, **kw)
    try:
        table.create()
    except:
        # SA 0.2 bug, useexisting=True ignored
        result = False
    else:
        result = True
    setattr(self.q, name, table)
    return result
def _table():
    if not hasattr(self, '_meta'):
        if SA04:
            self._meta = SA.MetaData(self._engine)
        else:
            self._meta = SA.BoundMetaData(self._engine)
    # Pull index_*/unique_* keyword arguments out before they reach Table().
    indexes = {}
    for key in kw.keys():
        if key.startswith('index_'):
            unique = False
        elif key.startswith('unique_'):
            unique = True
        else:
            continue
        indexes[key] = kw.pop(key), unique
    kw.setdefault('useexisting', True)
    table = SA.Table(name, self._meta, *cols, **kw)
    table.create(checkfirst=True)
    setattr(self, name, table)
    return table, indexes
def make_meta():
    if url.startswith('mysql') and not sa_opts:
        sa_opts['poolclass'] = pool.QueuePool
    engine = sa.create_engine(url, **sa_opts)
    meta = sa.BoundMetaData(engine)
    return meta
def make_metadata(engine):
    metadata = sa.BoundMetaData(engine)
    metadata.bind.echo = g.sqlprinting
    return metadata
def __init__(self):
    self.metadata = sa.BoundMetaData('sqlite:///cwc3.db')
    self.session = sa.create_session()
    self.InitTables()
# We get the keys of the first data item. This lets us know what columns
# we're going to be inserting data into.
keys = data[0].keys()
# Construct a list of columns that corresponds to SQL syntax.
cols = ",".join(keys)
# Construct a list of mappings that sqlalchemy understands. These are basically
# variables representing values that will be inserted via the data variable.
maps = ",".join(map(lambda x: ":%s" % (x), keys))
conn = engine.connect()
conn.execute("INSERT INTO %s (%s) VALUES (%s)" % (table, cols, maps), data)
conn.close()

engine = sa.create_engine(Options.Ip2c.dburl)
metadata = sa.BoundMetaData(engine)
table = sa.Table(
    Options.Ip2c.dbtable, metadata,
    sa.Column('id', sa.Integer, autoincrement=True, primary_key=True),
    sa.Column('ipfrom', sa.Integer),
    sa.Column('ipto', sa.Integer),
    sa.Column('registry', sa.String(20)),
    sa.Column('assigned', sa.Date),
    sa.Column('ctry', sa.String(5)),
    sa.Column('cntry', sa.String(5)),
    sa.Column('country', sa.String(50)))
table.create(checkfirst=True)

data = StringIO.StringIO()
print("Downloading ip2country gzip file...")
try:
    c = pycurl.Curl()
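# An alternative sketch (not from the original source): the same multi-row
# insert can be expressed through the Table construct instead of hand-built
# SQL. `engine` and `table` are assumed to be the objects created above;
# `rows` is illustrative data only.
import datetime

rows = [
    {'ipfrom': 16777216, 'ipto': 16777471, 'registry': 'apnic',
     'assigned': datetime.date(2011, 4, 1), 'ctry': 'au', 'cntry': 'aus',
     'country': 'Australia'},
]

conn = engine.connect()
# Passing a list of dicts to execute() performs an executemany-style INSERT;
# bind parameters are derived from the table's column names.
conn.execute(table.insert(), rows)
conn.close()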
def parse_argv():
    # we are using the unittest main runner, so we are just popping out the
    # arguments we need instead of using our own getopt type of thing
    global db, db_uri, metadata
    DBTYPE = 'sqlite'
    PROXY = False
    parser = optparse.OptionParser(usage="usage: %prog [options] [tests...]")
    parser.add_option("--dburi", action="store", dest="dburi",
                      help="database uri (overrides --db)")
    parser.add_option("--db", action="store", dest="db", default="sqlite",
                      help="prefab database uri (sqlite, sqlite_file, postgres, "
                           "mysql, oracle, oracle8, mssql, firebird)")
    parser.add_option("--mockpool", action="store_true", dest="mockpool",
                      help="use mock pool (asserts only one connection used)")
    parser.add_option("--verbose", action="store_true", dest="verbose",
                      help="enable stdout echoing/printing")
    parser.add_option("--quiet", action="store_true", dest="quiet",
                      help="suppress unittest output")
    parser.add_option("--log-info", action="append", dest="log_info",
                      help="turn on info logging for <LOG> (multiple OK)")
    parser.add_option("--log-debug", action="append", dest="log_debug",
                      help="turn on debug logging for <LOG> (multiple OK)")
    parser.add_option("--nothreadlocal", action="store_true", dest="nothreadlocal",
                      help="don't use thread-local mod")
    parser.add_option("--enginestrategy", action="store", default=None, dest="enginestrategy",
                      help="engine strategy (plain or threadlocal, defaults to plain)")
    parser.add_option("--coverage", action="store_true", dest="coverage",
                      help="Dump a full coverage report after running")
    parser.add_option("--reversetop", action="store_true", dest="topological",
                      help="Reverse the collection ordering for topological sorts "
                           "(helps reveal dependency issues)")
    (options, args) = parser.parse_args()
    sys.argv[1:] = args

    if options.dburi:
        db_uri = param = options.dburi
        DBTYPE = db_uri[:db_uri.index(':')]
    elif options.db:
        DBTYPE = param = options.db

    opts = {}
    if db_uri is None:
        if DBTYPE == 'sqlite':
            db_uri = 'sqlite:///:memory:'
        elif DBTYPE == 'sqlite_file':
            db_uri = 'sqlite:///querytest.db'
        elif DBTYPE == 'postgres':
            db_uri = 'postgres://*****:*****@127.0.0.1:5432/test'
        elif DBTYPE == 'mysql':
            db_uri = 'mysql://*****:*****@127.0.0.1:3306/test'
        elif DBTYPE == 'oracle':
            db_uri = 'oracle://*****:*****@127.0.0.1:1521'
        elif DBTYPE == 'oracle8':
            db_uri = 'oracle://*****:*****@127.0.0.1:1521'
            opts = {'use_ansi': False}
        elif DBTYPE == 'mssql':
            db_uri = 'mssql://*****:*****@SQUAWK\\SQLEXPRESS/test'
        elif DBTYPE == 'firebird':
            db_uri = 'firebird://*****:*****@localhost/tmp/test.fdb'
    if not db_uri:
        raise Exception(
            "Could not create engine. specify --db "
            "<sqlite|sqlite_file|postgres|mysql|oracle|oracle8|mssql|firebird> "
            "to test runner.")

    if not options.nothreadlocal:
        __import__('sqlalchemy.mods.threadlocal')
        sqlalchemy.mods.threadlocal.uninstall_plugin()

    global echo
    echo = options.verbose and not options.quiet
    global quiet
    quiet = options.quiet
    global with_coverage
    with_coverage = options.coverage

    if options.enginestrategy is not None:
        opts['strategy'] = options.enginestrategy
    if options.mockpool:
        db = engine.create_engine(db_uri, poolclass=pool.AssertionPool, **opts)
    else:
        db = engine.create_engine(db_uri, **opts)
    db = EngineAssert(db)

    if options.topological:
        from sqlalchemy.orm import unitofwork
        from sqlalchemy import topological

        class RevQueueDepSort(topological.QueueDependencySorter):
            def __init__(self, tuples, allitems):
                self.tuples = list(tuples)
                self.allitems = list(allitems)
                self.tuples.reverse()
                self.allitems.reverse()

        topological.QueueDependencySorter = RevQueueDepSort
        unitofwork.DependencySorter = RevQueueDepSort

    import logging
    logging.basicConfig()
    if options.log_info is not None:
        for elem in options.log_info:
            logging.getLogger(elem).setLevel(logging.INFO)
    if options.log_debug is not None:
        for elem in options.log_debug:
            logging.getLogger(elem).setLevel(logging.DEBUG)

    metadata = sqlalchemy.BoundMetaData(db)
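# For reference (an assumption about later SQLAlchemy releases, not part of the
# original snippets): BoundMetaData was later folded into plain MetaData bound
# to an engine, which is the pattern the non-0.3 branch of make_meta() above
# already uses. A minimal sketch:
import sqlalchemy as sa

engine = sa.create_engine('sqlite:///:memory:')
metadata = sa.MetaData(bind=engine)  # or: metadata = sa.MetaData(); metadata.bind = engine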