def _do_tables(self, mapper, engine):
    metadata = MetaData()
    test_table = Table(
        "test",
        metadata,
        Column("id", sqla_types.Integer, primary_key=True, nullable=False),
        Column("name", sqla_types.String, nullable=False),
    )
    pk_test_table = Table(
        "pk_test",
        metadata,
        Column("foobar", sqla_types.String, primary_key=True, nullable=False),
        Column("baz", sqla_types.String, nullable=False),
    )
    Test = type("Test", (Base,), {})
    PKTest = type(
        "PKTest",
        (Base,),
        {"__unicode__": lambda x: x.baz, "__str__": lambda x: x.baz},
    )
    mapper(Test, test_table)
    mapper(PKTest, pk_test_table)
    self.Test = Test
    self.PKTest = PKTest
    metadata.create_all(bind=engine)
def create_area_compare_table(self):
    """Create (or reflect) the table comparing wind radii areas from
    IBTrACS, ERA5, and SMAP.
    """
    table_name = 'wind_radii_area_compare'

    class WindRadiiAreaCompare(object):
        pass

    # Reflect and map the table if it already exists
    if self.engine.dialect.has_table(self.engine, table_name):
        metadata = MetaData(bind=self.engine, reflect=True)
        t = metadata.tables[table_name]
        mapper(WindRadiiAreaCompare, t)
        return WindRadiiAreaCompare

    cols = []
    cols.append(Column('key', Integer, primary_key=True))
    cols.append(Column('sid', String(13), nullable=False))
    cols.append(Column('date_time', DateTime, nullable=False))
    for source in ['ibtracs', 'era5', 'smap']:  # renamed from `type` to avoid shadowing the builtin
        for r in self.wind_radii:
            col_name = f'{source}_r{r}_area'
            cols.append(Column(col_name, Float, nullable=False))
    cols.append(
        Column('sid_date_time', String(50), nullable=False, unique=True))

    metadata = MetaData(bind=self.engine)
    t = Table(table_name, metadata, *cols)
    mapper(WindRadiiAreaCompare, t)
    metadata.create_all()
    self.session.commit()
    return WindRadiiAreaCompare
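The snippet above (and the similar reflect-or-create helpers below) relies on two APIs that were deprecated in SQLAlchemy 1.4: `engine.dialect.has_table(engine, ...)` and `MetaData(bind=..., reflect=True)`. A minimal sketch of the same existence check using the modern `inspect()` API, assuming SQLAlchemy 1.4+; the helper name is illustrative:

from sqlalchemy import MetaData, inspect

def reflect_table_or_none(engine, table_name):
    """Return the reflected Table if it already exists, else None."""
    if inspect(engine).has_table(table_name):
        metadata = MetaData()
        metadata.reflect(bind=engine, only=[table_name])
        return metadata.tables[table_name]
    return None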
def create_reflected_test_database(read_engine, write_engine):
    meta = MetaData(bind=write_engine)
    meta.reflect(bind=read_engine)
    for tablename in ('matviews', 'stats_station_var'):
        meta.remove(meta.tables[tablename])

    logger.info("Overriding PG types that are unknown to sqlite")
    meta.tables['meta_history'].columns['tz_offset'].type = Integer()
    meta.tables['obs_raw'].columns['mod_time'].server_default = None
    meta.tables['meta_history'].columns['the_geom'].type = Integer()
    # These are all BIGINT in postgres
    meta.tables['obs_raw'].columns['obs_raw_id'].type = Integer()
    meta.tables['obs_raw_native_flags'].columns['obs_raw_id'].type = Integer()
    meta.tables['obs_raw_pcic_flags'].columns['obs_raw_id'].type = Integer()

    logger.info("Unsetting all of the sequence defaults")
    for tablename, table in meta.tables.items():  # iteritems() is Python 2 only
        if hasattr(table, 'primary_key'):
            for column in table.primary_key.columns.values():
                if column.server_default:
                    column.server_default = None

    logger.info("Creating a subset of the tables")
    to_search = [
        'obs_raw', 'meta_history', 'meta_station', 'meta_network',
        'meta_vars', 'meta_contact'
    ]
    to_create = [
        table for tablename, table in meta.tables.items()
        if tablename in to_search
    ]
    # Don't have contact in the postgres database yet 2013.12.04
    meta.tables['meta_network'].append_column(Column('contact_id', Integer))
    meta.create_all(tables=to_create)
def test_create_table(engine, bigquery_dataset):
    meta = MetaData()
    Table(
        f"{bigquery_dataset}.test_table_create",
        meta,
        Column("integer_c", sqlalchemy.Integer, doc="column description"),
        Column("float_c", sqlalchemy.Float),
        Column("decimal_c", sqlalchemy.DECIMAL),
        Column("string_c", sqlalchemy.String),
        Column("text_c", sqlalchemy.Text),
        Column("boolean_c", sqlalchemy.Boolean),
        Column("timestamp_c", sqlalchemy.TIMESTAMP),
        Column("datetime_c", sqlalchemy.DATETIME),
        Column("date_c", sqlalchemy.DATE),
        Column("time_c", sqlalchemy.TIME),
        Column("binary_c", sqlalchemy.BINARY),
        bigquery_description="test table description",
        bigquery_friendly_name="test table name",
    )
    meta.create_all(engine)
    meta.drop_all(engine)

    # Test creating tables with declarative_base
    Base = declarative_base()

    class TableTest(Base):
        __tablename__ = f"{bigquery_dataset}.test_table_create2"
        integer_c = Column(sqlalchemy.Integer, primary_key=True)
        float_c = Column(sqlalchemy.Float)

    Base.metadata.create_all(engine)
    Base.metadata.drop_all(engine)
def init_db():
    metadata = MetaData()
    persons = Table(
        'person', metadata,
        Column('id', Integer, Sequence('person_id_seq'), primary_key=True),
        Column('name', String(64)),
        Column('nickName', String(64)),
        Column('location', String(128)))
    goods = Table(
        'goods', metadata,
        Column('id', Integer, Sequence('good_id_seq'), primary_key=True),
        Column('category', ARRAY(String())),
        Column('detail', String()),
        Column('name', String(128)),
        Column('price', Float()),
        Column('description', String()),
        Column('brand', String(64)))
    reviews = Table(
        'reviews', metadata,
        Column('id', Integer, Sequence('review_id_seq'), primary_key=True),
        Column('review', String()),
        Column('stars', Float()),
        Column('title', String()),
        Column('date_review', DateTime()),
        Column('helpful', Float()),
        Column('person_id', ForeignKey('person.id')),
        Column('product_id', ForeignKey('goods.id')))
    mapper(Persons, persons)
    mapper(Goods, goods)
    mapper(Reviews, reviews)
    metadata.create_all(bind=engine)
def test_create_table(engine):
    meta = MetaData()
    table = Table(
        'test_pybigquery.test_table_create', meta,
        Column('integer_c', sqlalchemy.Integer, doc="column description"),
        Column('float_c', sqlalchemy.Float),
        Column('decimal_c', sqlalchemy.DECIMAL),
        Column('string_c', sqlalchemy.String),
        Column('text_c', sqlalchemy.Text),
        Column('boolean_c', sqlalchemy.Boolean),
        Column('timestamp_c', sqlalchemy.TIMESTAMP),
        Column('datetime_c', sqlalchemy.DATETIME),
        Column('date_c', sqlalchemy.DATE),
        Column('time_c', sqlalchemy.TIME),
        Column('binary_c', sqlalchemy.BINARY),
        bigquery_description="test table description",
        bigquery_friendly_name="test table name")
    meta.create_all(engine)
    meta.drop_all(engine)

    # Test creating tables with declarative_base
    Base = declarative_base()

    class TableTest(Base):
        __tablename__ = 'test_pybigquery.test_table_create2'
        integer_c = Column(sqlalchemy.Integer, primary_key=True)
        float_c = Column(sqlalchemy.Float)

    Base.metadata.create_all(engine)
    Base.metadata.drop_all(engine)
def _do_tables(self, mapper, engine):
    metadata = MetaData()
    test_table = Table(
        'test', metadata,
        Column('id', Integer, primary_key=True, nullable=False),
        Column('name', String, nullable=False),
    )
    pk_test_table = Table(
        'pk_test', metadata,
        Column('foobar', String, primary_key=True, nullable=False),
        Column('baz', String, nullable=False),
    )
    Test = type(str('Test'), (Base, ), {})
    PKTest = type(str('PKTest'), (Base, ), {
        '__unicode__': lambda x: x.baz,
        '__str__': lambda x: x.baz,
    })
    mapper(Test, test_table, order_by=[test_table.c.name])
    mapper(PKTest, pk_test_table, order_by=[pk_test_table.c.baz])
    self.Test = Test
    self.PKTest = PKTest
    metadata.create_all(bind=engine)
def test_insert_table(engine_testaccount):
    metadata = MetaData()
    users = Table(
        'users', metadata,
        Column('id', Integer, Sequence('user_id_seq'), primary_key=True),
        Column('name', String),
        Column('fullname', String),
    )
    metadata.create_all(engine_testaccount)

    data = [{
        'id': 1,
        'name': 'testname1',
        'fullname': 'fulltestname1',
    }, {
        'id': 2,
        'name': 'testname2',
        'fullname': 'fulltestname2',
    }]
    conn = engine_testaccount.connect()
    try:
        # using multivalue insert
        conn.execute(users.insert(data))
        results = conn.execute(select([users]).order_by('id'))
        row = results.fetchone()
        assert row['name'] == 'testname1'
    finally:
        conn.close()
        users.drop(engine_testaccount)
class SQLAlchemy:

    def __init__(self, dsn):
        engine = create_engine(dsn)
        self._metadata = MetaData(engine)

    def create_all_tables(self):
        logging.debug("Creating all DB tables")
        self._metadata.create_all()

    def configure_mappings(self):
        logging.debug("Configuring DB mappings")
        user = Table(
            'users', self._metadata,
            Column('id', Integer, primary_key=True),
            Column('email', String(254), nullable=False),  # compliant with RFCs 3696 and 5321
            Column('password', String(128), nullable=False),
            Column('last_login', DateTime()),
            Column('created_at', DateTime()),
            Column('is_active', Boolean()),
        )
        sessions = Table(
            'sessions', self._metadata,
            Column('id', String(40), primary_key=True),
            Column('data', String(262144), nullable=False),  # approx. 1 mb
            Column('expires_at', DateTime(), nullable=False),
        )
        return self._metadata
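A hedged usage sketch of the wrapper above; the DSN is hypothetical, and the call order is an assumption (the tables must be registered on the shared MetaData before `create_all_tables` has anything to emit):

db = SQLAlchemy("sqlite:///app.db")   # hypothetical DSN
db.configure_mappings()               # registers the users/sessions tables
db.create_all_tables()                # emits CREATE TABLE for both tables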
def test_unnest(engine, bigquery_dataset):
    from sqlalchemy import select, func, String
    from sqlalchemy_bigquery import ARRAY

    conn = engine.connect()
    metadata = MetaData()
    table = Table(
        f"{bigquery_dataset}.test_unnest",
        metadata,
        Column("objects", ARRAY(String)),
    )
    metadata.create_all(engine)
    conn.execute(
        table.insert(), [dict(objects=["a", "b", "c"]), dict(objects=["x", "y"])]
    )
    query = select([func.unnest(table.c.objects).alias("foo_objects").column])
    compiled = str(query.compile(engine))
    assert " ".join(compiled.strip().split()) == (
        f"SELECT `foo_objects`"
        f" FROM"
        f" `{bigquery_dataset}.test_unnest` `{bigquery_dataset}.test_unnest_1`,"
        f" unnest(`{bigquery_dataset}.test_unnest_1`.`objects`) AS `foo_objects`"
    )
    assert sorted(r[0] for r in conn.execute(query)) == ["a", "b", "c", "x", "y"]
def create_tc_table(self, basin):
    """Create the table which represents TC records from IBTrACS.
    """
    table_name = self.CONFIG['ibtracs']['table_name'][basin]

    class IBTrACSTable(object):
        pass

    # Return TC table if it exists
    if self.engine.dialect.has_table(self.engine, table_name):
        metadata = MetaData(bind=self.engine, reflect=True)
        t = metadata.tables[table_name]
        mapper(IBTrACSTable, t)
        return IBTrACSTable

    cols = []
    # IBTrACS columns
    cols.append(Column('key', Integer, primary_key=True))
    cols.append(Column('sid', String(13), nullable=False))
    cols.append(Column('name', String(50)))
    cols.append(Column('date_time', DateTime, nullable=False))
    cols.append(Column('basin', String(2), nullable=False))
    cols.append(Column('lat', Float, nullable=False))
    cols.append(Column('lon', Float, nullable=False))
    cols.append(Column('pres', Integer))
    cols.append(Column('wind', Integer))
    cols.append(Column('r34_ne', Integer))
    cols.append(Column('r34_se', Integer))
    cols.append(Column('r34_sw', Integer))
    cols.append(Column('r34_nw', Integer))
    cols.append(Column('r50_ne', Integer))
    cols.append(Column('r50_se', Integer))
    cols.append(Column('r50_sw', Integer))
    cols.append(Column('r50_nw', Integer))
    cols.append(Column('r64_ne', Integer))
    cols.append(Column('r64_se', Integer))
    cols.append(Column('r64_sw', Integer))
    cols.append(Column('r64_nw', Integer))
    cols.append(
        Column('sid_date_time', String(50), nullable=False, unique=True))

    metadata = MetaData(bind=self.engine)
    t = Table(table_name, metadata, *cols)
    metadata.create_all()
    mapper(IBTrACSTable, t)
    self.session.commit()
    return IBTrACSTable
def main(url, schema=None, apply=False, alphabetical=False, quiet=False):
    """
    Print or apply a reflected or loaded database schema.
    """
    # output
    from cargo.log import (
        get_logger,
        enable_default_logging,
    )

    enable_default_logging()

    # build the particular database engine
    from cargo.sql.alchemy import make_engine

    engine = make_engine(url)

    # load the appropriate schema
    if schema is None:
        # examine the database to construct a schema
        from sqlalchemy.schema import MetaData

        metadata = MetaData(bind=engine.connect(), reflect=True)
    else:
        # load an already-defined schema
        from cargo.sugar import value_by_name

        metadata = value_by_name(schema)

    # print or apply the schema
    if apply:
        if not quiet:
            get_logger("sqlalchemy.engine", level="DEBUG")
        metadata.create_all(engine)
    else:
        # print the DDL
        from sqlalchemy.schema import CreateTable

        if alphabetical:
            sorted_tables = sorted(metadata.sorted_tables, key=lambda t: t.name)
        else:
            sorted_tables = metadata.sorted_tables

        for table in sorted_tables:
            print(CreateTable(table).compile(engine))
def test_create_table(engine, inspector):
    meta = MetaData()
    Table('test_pybigquery.test_table_create', meta,
          Column('integer_c', sqlalchemy.Integer, doc="column description"),
          Column('float_c', sqlalchemy.Float),
          Column('decimal_c', sqlalchemy.DECIMAL),
          Column('string_c', sqlalchemy.String),
          Column('text_c', sqlalchemy.Text),
          Column('boolean_c', sqlalchemy.Boolean),
          Column('timestamp_c', sqlalchemy.TIMESTAMP),
          Column('datetime_c', sqlalchemy.DATETIME),
          Column('date_c', sqlalchemy.DATE),
          Column('time_c', sqlalchemy.TIME),
          Column('binary_c', sqlalchemy.BINARY),
          bigquery_description="test table description",
          bigquery_friendly_name="test table name",
          bigquery_cluster_by=["integer_c", "string_c"],
          bigquery_partition_by="DATE(timestamp_c)",
          bigquery_require_partition_filtering=True)
    meta.create_all(engine)

    # Validate index creation
    indexes = inspector.get_indexes('test_pybigquery.test_table_create')
    assert len(indexes) == 2
    assert indexes[0] == {
        'name': 'partition',
        'column_names': ['timestamp_c'],
        'unique': False
    }
    assert indexes[1] == {
        'name': 'clustering',
        'column_names': ['integer_c', 'string_c'],
        'unique': False
    }

    meta.drop_all(engine)

    # Test creating tables with declarative_base
    Base = declarative_base()

    class TableTest(Base):
        __tablename__ = 'test_pybigquery.test_table_create2'
        integer_c = Column(sqlalchemy.Integer, primary_key=True)
        float_c = Column(sqlalchemy.Float)

    Base.metadata.create_all(engine)
    Base.metadata.drop_all(engine)
def test_connection(
    ctx: object,
    metadata: MetaData,
    engine: Engine,
    real_transaction: bool = False,
    ctx_connection_attribute_name: str = '_test_fx_connection',
) -> typing.Generator:
    """Join a SQLAlchemy session into an external transaction for a test suite.

    :param object ctx: Context object to inject the test connection into
    :param MetaData metadata: SQLAlchemy schema metadata
    :param bool real_transaction: (Optional) Whether to use the engine as the
        connection directly or to make a separate connection.
        Default: `False`
    :param str ctx_connection_attribute_name: (Optional) Attribute name for
        injecting the test connection into the context object.
        Default: `'_test_fx_connection'`

    .. seealso::

       Documentation of the SQLAlchemy session used in test suites.
          <http://docs.sqlalchemy.org/en/latest/orm/session_transaction.html#joining-a-session-into-an-external-transaction-such-as-for-test-suites>

    """  # noqa
    if real_transaction:
        metadata.create_all(engine)
        try:
            yield engine
        finally:
            metadata.drop_all(engine, checkfirst=True)
        return
    connection = engine.connect()
    try:
        metadata.drop_all(connection, checkfirst=True)
        transaction = connection.begin()
        try:
            metadata.create_all(bind=connection)
            setattr(ctx, ctx_connection_attribute_name, connection)
            try:
                yield connection
            finally:
                delattr(ctx, ctx_connection_attribute_name)
        finally:
            transaction.rollback()
    finally:
        connection.close()
        engine.dispose()
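A sketch of driving the generator above from a pytest fixture; the use of pytest, the fixture name, and the `metadata`/`engine` fixture arguments are assumptions, not part of the original API:

import pytest

@pytest.fixture
def fx_connection(request, metadata, engine):
    gen = test_connection(request.instance, metadata, engine)
    connection = next(gen)   # runs create_all and opens the outer transaction
    try:
        yield connection
    finally:
        next(gen, None)      # resume the generator: rollback, close, dispose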
def open_db(self, engine, mode='open'):
    """open the database with engine URL.

    possible modes:
    drop - drop own tables
    dropall - drop all tables
    create - create the tables
    open - reflect the existing tables
    """
    from sqlalchemy.ext.declarative import declarative_base, DeferredReflection
    from sqlalchemy.schema import MetaData
    from sqlalchemy import event, Table

    metadata = None

    @event.listens_for(Table, "column_reflect")
    def column_reflect(inspector, table, column_info):
        if table.metadata is metadata:
            if self.column_map and table.name in self.column_map:
                column_info['key'] = self.column_map[table.name][column_info['name']]
            else:
                column_info['key'] = column_info['name']

    convention = dict(
        ix='ix_%(column_0_label)s',
        uq="uq_%(table_name)s_%(column_0_name)s",
        ck="ck_%(table_name)s_%(column_0_name)s",
        fk="fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
        pk="pk_%(table_name)s"
    )

    if mode == 'dropall':
        # Clear out any existing tables
        metadata = MetaData(engine, naming_convention=convention)
        metadata.reflect()
        _drop_all(metadata)
        return

    metadata = MetaData(naming_convention=convention)
    self.Base = declarative_base(bind=engine, cls=DeferredReflection,
                                 metadata=metadata)
    metadata = self.Base.metadata

    self.declare(reflect=(mode == 'open'))

    if mode == 'drop':
        _drop_all(metadata)
        return

    if mode == 'create':
        metadata.create_all(engine, checkfirst=False)

    from sqlalchemy.orm import sessionmaker
    self.Base.prepare(engine)
    self.reflect_classes()
    Session = sessionmaker(bind=engine)
    self.session = Session()
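An illustrative call sequence for the modes `open_db` documents; the `db` object and engine URL are hypothetical, and each mode would normally be used on its own rather than back to back:

from sqlalchemy import create_engine

engine = create_engine("sqlite:///example.db")  # hypothetical URL
db.open_db(engine, mode="create")   # declare and create the tables
db.open_db(engine, mode="open")     # or: reflect an existing schema
db.open_db(engine, mode="drop")     # or: drop only this object's tables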
def reset_db():
    global _Session

    # create a sqlalchemy session, engine
    session = create_session()
    _Session.close()

    # drop and recreate tables
    db_drop_everything(engine)
    metadata = MetaData()
    # note: no tables are bound to this fresh MetaData, so create_all emits
    # no DDL here; the alembic migrations below rebuild the schema
    metadata.create_all(engine)

    # run sqlalchemy migrations
    __dir__ = os.path.dirname(os.path.realpath(__file__))
    alembic_cfg = Config(os.path.join(__dir__, "alembic.ini"))
    command.upgrade(alembic_cfg, 'head')

    return session
def initialize_db(self):
    if hasattr(self, '_metadata'):
        logger.info('already initialized')
        return
    metadata = MetaData(bind=self.engine)
    self.tables['victim_nicknames'] = Table(
        u'victim_nicknames', metadata,
        Column(u'nickname', VARCHAR(length=255), primary_key=True,
               nullable=False),
    )
    self.tables['victim_jids'] = Table(
        u'victim_jids', metadata,
        Column(u'jid', VARCHAR(length=255), primary_key=True, nullable=False),
    )
    self.tables['admin_jids'] = Table(
        u'admin_jids', metadata,
        Column(u'jid', VARCHAR(length=255), primary_key=True, nullable=False),
    )
    metadata.create_all()
    self._metadata = metadata
def create_scs_era5_table(self, dt_cursor, hourtime):
    table_name = utils.gen_scs_era5_table_name(dt_cursor, hourtime)

    class SCSERA5(object):
        pass

    if self.engine.dialect.has_table(self.engine, table_name):
        metadata = MetaData(bind=self.engine, reflect=True)
        t = metadata.tables[table_name]
        mapper(SCSERA5, t)
        return SCSERA5

    cols = []
    cols.append(Column('key', Integer, primary_key=True))
    cols.append(Column('x', Integer, nullable=False))
    cols.append(Column('y', Integer, nullable=False))
    cols.append(Column('lon', Float, nullable=False))
    cols.append(Column('lat', Float, nullable=False))
    cols.append(Column('land', Boolean, nullable=False))
    cols.append(Column('x_y', String(30), nullable=False, unique=True))
    # cols.append(Column('grid_pt_key', Integer, primary_key=True))

    era5_cols = self.get_era5_columns()
    cols = cols + era5_cols

    metadata = MetaData(bind=self.engine)
    t = Table(table_name, metadata, *cols)
    metadata.create_all()
    mapper(SCSERA5, t)
    self.session.commit()
    return SCSERA5
class JobPriorities(MetaBase):
    __tablename__ = 'jobpriorities'

    id = Column(Integer, primary_key=True)
    testtype = Column(String(128), nullable=False, index=True)
    buildtype = Column(String(64), nullable=False, index=True)
    platform = Column(String(64), nullable=False, index=True)
    priority = Column(Integer)
    timeout = Column(Integer)
    expires = Column(DateTime)
    buildsystem = Column(String(64), nullable=False, index=True)
    __table_args__ = (
        UniqueConstraint('testtype', 'buildtype', 'platform',
                         name='unique_job'),
    )

    def __init__(self, testtype, buildtype, platform, priority, timeout,
                 expires, buildsystem):
        self.testtype = testtype
        self.buildtype = buildtype
        self.platform = platform
        self.priority = priority
        self.timeout = timeout
        self.expires = expires
        self.buildsystem = buildsystem


if __name__ == "__main__":
    # Create all tables and columns; this must run before anything else
    # touches the database. (The original referenced an undefined `Metadata`;
    # the declarative base's metadata is the likely intent.)
    MetaBase.metadata.create_all(bind=engine, checkfirst=True)
def create_all(self, metadata: MetaData) -> None:
    metadata.create_all(self.engine)
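A short usage sketch for the one-method helper above; the table definition and the `db` instance (anything exposing `create_all` and an `engine`) are assumptions for illustration:

from sqlalchemy import MetaData, Table, Column, Integer, String

metadata = MetaData()
Table("example", metadata,
      Column("id", Integer, primary_key=True),
      Column("name", String(64)))

db.create_all(metadata)  # issues CREATE TABLE against db.engine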
class BaseTestCase(unittest.TestCase):

    def setUp(self):
        engine = create_engine('postgresql://postgres@localhost/pypet')
        self.metadata = MetaData(bind=engine)

        self.store_table = Table(
            'store', self.metadata,
            Column('store_id', types.Integer, primary_key=True),
            Column('store_name', types.String),
            Column('country_id', types.Integer,
                   ForeignKey('country.country_id')))
        self.country_table = Table(
            'country', self.metadata,
            Column('country_id', types.Integer, primary_key=True),
            Column('country_name', types.String),
            Column('region_id', types.Integer,
                   ForeignKey('region.region_id')))
        self.region_table = Table(
            'region', self.metadata,
            Column('region_id', types.Integer, primary_key=True),
            Column('region_name', types.String))
        self.product_table = Table(
            'product', self.metadata,
            Column('product_id', types.Integer, primary_key=True),
            Column('product_name', types.String),
            Column('product_category_id', types.Integer,
                   ForeignKey('product_category.product_category_id')))
        self.product_category_table = Table(
            'product_category', self.metadata,
            Column('product_category_id', types.Integer, primary_key=True),
            Column('product_category_name', types.String))
        self.facts_table = Table(
            'facts_table', self.metadata,
            Column('store_id', types.Integer, ForeignKey('store.store_id')),
            Column('date', types.Date),
            Column('product_id', types.Integer,
                   ForeignKey('product.product_id')),
            Column('price', types.Float),
            Column('qty', types.Integer))

        agg_name = ('agg_time_month_product_product_store_store'
                    '_Unit Price_Quantity')
        self.agg_by_month_table = Table(
            agg_name, self.metadata,
            Column('store_store', types.Integer,
                   ForeignKey('store.store_id')),
            Column('time_month', types.Date),
            Column('product_product', types.Integer,
                   ForeignKey('product.product_id')),
            Column('Unit Price', types.Float),
            Column('Quantity', types.Integer),
            Column('fact_count', types.Integer))

        agg_name = ('agg_time_year_store_country_product_product'
                    '_Unit Price_Quantity')
        self.agg_by_year_country_table = Table(
            agg_name, self.metadata,
            Column('store_country', types.Integer,
                   ForeignKey('country.country_id')),
            Column('time_year', types.Date),
            Column('product_product', types.Integer,
                   ForeignKey('product.product_id')),
            Column('Unit Price', types.Float),
            Column('Quantity', types.Integer),
            Column('fact_count', types.Integer))

        self.metadata.create_all()

        self.store_dim = Dimension('store', [
            Hierarchy('default', [
                Level('region', self.region_table.c.region_id,
                      self.region_table.c.region_name),
                Level('country', self.country_table.c.country_id,
                      self.country_table.c.country_name),
                Level('store', self.store_table.c.store_id,
                      self.store_table.c.store_name)])])

        self.product_dim = Dimension('product', [
            Hierarchy('default', [
                Level('category',
                      self.product_category_table.c.product_category_id,
                      self.product_category_table.c.product_category_name),
                Level('product', self.product_table.c.product_id,
                      self.product_table.c.product_name)])])

        self.time_dim = TimeDimension('time', self.facts_table.c.date,
                                      ['year', 'month', 'day'])

        unit_price = Measure('Unit Price', self.facts_table.c.price,
                             aggregates.avg)
        quantity = Measure('Quantity', self.facts_table.c.qty, aggregates.sum)
        price = ((unit_price.aggregate_with(None) *
                  quantity.aggregate_with(None))
                 .aggregate_with(aggregates.sum).label('Price'))

        self.cube = Cube(self.metadata, self.facts_table,
                         [self.store_dim, self.product_dim, self.time_dim],
                         [unit_price, quantity, price],
                         fact_count_column=self.facts_table.c.qty)

        self.region_table.insert({'region_id': 1,
                                  'region_name': 'Europe'}).execute()
        self.country_table.insert({'region_id': 1,
                                   'country_name': 'France',
                                   'country_id': 1}).execute()
        self.country_table.insert({'region_id': 1,
                                   'country_name': 'Germany',
                                   'country_id': 2}).execute()
        self.region_table.insert({'region_id': 2,
                                  'region_name': 'America'}).execute()
        self.country_table.insert({'region_id': 2,
                                   'country_name': 'USA',
                                   'country_id': 3}).execute()
        self.country_table.insert({'region_id': 2,
                                   'country_name': 'Canada',
                                   'country_id': 4}).execute()
        self.store_table.insert({
            'store_id': 1, 'store_name': 'ACME.fr', 'country_id': 1}).execute()
        self.store_table.insert({
            'store_id': 2, 'store_name': 'ACME.de', 'country_id': 2}).execute()
        self.store_table.insert({
            'store_id': 3, 'store_name': 'Food Mart.fr',
            'country_id': 1}).execute()
        self.store_table.insert({
            'store_id': 4, 'store_name': 'Food Mart.de',
            'country_id': 2}).execute()
        self.store_table.insert({
            'store_id': 5, 'store_name': 'ACME.us', 'country_id': 3}).execute()
        self.store_table.insert({
            'store_id': 6, 'store_name': 'Food Mart.us',
            'country_id': 3}).execute()
        self.store_table.insert({
            'store_id': 7, 'store_name': 'ACME.ca', 'country_id': 4}).execute()
        self.store_table.insert({
            'store_id': 8, 'store_name': 'Food Mart.ca',
            'country_id': 4}).execute()

        self.product_category_table.insert({
            'product_category_id': 1,
            'product_category_name': 'Vegetables'}).execute()
        self.product_category_table.insert({
            'product_category_id': 2,
            'product_category_name': 'Shoes'}).execute()

        self.product_table.insert({
            'product_id': 1,
            'product_category_id': 1,
            'product_name': 'Carrots'}).execute()
        self.product_table.insert({
            'product_id': 2,
            'product_category_id': 1,
            'product_name': 'Bananas'}).execute()
        self.product_table.insert({
            'product_id': 3,
            'product_category_id': 2,
            'product_name': 'Red shoes'}).execute()
        self.product_table.insert({
            'product_id': 4,
            'product_category_id': 2,
            'product_name': 'Green shoes'}).execute()
        self.product_table.insert({
            'product_id': 5,
            'product_category_id': 2,
            'product_name': 'Blue shoes'}).execute()

        years = cycle([2009, 2010, 2011])
        months = cycle([1, 5, 8, 9, 11])
        days = cycle([3, 12, 21, 29])
        prices = iter(cycle([100, 500, 1000]))
        quantities = iter(cycle([1, 5, 1, 2, 3, 20, 8]))
        # izip is Python 2 only; the builtin zip is lazy in Python 3
        values = iter(date(*value) for value in zip(years, months, days))

        for value in self.product_table.select().with_only_columns([
                self.product_table.c.product_id,
                self.store_table.c.store_id]).execute():
            self.facts_table.insert({
                'product_id': value.product_id,
                'store_id': value.store_id,
                'date': next(values),
                'qty': next(quantities),
                'price': next(prices)}).execute()

        results = (self.facts_table.select().with_only_columns([
            (func.sum(self.facts_table.c.price * self.facts_table.c.qty) /
             func.sum(self.facts_table.c.qty)).label('Unit Price'),
            func.sum(self.facts_table.c.qty).label('Quantity'),
            func.sum(self.facts_table.c.qty).label('fact_count'),
            self.facts_table.c.product_id.label('product_product'),
            self.facts_table.c.store_id.label('store_store'),
            func.date_trunc('month',
                            self.facts_table.c.date).label('time_month')])
            .group_by(func.date_trunc('month', self.facts_table.c.date),
                      self.facts_table.c.product_id,
                      self.facts_table.c.store_id)
            .execute())
        for res in results:
            self.agg_by_month_table.insert().execute(dict(res))

        second_agg = (self.facts_table.select().with_only_columns([
            (func.sum(self.facts_table.c.price * self.facts_table.c.qty) /
             func.sum(self.facts_table.c.qty)).label('Unit Price'),
            func.sum(self.facts_table.c.qty).label('Quantity'),
            func.sum(self.facts_table.c.qty).label('fact_count'),
            self.facts_table.c.product_id.label('product_product'),
            self.store_table.c.country_id.label('store_country'),
            func.date_trunc('year',
                            self.facts_table.c.date).label('time_year')])
            .where(self.facts_table.c.store_id == self.store_table.c.store_id)
            .group_by(self.facts_table.c.product_id.label('product_product'),
                      self.store_table.c.country_id.label('store_country'),
                      func.date_trunc('year', self.facts_table.c.date)
                      .label('time_year'))
            .execute())
        for res in second_agg:
            self.agg_by_year_country_table.insert().execute(dict(res))

    def tearDown(self):
        self.metadata.drop_all()
        # TODO: move this mapping into its own module
        if row_table_name == SOFTWARE_ENG_TRAITS_TABLENAME:
            SOFTWARE_ENG_TRAITS_MAPPING[row[COL_ORIGINAL_ANSWER]] = row_column_name

        # map csv fieldname to table/col
        CSV_FIELD_MAPPING[row_csv_fieldname] = {
            'table': row_table_name,
            'column': row_column_name,
            'datatype': row_type or "str"
        }

    # don't forget to add the last one :)
    add_table(metadata, current_table_name, columns)

    # this actually creates the database
    metadata.create_all(engine)
    metadata.reflect(bind=engine)
    print("Created empty db at {}".format(destfilename.resolve()))
    # -- end create empty db

    # -- start db load
    print("Starting load from {}".format(args.datafile))
    datafile = csv.DictReader(args.datafile)
    conn = engine.connect()
    for row in datafile:
        sql_data = defaultdict(defaultdict)
class JobPriorities(MetaBase):
    __tablename__ = 'jobpriorities'

    id = Column(Integer, primary_key=True)
    testtype = Column(String(128), nullable=False, index=True)
    buildtype = Column(String(64), nullable=False, index=True)
    platform = Column(String(64), nullable=False, index=True)
    priority = Column(Integer)
    timeout = Column(Integer)
    expires = Column(DateTime)
    buildsystem = Column(String(64), nullable=False, index=True)

    def __init__(self, testtype, buildtype, platform, priority, timeout,
                 expires, buildsystem):
        self.testtype = testtype
        self.buildtype = buildtype
        self.platform = platform
        self.priority = priority
        self.timeout = timeout
        self.expires = expires
        self.buildsystem = buildsystem


if __name__ == "__main__":
    # Create all tables and columns; this must run before anything else
    # touches the database.
    MetaBase.metadata.create_all(bind=engine, checkfirst=True)
class DBConnection():

    def __init__(self, dbFileName="sofaspud.db"):
        print("Database is being initialized")

        self.dbFileName = dbFileName
        self.dbPath = os.path.join(app.DATA_DIR, self.dbFileName)
        self.engine = create_engine("sqlite:///%s" % self.dbPath)
        self.metadata = MetaData(self.engine)
        self.session = scoped_session(
            sessionmaker(bind=self.engine, autocommit=True))

        # DB exists, do upgrade
        if os.path.isfile(self.dbPath):
            self.doUpgrade = True
        else:
            self.doUpgrade = False

        # DB VERSION
        latestDatabaseVersion = 1

        dbVersionTable = Table('DbVersion', self.metadata,
                               Column('version', Integer, primary_key=True))

        movieTable = Table(
            'Movie', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('dateAdded', DateTime(), default=datetime.datetime.utcnow),
            Column('dateChanged', DateTime(),
                   default=datetime.datetime.utcnow),
            Column('name', String()),
            Column('year', Integer),
            Column('status', String()),
            Column('movieDb', String()))

        serieTable = Table(
            'Serie', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('dateAdded', DateTime(), default=datetime.datetime.utcnow),
            Column('dateChanged', DateTime(),
                   default=datetime.datetime.utcnow),
            Column('tvDb', String()),
            Column('name', String()),
            Column('overview', Text()),
            Column('network', String()),
            Column('genre', String()),
            Column('runtime', String()),
            Column('airing', String()),
            Column('startYear', String()),
            Column('language', String()),
            Column('status', String()),
        )

        episodeTable = Table(
            'Episode', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('dateAdded', DateTime(), default=datetime.datetime.utcnow),
            Column('dateChanged', DateTime(),
                   default=datetime.datetime.utcnow),
            Column('serieid', Integer, ForeignKey('Serie.id')),
            Column('tvDb', String()),
            Column('name', String()),
            Column('season', Integer),
            Column('episode', Integer),
            Column('description', Text()),
            Column('airDate', DateTime()),
            Column('status', String()),
        )

        # Mappers
        versionMapper = mapper(DbVersion, dbVersionTable)
        movieMapper = mapper(Movie, movieTable)
        serieMapper = mapper(Serie, serieTable,
                             properties={'episode': relation(Episode)})
        episodeMapper = mapper(Episode, episodeTable)

        self.metadata.create_all()

        # defined before first use so the call below cannot raise NameError
        def upgradeDb():
            currentVersion = self.session.query(DbVersion).order_by(
                desc(DbVersion.version)).first()
            if currentVersion:
                if currentVersion.version == latestDatabaseVersion:
                    log.debug('Database is up to date.')
                    return

        if self.doUpgrade:
            upgradeDb()
        else:
            for nr in range(1, latestDatabaseVersion + 1):
                self.session.add(DbVersion(nr))
def analyze(data, drop=False, load=False):
    engine = create_engine('sqlite:////home/bkoziol/tmp/profiling.sqlite')
    metadata = MetaData(bind=engine)
    Base = declarative_base(metadata=metadata)
    Session = sessionmaker(bind=engine)

    class SqlBase(object):

        @classmethod
        def get_or_create(cls, s, kwds, commit=False):
            qq = s.query(cls).filter_by(**kwds)
            try:
                obj = qq.one()
            except NoResultFound:
                obj = cls(**kwds)
                s.add(obj)
                if commit:
                    s.commit()
            return obj

    class Scenario(SqlBase, Base):
        __tablename__ = 'scenario'
        sid = Column(Integer, primary_key=True)
        name = Column(String, nullable=False)

    class Function(SqlBase, Base):
        __tablename__ = 'function'
        fid = Column(Integer, primary_key=True)
        name = Column(String, nullable=False)

    class FileName(SqlBase, Base):
        __tablename__ = 'filename'
        fnid = Column(Integer, primary_key=True)
        name = Column(String, nullable=False)

    class Profile(SqlBase, Base):
        __tablename__ = 'profile'
        id = Column(Integer, primary_key=True)
        sid = Column(Integer, ForeignKey(Scenario.sid))
        fid = Column(Integer, ForeignKey(Function.fid))
        fnid = Column(Integer, ForeignKey(FileName.fnid), nullable=True)
        ncalls = Column(Integer, nullable=False)
        tottime = Column(Float, nullable=False)
        percall = Column(Float, nullable=False)
        cumtime = Column(Float, nullable=False)

        filename = relationship(FileName)
        scenario = relationship(Scenario)
        function = relationship(Function)

        def report(self, total):
            msg = [self.scenario.name]
            try:
                msg.append(self.filename.name)
            except AttributeError:
                msg.append('')
            msg.append(self.function.name)
            msg.append(str(self.tottime))
            msg.append(str(self.tottime / float(total)))
            msg.append(str(total))
            return ','.join(msg)

        @staticmethod
        def report_headers():
            return 'scenario,filename,function,tottime,perctime,exetime'

    if load:
        if drop:
            metadata.drop_all(checkfirst=True)
        metadata.create_all(checkfirst=True)
        s = Session()
        for kwds in data:
            with open(os.path.join('/home/bkoziol/tmp',
                                   kwds['name'] + '.txt'), 'r') as out:
                txt = out.read()
            profiles = re.split('finished ::.*', txt)
            profiles = profiles[0:-1]
            for profile in profiles:
                profile = profile.strip()
                scenario_name = re.match('starting :: (.*)', profile).group(1)
                scenario = Scenario.get_or_create(s, dict(name=scenario_name))
                table = re.match(r'.*lineno\(function\)(.*)', profile,
                                 flags=re.DOTALL).group(1).strip()
                lines = re.split('\n', table)
                for line in lines:
                    line = line.strip()
                    elements = re.split(' {2,}', line)
                    if '{' in line and '}' in line:
                        filename = None
                    else:
                        filename_name = re.match('.* (.*):.*',
                                                 elements[4]).group(1)
                        filename = FileName.get_or_create(
                            s, dict(name=filename_name))
                    rm = re.match(r'.*\((.*)\)|.*{(.*)}', elements[4])
                    if rm.group(1) is None:
                        function_name = rm.group(2)
                    else:
                        function_name = rm.group(1)
                    function = Function.get_or_create(
                        s, dict(name=function_name))
                    obj = Profile()
                    obj.ncalls = elements[0]
                    obj.tottime = elements[1]
                    obj.percall = elements[2]
                    obj.cumtime = elements[3]
                    obj.filename = filename
                    obj.scenario = scenario
                    obj.function = function
                    s.add(obj)
            s.commit()
    else:
        s = Session()
        print(Profile.report_headers())
        for scenario in s.query(Scenario):
            # get the total time
            total = s.query(func.sum(Profile.tottime)).filter_by(
                scenario=scenario)
            total = total.one()[0]
            # get the top ten time things
            top = s.query(Profile).filter_by(scenario=scenario)
            top = top.order_by(Profile.tottime.desc())
            top = top.limit(10)
            for obj in top:
                print(obj.report(total))
                 ('level_sets', 'level_set_id'),
                 ('qc_flags', 'qc_flag_id')]

    read_engine = create_engine(args.dsn)
    logger.info("Output to {}".format(args.outdsn))
    write_engine = create_engine(args.outdsn)

    meta = MetaData(bind=write_engine)
    meta.reflect(bind=read_engine)

    logger.info("Unsetting all of the sequence defaults")
    for table_name, column_name in sequences:
        meta.tables[table_name].columns[column_name].server_default = None

    logger.info("Creating all of the tables")
    meta.create_all()

    rSession = sessionmaker(bind=read_engine)()
    wSession = sessionmaker(bind=write_engine)()

    logger.info("Querying the data files")
    for table in meta.sorted_tables:
        logger.info("Table {}".format(table.name))
        NewRecord = quick_mapper(table)
        columns = table.columns.keys()
        for record in rSession.query(table).all():
            data = dict((str(column), getattr(record, column))
                        for column in columns)
            wSession.merge(NewRecord(**data))
    wSession.commit()
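The copy loop above calls `quick_mapper`, which is not shown in this fragment. A common minimal implementation (an assumption, not necessarily the original) maps a throwaway class onto the reflected table with the legacy `mapper()` API:

from sqlalchemy.orm import mapper

def quick_mapper(table):
    class GenericMapper(object):
        pass
    mapper(GenericMapper, table)
    return GenericMapper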
class HeapSQL:

    def __init__(self, delete_all=True):
        self.name = 'priority_queue'
        self.instance = uuid.uuid1().hex
        self.cli = create_engine(
            "postgresql://*****:*****@172.17.0.4/heapq", echo=False)
        self.conn = self.cli.connect()

        self.meta = MetaData()
        self.heapq = Table(
            self.name, self.meta,
            Column('id', Integer, primary_key=True),
            Column('instance', String, index=True),  # UUID
            Column('cost', Float, index=True),
            Column('count', Integer),
            Column('data', Binary),
            Index('idx_key', 'instance', 'cost'))

        if delete_all:
            self.delete()

        self.meta.create_all(self.cli)
        logger_memory.info("-" * 50)
        logger_memory.info("HeapSQL: init")

    def delete(self):
        try:
            self.heapq.drop(self.cli)
        except Exception as e:
            print(f'HeapSQL: delete: Runtime error: {e}')
        logger_memory.info("HeapSQL: delete")

    def heappush(self, cost: float, count: int, data):
        """Push item onto heap, maintaining the heap invariant."""
        # The Base64 encoded binary value must not have embedded newlines \n.
        item = {
            'instance': self.instance,
            'cost': cost,
            'count': count,
            'data': pickle.dumps(data)
        }
        self.conn.execute(self.heapq.insert(values=item))
        logger_memory.info(f"HeapSQL: heappush count: {count}")

    def heappop(self):
        """Pop the smallest item off the heap, maintaining the heap
        invariant."""
        row = self.conn.execute(
            select([
                self.heapq.c.id,
                # SQL MIN via func.min, not the Python builtin min()
                func.min(self.heapq.c.cost).label("min_cost"),
                self.heapq.c.count,
                self.heapq.c.data
            ]).where(self.heapq.c.instance == self.instance).group_by(
                self.heapq.c.id, self.heapq.c.count,
                self.heapq.c.data)).fetchone()
        if row is not None:
            self.conn.execute(
                delete(self.heapq).where(self.heapq.c.id == row.id))
            logger_memory.info(
                f"HeapSQL: heappop min_cost: {row.min_cost} count: {row.count}")
            return row.min_cost, row.count, pickle.loads(row.data)
        else:
            return None, None, None

    def heaplen(self):
        row = self.conn.execute(
            select([
                func.count()
            ]).where(self.heapq.c.instance == self.instance)).fetchone()
        length = row[0]  # renamed from `len` to avoid shadowing the builtin
        logger_memory.info(f"HeapSQL: heaplen len: {length}")
        return length
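A hypothetical round trip with the HeapSQL class above, assuming the PostgreSQL DSN in its constructor is reachable:

heap = HeapSQL()
heap.heappush(cost=2.5, count=1, data={"task": "b"})
heap.heappush(cost=1.0, count=2, data={"task": "a"})
cost, count, data = heap.heappop()   # smallest cost first: 1.0, 2, {"task": "a"}
assert heap.heaplen() == 1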
def initialize(self):
    metadata = MetaData()

    self.profiles = Table(
        "profiles", metadata,
        Column("id", INTEGER, primary_key=True, autoincrement=True),
        Column("vendor", VARCHAR(255), nullable=False, server_default=""),
        Column("material", VARCHAR(255), nullable=False, server_default=""),
        Column("density", REAL, nullable=False, server_default="0"),
        Column("diameter", REAL, nullable=False, server_default="0"))

    self.spools = Table(
        "spools", metadata,
        Column("id", INTEGER, primary_key=True, autoincrement=True),
        Column("profile_id", INTEGER, nullable=False),
        Column("name", VARCHAR(255), nullable=False, server_default=""),
        Column("cost", REAL, nullable=False, server_default="0"),
        Column("weight", REAL, nullable=False, server_default="0"),
        Column("used", REAL, nullable=False, server_default="0"),
        Column("temp_offset", INTEGER, nullable=False, server_default="0"),
        ForeignKeyConstraint(["profile_id"], ["profiles.id"],
                             ondelete="RESTRICT"))

    self.selections = Table(
        "selections", metadata,
        Column("tool", INTEGER),
        Column("client_id", VARCHAR(36)),
        Column("spool_id", INTEGER),
        PrimaryKeyConstraint("tool", "client_id", name="selections_pkey"),
        ForeignKeyConstraint(["spool_id"], ["spools.id"],
                             ondelete="CASCADE"))

    self.versioning = Table(
        "versioning", metadata,
        Column("schema_id", INTEGER, primary_key=True, autoincrement=False))

    self.modifications = Table(
        "modifications", metadata,
        Column("table_name", VARCHAR(255), nullable=False, primary_key=True),
        Column("action", VARCHAR(255), nullable=False),
        Column("changed_at", TIMESTAMP, nullable=False,
               server_default=text("CURRENT_TIMESTAMP")))

    if self.DIALECT_POSTGRESQL == self.engine.dialect.name:
        def should_create_function(name):
            row = self.conn.execute(
                "select proname from pg_proc where proname = '%s'"
                % name).scalar()
            return not bool(row)

        def should_create_trigger(name):
            row = self.conn.execute(
                "select tgname from pg_trigger where tgname = '%s'"
                % name).scalar()
            return not bool(row)

        trigger_function = DDL("""
            CREATE FUNCTION update_lastmodified()
            RETURNS TRIGGER AS $func$
            BEGIN
                INSERT INTO modifications (table_name, action, changed_at)
                VALUES(TG_TABLE_NAME, TG_OP, CURRENT_TIMESTAMP)
                ON CONFLICT (table_name) DO UPDATE
                SET action=TG_OP, changed_at=CURRENT_TIMESTAMP
                WHERE modifications.table_name=TG_TABLE_NAME;
                PERFORM pg_notify(TG_TABLE_NAME, TG_OP);
                RETURN NULL;
            END;
            $func$ LANGUAGE plpgsql;
            """)

        if should_create_function("update_lastmodified"):
            event.listen(metadata, "after_create", trigger_function)

        for table in [self.profiles.name, self.spools.name]:
            for action in ["INSERT", "UPDATE", "DELETE"]:
                name = "{table}_on_{action}".format(table=table,
                                                    action=action.lower())
                trigger = DDL("""
                    CREATE TRIGGER {name} AFTER {action} on {table}
                    FOR EACH ROW EXECUTE PROCEDURE update_lastmodified()
                    """.format(name=name, table=table, action=action))
                if should_create_trigger(name):
                    event.listen(metadata, "after_create", trigger)
    elif self.DIALECT_SQLITE == self.engine.dialect.name:
        for table in [self.profiles.name, self.spools.name]:
            for action in ["INSERT", "UPDATE", "DELETE"]:
                name = "{table}_on_{action}".format(table=table,
                                                    action=action.lower())
                trigger = DDL("""
                    CREATE TRIGGER IF NOT EXISTS {name}
                    AFTER {action} on {table} FOR EACH ROW
                    BEGIN
                        REPLACE INTO modifications (table_name, action)
                        VALUES ('{table}','{action}');
                    END
                    """.format(name=name, table=table, action=action))
                event.listen(metadata, "after_create", trigger)

    metadata.create_all(self.conn, checkfirst=True)
    )

    # Strace argument holes
    argument_holes = Table(
        'syscall_argument_holes', metadata,
        Column('id', INTEGER, primary_key=True),
        Column('syscall', LONGVARCHAR, nullable=False),
        Column('index', INTEGER, nullable=False),
        UniqueConstraint('syscall', 'index'),
    )

    # Known executables that don't have a corresponding strace
    untraced_executables = Table(
        'untraced_executables', metadata,
        Column('id', INTEGER, primary_key=True),
        Column('system', LONGVARCHAR, nullable=False),
        Column('executable', LONGVARCHAR, nullable=False),
        Column('arguments_hash', SHA1, nullable=False),
        Column('arguments', JSON, nullable=False),
        Index('system', 'executable', 'arguments_hash'),
    )

    # Create all tables
    try:
        metadata.create_all()
    except OperationalError as e:
        logger.exception('Unable to create database tables.')
        exit(1)
def init_tables(postgre_engine, drop_all=False):
    with postgre_engine.begin() as connection:
        if not connection.dialect.has_table(connection,
                                            'General Information'):
            metadata = MetaData(connection)
            Table(
                'General Information', metadata,
                Column('Date', Date, primary_key=True, nullable=False),
                Column('Vendor', Integer, primary_key=True, nullable=False),
                Column('BMP Vendor', Text),
                Column('Vdr Contacts', Text, nullable=False),
                Column('Factory', Text, nullable=False),
                Column('Fty Address', Text, nullable=False),
                Column('Fty Contacts', Text, nullable=False),
                Column('Auditor', Text, nullable=False),
                Column('FRM Lebel', Text, nullable=False))
            metadata.create_all()
        if not connection.dialect.has_table(connection, 'SIP Elements'):
            metadata = MetaData(connection)
            Table(
                'SIP Elements', metadata,
                Column('Vendor Style/PID', Integer, primary_key=True,
                       nullable=False),
                Column('PPR document', Text, nullable=False),
                Column('Red Seal Sample', Text, nullable=False),
                Column('Technical Specs & Construction', Text,
                       nullable=False),
                Column('Final Item Set-Up form', Text, nullable=False),
                Column('Total Program Quantity and Deliveries', Text,
                       nullable=False),
                Column('Color Standards', Text, nullable=False),
                Column('Production Color/Finish Representation', Text,
                       nullable=False),
                Column('Trims, Accessories, Hardware, Components and Labeling',
                       Text, nullable=False),
                Column('Yellow Seal Sample', Text, nullable=False),
                Column('Product Testing Results', Text, nullable=False),
                Column('Floor Ready Requirements', Text, nullable=False),
                Column('Retail Packaging Design Sample', Text,
                       nullable=False),
                Column('Carton marks and Labels', Text, nullable=False),
                Column('Factory Internal Reports – Inspection and Testing',
                       Text, nullable=False),
                Column('TCPS Inspection Reports', Text, nullable=False),
                Column('Completed Packing List', Text, nullable=False))
            metadata.create_all()
        if not connection.dialect.has_table(connection, 'PIF Info'):
            metadata = MetaData(connection)
            Table(
                'PIF Info', metadata,
                Column('Vendor Style/PID', Integer, primary_key=True,
                       nullable=False),
                Column('PO Number', BigInteger, primary_key=True,
                       nullable=False),
                Column('Purpose', Text, nullable=False),
                Column('Ship Begin Date', Date, nullable=False),
                Column('Ship End Date', Date, nullable=False))
            metadata.create_all()
        if not connection.dialect.has_table(connection, 'POM Info'):
            metadata = MetaData(connection)
            Table(
                'POM Info', metadata,
                Column('PID/Style', Integer, nullable=False),
                Column('DPCI', Integer, nullable=False),
                Column('PO Included', BigInteger, nullable=False),
                Column('Insp Type', Text, nullable=False),
                Column('PO Qty', Integer, nullable=False),
                Column('Available Qty', BigInteger, nullable=False),
                Column('Description', Text, nullable=False),
                Column('PWI?', Text, nullable=False))
            metadata.create_all()
        if not connection.dialect.has_table(connection, 'Item Info'):
            metadata = MetaData(connection)
            Table(
                'Item Info', metadata,
                Column('Item', Integer, nullable=False),
                Column('Item Description', Text, nullable=False),
                Column('PO(s)', BigInteger, nullable=False),
                Column('Order Quantity', Integer, nullable=False),
                Column('Available Quantity', Integer, nullable=False),
                Column('Vendor Style/PID', Integer, nullable=False),
                Column('Assortment Item(s)', Text))
            metadata.create_all()
        if drop_all:
            table_names = [
                'General Information', 'SIP Elements', 'PIF Info',
                'POM Info', 'Item Info'
            ]
            connection.execute('TRUNCATE {} RESTART IDENTITY;'.format(
                ','.join('"' + table + '"' for table in table_names)))