Esempio n. 1
0
def store_initial_soil_water(engine, fname_HDFstore, args):
    """Retrieve the INITIAL_SOIL_WATER table from a CGMS12 database and
    append it, year by year, to the HDF store under "/initial_soil_water".

    If the --crop_no option is used only the records for the given crop_no
    will be retrieved; otherwise all crops are stored.
    """
    meta = MetaData(engine)
    tbl_isw = Table("initial_soil_water", meta, autoload=True)

    # Validate --crop_no against the distinct crop numbers in the table.
    if args.crop_no is not None:
        s = sa.select([tbl_isw.c.crop_no]).distinct()
        crops = [row[0] for row in s.execute()]
        if args.crop_no not in crops:
            print("Crop ID specified with --cropno (%s) not found in INITIAL_SOIL_WATER table! Returning..." % args.crop_no)
            sys.exit()

    # Select distinct years to iterate over
    s = sa.select([tbl_isw.c.year]).distinct()
    years = s.execute()
    dataset_name = "/initial_soil_water"
    with pd.io.pytables.HDFStore(fname_HDFstore) as store:
        for yr, in sorted(years):
            # BUG FIX: use `is not None`, matching the validation above.
            # The previous truthiness test (`if args.crop_no:`) silently
            # treated crop_no == 0 as "all crops".
            if args.crop_no is not None:
                s = tbl_isw.select().where(sa.and_(tbl_isw.c.year == yr,
                                                   tbl_isw.c.crop_no == args.crop_no))
                print("Storing initial_soil_water for crop %i and year %i" % (args.crop_no, yr))
            else:
                s = tbl_isw.select().where(tbl_isw.c.year == yr)
                print("Storing initial_soil_water for year %i" % yr)
            df_isw = pd.read_sql(s, engine)
            # First write must create a table-format dataset so that later
            # years can be appended with queryable data_columns.
            if dataset_name in store:
                store.append(dataset_name, df_isw, data_columns=["grid_no", "stu_no", "crop_no", "year"])
            else:
                store.put(dataset_name, df_isw, format="table", data_columns=["grid_no", "stu_no", "crop_no", "year"])
def upgrade(migrate_engine):
    """Create the instance_faults table."""
    # Upgrade operations go here. Don't create your own engine;
    # bind migrate_engine to your metadata
    meta = MetaData()
    meta.bind = migrate_engine
    #
    # New Tables
    #
    instance_faults = Table('instance_faults', meta,
            Column('created_at', DateTime(timezone=False)),
            Column('updated_at', DateTime(timezone=False)),
            Column('deleted_at', DateTime(timezone=False)),
            Column('deleted', Boolean(create_constraint=True, name=None),
                    default=False),
            Column('id', Integer(), primary_key=True, nullable=False),
            # BUG FIX: the ForeignKey was previously passed INSIDE the type,
            # as String(36, ForeignKey('instances.uuid')), where it does not
            # belong and no foreign key was emitted.  It must be a separate
            # positional argument of Column.
            Column('instance_uuid', String(36), ForeignKey('instances.uuid')),
            Column('code', Integer(), nullable=False),
            Column('message',
                   String(length=255, convert_unicode=False,
                          assert_unicode=None,
                          unicode_error=None, _warn_on_bytestring=False)),
            Column('details',
                   Text(length=None, convert_unicode=False,
                        assert_unicode=None,
                        unicode_error=None, _warn_on_bytestring=False)),
            )
    try:
        instance_faults.create()
    except Exception:
        # NOTE(review): the failure is only logged, not re-raised --
        # presumably deliberate best-effort behaviour; confirm against
        # the project's other migrations before changing it.
        LOG.info(repr(instance_faults))
Esempio n. 3
0
    def test_boolean(self):
        """Test that the boolean only treats 1 as True

        Six rows are inserted: four with string values ('false', 'true',
        '1', '0') and two with integers (1, 0).  The final assertion shows
        that only the '1' and 1 rows come back as True -- notably the
        string 'true' does not.
        """

        meta = MetaData(testing.db)
        # create_constraint=False: no CHECK constraint is emitted, so the
        # bogus string values inserted below are accepted by the database.
        t = Table('bool_table', meta, Column('id', Integer,
                  primary_key=True), Column('boo',
                  Boolean(create_constraint=False)))
        try:
            meta.create_all()
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (1, 'false');")
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (2, 'true');")
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (3, '1');")
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (4, '0');")
            testing.db.execute('INSERT INTO bool_table (id, boo) '
                               'VALUES (5, 1);')
            testing.db.execute('INSERT INTO bool_table (id, boo) '
                               'VALUES (6, 0);')
            # Select rows where boo is true; only ids 3 and 5 qualify.
            eq_(t.select(t.c.boo).order_by(t.c.id).execute().fetchall(),
                [(3, True), (5, True)])
        finally:
            # Always drop the table, even when the assertions fail.
            meta.drop_all()
Esempio n. 4
0
def store_crop_calendar(engine, fname_HDFstore, args):
    """Retrieve the CROP_CALENDAR table from a CGMS12 database and store
    it in the HDF store under "/crop_calendar", one crop at a time.

    If the --crop_no option is used only the records for the given crop_no
    will be retrieved.
    """
    meta = MetaData(engine)
    tbl_cal = Table("crop_calendar", meta, autoload=True)

    # Distinct crop numbers currently present in the DB table.
    crop_query = sa.select([tbl_cal.c.crop_no]).distinct()
    crops = [rec[0] for rec in crop_query.execute()]

    # Narrow down to a single crop when requested on the command line.
    if args.crop_no is not None:
        if args.crop_no not in crops:
            print("Crop ID specified with --cropno (%s) not found in CROP_CALENDAR table! Returning..." % args.crop_no)
            sys.exit()
        crops = [args.crop_no]

    dataset_name = "/crop_calendar"
    key_columns = ["grid_no", "crop_no", "year"]
    with pd.io.pytables.HDFStore(fname_HDFstore) as store:
        for crop in crops:
            print("Storing crop_calendar for crop %i" % crop)
            query = tbl_cal.select().where(tbl_cal.c.crop_no == crop)
            df_cal = pd.read_sql(query, engine)
            # The first crop creates a table-format dataset; subsequent
            # crops are appended to it.
            if dataset_name not in store:
                store.put(dataset_name, df_cal, format="table",
                          data_columns=key_columns)
            else:
                store.append(dataset_name, df_cal, data_columns=key_columns)
Esempio n. 5
0
def _create_shadow_tables(migrate_engine):
    """Create a shadow_<table> copy of every reflected table.

    Shadow tables mirror each original table's columns so rows can be
    archived into them.
    """
    meta = MetaData(migrate_engine)
    meta.reflect(migrate_engine)
    table_names = meta.tables.keys()

    meta.bind = migrate_engine

    for table_name in table_names:
        table = Table(table_name, meta, autoload=True)

        columns = []
        for column in table.columns:
            # NOTE(boris-42): BigInteger is not supported by sqlite, so
            #                 after copy it will have NullType, other
            #                 types that are used in Nova are supported by
            #                 sqlite.
            if isinstance(column.type, NullType):
                column_copy = Column(column.name, BigInteger(), default=0)
            else:
                # BUG FIX: this assignment previously ran unconditionally,
                # clobbering the BigInteger replacement made above and
                # leaving NullType columns in the shadow table.
                column_copy = column.copy()
            columns.append(column_copy)

        shadow_table_name = 'shadow_' + table_name
        shadow_table = Table(shadow_table_name, meta, *columns,
                             mysql_engine='InnoDB')
        try:
            shadow_table.create(checkfirst=True)
        except Exception:
            # Log the failing DDL for debugging, then propagate.
            LOG.info(repr(shadow_table))
            LOG.exception(_('Exception while creating table.'))
            raise
Esempio n. 6
0
def populate_main_sql_testdatabase(engine):
    """Create the 'events' table and bulk-insert the bundled test events.

    Events are read from data/events_500.bson (relative to file_dir); only
    the source_ip/source_port/request url/pattern fields of each record
    are inserted.
    """
    meta = MetaData()

    table = Table('events', meta,
                  Column('id', Integer, primary_key=True, ),
                  Column('time', String(30)),
                  Column('source_ip', String(30)),
                  Column('source_port', String(30)),
                  Column('request_url', String(500)),
                  Column('request_raw', String(65536)),
                  Column('pattern', String(20)),
                  Column('filename', String(500)),
    )

    meta.create_all(engine)

    # BUG FIX: the file handle was previously leaked (open(...).read()).
    # Opening in binary mode also matches what BSON decoding expects.
    with open(os.path.join(file_dir, 'data/events_500.bson'), 'rb') as bson_file:
        data = bson_file.read()

    insert_dicts = []
    for item in bson.decode_all(data):
        new_item = {"source_ip": item["source_ip"],
                    "source_port": item["source_port"],
                    "request_url": item["request"]["url"],
                    "pattern": item["pattern"]}
        insert_dicts.append(new_item)

    conn = engine.connect()
    # print() call form works under both Python 2 and 3.
    print("Inserted: {0}".format(len(insert_dicts)))
    conn.execute(table.insert(), insert_dicts)
Esempio n. 7
0
def dump_table(table, filename, con, std=None, delimiter=',', format=None, encoding='utf-8', inspector=None):
    """Dump every row of *table* to a file or stream (Python 2 code).

    filename -- output path, or an already-open file-like object.
    con -- unused in this function.
    std -- NOTE(review): if a truthy *std* is passed, output goes to
        sys.stdout and *filename* is ignored; *filename* is only used
        when *std* is falsy.  Looks like a deliberate "print to stdout"
        flag, but confirm against the callers.
    format -- None writes raw row tuples under a '#'-prefixed header of
        space-separated field names; 'txt' writes CSV lines under a
        comma-separated header; any other value raises an Exception.
    inspector -- when given, the table is re-reflected from the live
        database so the dumped columns are the database's, not the
        model's.
    """
    from uliweb.utils.common import str_value
    from StringIO import StringIO
    import csv
    
    if not std:
        if isinstance(filename, (str, unicode)):
            std = open(filename, 'w')
        else:
            std = filename
    else:
        std = sys.stdout
    #add inspector table columns process, will not use model fields but database fields
    if inspector:
        meta = MetaData()
        table = Table(table.name, meta)
        inspector.reflecttable(table, None)
        
    result = do_(table.select())
    fields = [x.name for x in table.c]
    if not format:
        print >>std, '#' + ' '.join(fields)
    elif format == 'txt':
        print >>std, '#' + ','.join(fields)
    for r in result:
        if not format:
            # Raw tuple repr, one row per line.
            print >>std, r
        elif format == 'txt':
            # Route each row through the csv module for correct
            # quoting/escaping, then strip the trailing newline.
            buf = StringIO()
            fw = csv.writer(buf, delimiter=delimiter)
            fw.writerow([str_value(x, encoding=encoding) for x in r])
            print >>std, buf.getvalue().rstrip()
        else:
            raise Exception, "Can't support the text format %s" % format
Esempio n. 8
0
    def test_preexecute_passivedefault(self):
        """test that when we get a primary key column back from
        reflecting a table which has a default value on it, we pre-
        execute that DefaultClause upon insert."""

        try:
            meta = MetaData(testing.db)
            testing.db.execute(
                """
             CREATE TABLE speedy_users
             (
                 speedy_user_id   SERIAL     PRIMARY KEY,

                 user_name        VARCHAR    NOT NULL,
                 user_password    VARCHAR    NOT NULL
             );
            """
            )
            t = Table("speedy_users", meta, autoload=True)
            # BUG FIX: the inserted password must match the value asserted
            # below; it had been replaced by a scrubbed placeholder
            # ("******"), so the fetchall() assertion could never pass.
            r = t.insert().execute(user_name="user", user_password="lala")
            assert r.inserted_primary_key == [1]
            l = t.select().execute().fetchall()
            assert l == [(1, "user", "lala")]
        finally:
            # Plain DROP rather than meta.drop_all(): the table was created
            # with raw DDL above.
            testing.db.execute("drop table speedy_users")
Esempio n. 9
0
def update_dat_crimes():
    """Insert newly-arrived crime rows into the main crime table.

    Copies from src_chicago_crimes_all every row whose id appears in
    new_chicago_crimes_all, stamping start_date with today's date.
    Returns 'Crime Table updated', or None when the new_ table does not
    exist (nothing staged).
    """
    # Step Five: Update Main Crime table
    dat_crime_table = Table('dat_chicago_crimes_all', Base.metadata, 
        autoload=True, autoload_with=engine, extend_existing=True)
    src_crime_table = Table('src_chicago_crimes_all', Base.metadata, 
        autoload=True, autoload_with=engine, extend_existing=True)
    try:
        new_crime_table = Table('new_chicago_crimes_all', Base.metadata, 
            autoload=True, autoload_with=engine, extend_existing=True)
    except NoSuchTableError:
        # No staging table -> nothing to insert.
        return None
    # Bookkeeping columns that must not be copied verbatim from src to dat.
    excluded_cols = ['end_date', 'current_flag', 'chicago_crimes_all_row_id']
    dat_cols = [c for c in dat_crime_table.columns.keys() if c not in excluded_cols]
    excluded_cols.append('start_date')
    src_cols = [c for c in src_crime_table.columns if c.name not in excluded_cols]
    # start_date is generated here (today, YYYY-MM-DD) rather than copied.
    src_cols.append(text("'%s' AS start_date" % datetime.now().strftime('%Y-%m-%d')))
    # INSERT ... FROM SELECT, restricted via the join to ids present in
    # the new_ staging table.
    ins = dat_crime_table.insert()\
        .from_select(
            dat_cols,
            select(src_cols)\
                .select_from(src_crime_table.join(new_crime_table,
                    src_crime_table.c.id == new_crime_table.c.id))
        )
    conn = engine.contextual_connect()
    conn.execute(ins)
    return 'Crime Table updated'
Esempio n. 10
0
 def _boolean_fixture(self):
     """Create the 'hasbool' fixture table: a Boolean column with a named
     CHECK constraint ('ck1') plus a plain Integer column."""
     tbl = Table(
         'hasbool', self.metadata,
         Column('x', Boolean(create_constraint=True, name='ck1')),
         Column('y', Integer)
     )
     tbl.create(self.conn)
Esempio n. 11
0
    def test_foreignkey_missing_insert(self):
        """An insert into t2 without an id must fail with a NOT NULL
        violation rather than a missing-sequence error (see the NOTE
        below); checked with implicit_returning both off and on."""
        Table("t1", self.metadata, Column("id", Integer, primary_key=True))
        t2 = Table(
            "t2",
            self.metadata,
            Column("id", Integer, ForeignKey("t1.id"), primary_key=True),
        )
        self.metadata.create_all()

        # want to ensure that "null value in column "id" violates not-
        # null constraint" is raised (IntegrityError on psycoopg2, but
        # ProgrammingError on pg8000), and not "ProgrammingError:
        # (ProgrammingError) relationship "t2_id_seq" does not exist".
        # the latter corresponds to autoincrement behavior, which is not
        # the case here due to the foreign key.

        for eng in [
            engines.testing_engine(options={"implicit_returning": False}),
            engines.testing_engine(options={"implicit_returning": True}),
        ]:
            with expect_warnings(
                ".*has no Python-side or server-side default.*"
            ):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    eng.execute,
                    t2.insert(),
                )
Esempio n. 12
0
def upgrade(migrate_engine):
    """Create the task_log table; on MySQL, force the InnoDB engine."""
    meta = MetaData()
    meta.bind = migrate_engine

    # create new table
    task_log = Table('task_log', meta,
            Column('created_at', DateTime(timezone=False)),
            Column('updated_at', DateTime(timezone=False)),
            Column('deleted_at', DateTime(timezone=False)),
            Column('deleted',
                    Boolean(create_constraint=True, name=None)),
            Column('id', Integer(),
                    primary_key=True,
                    nullable=False,
                    autoincrement=True),
            Column('task_name', String(255), nullable=False),
            Column('state', String(255), nullable=False),
            Column('host', String(255), index=True, nullable=False),
            Column('period_beginning', String(255),
                                       index=True, nullable=False),
            Column('period_ending', String(255), index=True, nullable=False),
            Column('message', String(255), nullable=False),
            Column('task_items', Integer()),
            Column('errors', Integer()),
            )
    try:
        task_log.create()
    except Exception:
        # Clean up any half-created table before propagating the error.
        meta.drop_all(tables=[task_log])
        raise

    if migrate_engine.name == "mysql":
        migrate_engine.execute("ALTER TABLE task_log "
                "Engine=InnoDB")
Esempio n. 13
0
    def _test_selfref_fk(self, recreate):
        """Batch-alter a column on a table with a self-referential FK and
        verify the FK survives (optionally through a table recreate)."""
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('bar_id', Integer, ForeignKey('bar.id')),
            Column('data', String(50)),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'data': 'x', 'bar_id': None})
        self.conn.execute(bar.insert(), {'id': 2, 'data': 'y', 'bar_id': 1})

        with self.op.batch_alter_table("bar", recreate=recreate) as batch_op:
            batch_op.alter_column(
                'data', new_column_name='newdata', existing_type=String(50))

        # BUG FIX: the inspector was previously constructed twice in a row;
        # a single instance suffices.
        insp = Inspector.from_engine(self.conn)
        eq_(
            [(key['referred_table'],
             key['referred_columns'], key['constrained_columns'])
             for key in insp.get_foreign_keys('bar')],
            [('bar', ['id'], ['bar_id'])]
        )
Esempio n. 14
0
    def test_reflect_unique_index(self):
        """A unique Index must reflect as an index and NOT as a unique
        constraint -- verified via both the inspector API and full Table
        reflection."""
        insp = inspect(testing.db)

        meta = self.metadata

        # a unique index OTOH we are able to detect is an index
        # and not a unique constraint
        uc_table = Table('pgsql_uc', meta,
                         Column('a', String(10)),
                         Index('ix_a', 'a', unique=True))

        uc_table.create()

        # Inspector view: present in indexes (as unique), absent from
        # unique constraints.
        indexes = dict((i['name'], i) for i in insp.get_indexes('pgsql_uc'))
        constraints = set(i['name']
                          for i in insp.get_unique_constraints('pgsql_uc'))

        self.assert_('ix_a' in indexes)
        assert indexes['ix_a']['unique']
        self.assert_('ix_a' not in constraints)

        # Same checks on a fully reflected Table object.
        reflected = Table('pgsql_uc', MetaData(testing.db), autoload=True)

        indexes = dict((i.name, i) for i in reflected.indexes)
        constraints = set(uc.name for uc in reflected.constraints)

        self.assert_('ix_a' in indexes)
        assert indexes['ix_a'].unique
        self.assert_('ix_a' not in constraints)
Esempio n. 15
0
 def __init__(self):
     """Define the games_info/games_online/games_triggers tables and
     create any that do not yet exist in the games_online MySQL database.

     NOTE(review): the DSN credentials are scrubbed ('*****') and the
     port is 3006 rather than the MySQL default 3306 -- confirm both
     before use.
     """
     metadata = MetaData()
     self.engine = create_engine('mysql://*****:*****@localhost:3006/games_online', encoding='utf-8',pool_recycle=7200,pool_size=15,max_overflow=30)
     # self._dbSession = scoped_session(
     #    sessionmaker(
     #        bind=self.engine
     #    )
     # )
     self.games_info = Table('games_info', metadata,
                             Column('id', INTEGER, primary_key=True),
                             Column('gamecode', VARCHAR(20)),
                             Column('language', VARCHAR(20)),
                             Column('sid_api', VARCHAR(255)),
                             Column('online_api', VARCHAR(255)))
     self.games_online = Table('games_online', metadata,
                               Column('id', INTEGER, primary_key=True),
                               Column('gamecode', VARCHAR(20)),
                               Column('language', VARCHAR(20)),
                               Column('region', VARCHAR(20)),
                               Column('serverid', INTEGER),
                               Column('online', INTEGER),
                               Column('time', INTEGER))
     self.games_triggers = Table('games_triggers', metadata,
                                 Column('id', INTEGER, primary_key=True),
                                 Column('gamecode', VARCHAR(20)),
                                 Column('language', VARCHAR(20)),
                                 Column('region', VARCHAR(20)),
                                 Column('serverid', INTEGER),
                                 Column('time', INTEGER))
     # create_all checks for existing tables first by default.
     metadata.create_all(self.engine)
Esempio n. 16
0
    def test_cross_schema_reflection_five(self):
        """Reflecting with the default schema given explicitly plus
        postgresql_ignore_search_path keeps that schema on the Table and
        still resolves the subject<-referer FK join."""
        meta1 = self.metadata

        # we assume 'public'
        default_schema = testing.db.dialect.default_schema_name
        subject = Table('subject', meta1,
                        Column('id', Integer, primary_key=True))
        referer = Table('referer', meta1,
                        Column('id', Integer, primary_key=True),
                        Column('ref', Integer, ForeignKey('subject.id')))
        meta1.create_all()

        # Re-reflect both tables into a fresh MetaData with the schema
        # spelled out.
        meta2 = MetaData(testing.db)
        subject = Table('subject', meta2, autoload=True,
                        schema=default_schema,
                        postgresql_ignore_search_path=True
                        )
        referer = Table('referer', meta2, autoload=True,
                        schema=default_schema,
                        postgresql_ignore_search_path=True
                        )
        assert subject.schema == default_schema
        # The FK must have been reflected: the natural join condition is
        # subject.id == referer.ref.
        self.assert_((subject.c.id
                      == referer.c.ref).compare(
            subject.join(referer).onclause))
Esempio n. 17
0
    def test_reflection_with_unique_constraint(self):
        """The inspector reports a unique constraint's implicit index in
        BOTH get_indexes and get_unique_constraints; Table reflection
        de-duplicates and keeps only the constraint."""
        insp = inspect(testing.db)

        meta = self.metadata
        uc_table = Table('pgsql_uc', meta,
                         Column('a', String(10)),
                         UniqueConstraint('a', name='uc_a'))

        uc_table.create()

        # PostgreSQL will create an implicit index for a unique
        # constraint.   Separately we get both
        indexes = set(i['name'] for i in insp.get_indexes('pgsql_uc'))
        constraints = set(i['name']
                          for i in insp.get_unique_constraints('pgsql_uc'))

        self.assert_('uc_a' in indexes)
        self.assert_('uc_a' in constraints)

        # reflection corrects for the dupe
        reflected = Table('pgsql_uc', MetaData(testing.db), autoload=True)

        indexes = set(i.name for i in reflected.indexes)
        constraints = set(uc.name for uc in reflected.constraints)

        self.assert_('uc_a' not in indexes)
        self.assert_('uc_a' in constraints)
Esempio n. 18
0
 def test_cross_schema_reflection_three(self):
     """A foreign key that crosses schemas (test_schema.referer ->
     test_schema_2.subject) must survive reflection into a fresh
     MetaData."""
     meta1 = self.metadata
     subject = Table('subject', meta1,
                     Column('id', Integer, primary_key=True),
                     schema='test_schema_2')
     referer = Table(
         'referer',
         meta1,
         Column(
             'id',
             Integer,
             primary_key=True),
         Column(
             'ref',
             Integer,
             ForeignKey('test_schema_2.subject.id')),
         schema='test_schema')
     meta1.create_all()
     # Reflect both tables back and check the FK-derived join condition.
     meta2 = MetaData(testing.db)
     subject = Table('subject', meta2, autoload=True,
                     schema='test_schema_2')
     referer = Table('referer', meta2, autoload=True,
                     schema='test_schema')
     self.assert_((subject.c.id
                   == referer.c.ref).compare(
         subject.join(referer).onclause))
Esempio n. 19
0
    def test_cross_schema_reflection_four(self):
        """Same cross-schema FK as reflection_three, but reflected over a
        connection whose search_path includes both schemas, with
        postgresql_ignore_search_path=True."""
        meta1 = self.metadata
        subject = Table('subject', meta1,
                        Column('id', Integer, primary_key=True),
                        schema='test_schema_2')
        referer = Table(
            'referer',
            meta1,
            Column(
                'id',
                Integer,
                primary_key=True),
            Column(
                'ref',
                Integer,
                ForeignKey('test_schema_2.subject.id')),
            schema='test_schema')
        meta1.create_all()

        conn = testing.db.connect()
        # detach() -- presumably so the modified search_path does not leak
        # back into the connection pool; confirm.
        conn.detach()
        conn.execute("SET search_path TO test_schema, test_schema_2")
        meta2 = MetaData(bind=conn)
        subject = Table('subject', meta2, autoload=True,
                        schema='test_schema_2',
                        postgresql_ignore_search_path=True)
        referer = Table('referer', meta2, autoload=True,
                        schema='test_schema',
                        postgresql_ignore_search_path=True)
        self.assert_((subject.c.id
                      == referer.c.ref).compare(
            subject.join(referer).onclause))
        conn.close()
Esempio n. 20
0
 def _get_non_cell0_mappings():
     """Queries the API database for non-cell0 cell mappings."""
     meta = MetaData(bind=db_session.get_api_engine())
     cell_mappings = Table('cell_mappings', meta, autoload=True)
     # Everything except the special cell0 mapping.
     query = cell_mappings.select().where(
         cell_mappings.c.uuid != cell_mapping_obj.CellMapping.CELL0_UUID)
     return query.execute().fetchall()
Esempio n. 21
0
 def test_mview_select(self):
     """The materialized view reflects and returns its single row."""
     metadata = MetaData(testing.db)
     mview = Table('test_mview', metadata, autoload=True)
     rows = mview.select().execute().fetchall()
     eq_(rows, [(89, 'd1',)])
Esempio n. 22
0
def upgrade(migrate_engine):
    """Create the transfers table (volume-transfer records)."""
    meta = MetaData()
    meta.bind = migrate_engine

    # Reflected so the ForeignKey('volumes.id') below can resolve against
    # this MetaData.
    volumes = Table('volumes', meta, autoload=True)

    # New table
    transfers = Table(
        'transfers', meta,
        Column('created_at', DateTime(timezone=False)),
        Column('updated_at', DateTime(timezone=False)),
        Column('deleted_at', DateTime(timezone=False)),
        Column('deleted', Boolean),
        Column('id', String(36), primary_key=True, nullable=False),
        Column('volume_id', String(length=36), ForeignKey('volumes.id'),
               nullable=False),
        Column('display_name', String(length=255)),
        Column('salt', String(length=255)),
        Column('crypt_hash', String(length=255)),
        Column('expires_at', DateTime(timezone=False)),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    try:
        transfers.create()
    except Exception:
        # Log which table failed, then propagate so the migration aborts.
        LOG.error(_("Table |%s| not created!"), repr(transfers))
        raise
def upgrade(migrate_engine):
    """Add the allow_propose flag to the instance table and set it True
    for all existing rows.

    NOTE(review): `meta` is not defined in this function -- presumably a
    module-level MetaData shared by these migrations; confirm it exists.
    """
    meta.bind = migrate_engine
    instance_table = Table('instance', meta,
        Column('id', Integer, primary_key=True),
        Column('key', Unicode(20), nullable=False, unique=True),
        Column('label', Unicode(255), nullable=False),
        Column('description', UnicodeText(), nullable=True),
        Column('required_majority', Float, nullable=False),
        Column('activation_delay', Integer, nullable=False),
        Column('create_time', DateTime, default=func.now()),
        Column('access_time', DateTime, default=func.now(), onupdate=func.now()),
        Column('delete_time', DateTime, nullable=True),
        Column('creator_id', Integer, ForeignKey('user.id'), nullable=False),
        Column('default_group_id', Integer, ForeignKey('group.id'), nullable=True),
        Column('allow_adopt', Boolean, default=True),
        Column('allow_delegate', Boolean, default=True),
        Column('allow_index', Boolean, default=True),
        Column('hidden', Boolean, default=False),
        Column('locale', Unicode(7), nullable=True),
        Column('css', UnicodeText(), nullable=True),
        Column('use_norms', Boolean, nullable=True, default=True)
    )

    # sqlalchemy-migrate: attach the new column to the live table.
    propose = Column('allow_propose', Boolean, default=True)
    propose.create(instance_table)
    # Backfill existing rows; the column default only applies to inserts.
    u = instance_table.update(values={'allow_propose': True})
    migrate_engine.execute(u)
    def test_auto_append_constraint(self):
        """Constraints built against a table's columns attach themselves
        to that table automatically, and append_constraint() on an
        already-attached constraint is a no-op (no duplicate)."""
        meta = MetaData()

        tbl = Table('tbl', meta,
                    Column('a', Integer),
                    Column('b', Integer)
        )

        other = Table('t2', meta,
                      Column('a', Integer),
                      Column('b', Integer)
        )

        candidates = (
            UniqueConstraint(tbl.c.a),
            CheckConstraint(tbl.c.a > 5),
            ForeignKeyConstraint([tbl.c.a], [other.c.a]),
            PrimaryKeyConstraint(tbl.c.a),
        )
        for constraint in candidates:
            # Auto-attached on construction...
            assert constraint in tbl.constraints
            # ...and explicit re-append does not detach or duplicate it.
            tbl.append_constraint(constraint)
            assert constraint in tbl.constraints

        # Indexes auto-attach the same way.
        idx = Index('foo', tbl.c.a)
        assert idx in tbl.indexes
    def test_tometadata_ok(self):
        """tometadata() copies all four constraint types to the new table
        and re-points each copied constraint at that table."""
        meta = MetaData()

        source = Table('tbl', meta,
                       Column('a', Integer),
                       Column('b', Integer)
        )

        target = Table('t2', meta,
                       Column('a', Integer),
                       Column('b', Integer)
        )

        # Constructing against source.c.a attaches each to `source`.
        UniqueConstraint(source.c.a)
        CheckConstraint(source.c.a > 5)
        ForeignKeyConstraint([source.c.a], [target.c.a])
        PrimaryKeyConstraint(source.c.a)

        copied = source.tometadata(MetaData())

        eq_(len(copied.constraints), 4)

        for constraint in copied.constraints:
            assert constraint.table is copied
Esempio n. 26
0
 def test_checksfor_sequence(self):
     """t.create(checkfirst=True) must succeed when the column's
     sequence already exists in the database."""
     meta1 = self.metadata
     seq = Sequence("fooseq")
     t = Table("mytable", meta1, Column("col1", Integer, seq))
     # NOTE(review): drop appears to clear any leftover sequence before
     # it is recreated manually below -- confirm against test fixtures.
     seq.drop()
     testing.db.execute("CREATE SEQUENCE fooseq")
     t.create(checkfirst=True)
Esempio n. 27
0
def upgrade(migrate_engine):
    """Add workers table."""
    meta = MetaData()
    meta.bind = migrate_engine

    workers = Table(
        'workers', meta,
        # Inherited fields from CinderBase
        Column('created_at', DateTime(timezone=False)),
        Column('updated_at', DateTime(timezone=False)),
        Column('deleted_at', DateTime(timezone=False)),
        Column('deleted', Boolean(), default=False),

        # Workers table specific fields
        Column('id', Integer, primary_key=True),
        Column('resource_type', String(40), nullable=False),
        Column('resource_id', String(36), nullable=False),
        Column('status', String(255), nullable=False),
        Column('service_id', Integer, nullable=True),
        # At most one worker row per (resource_type, resource_id) pair.
        UniqueConstraint('resource_type', 'resource_id'),

        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )

    workers.create()

    # Reflected so the FK below can reference services.id.
    services = Table('services', meta, autoload=True)

    # The FK is added separately, after both tables exist.
    ForeignKeyConstraint(
        columns=[workers.c.service_id],
        refcolumns=[services.c.id]).create()
Esempio n. 28
0
File: sql.py Project: wylee/gimlet
class SQLBackend(BaseBackend):
    """Key/value channel backend persisting serialized blobs in a SQL
    table via SQLAlchemy; the table is created on first use."""

    def __init__(self, url, table_name='gimlet_channels', **engine_kwargs):
        """Bind to the database at *url* and create the backing table if
        it does not already exist."""
        meta = MetaData(bind=create_engine(url, **engine_kwargs))
        self.table = Table(table_name, meta,
                           Column('id', types.Integer, primary_key=True),
                           Column('key', types.CHAR(32), nullable=False,
                                  unique=True),
                           Column('data', types.LargeBinary, nullable=False))
        self.table.create(checkfirst=True)

    def __setitem__(self, key, value):
        """Serialize *value* and upsert it under *key*."""
        table = self.table
        key_col = table.c.key
        raw = self.serialize(value)
        # Check if this key exists with a SELECT FOR UPDATE, to protect
        # against a race with other concurrent writers of this key.
        r = table.count(key_col == key, for_update=True).scalar()
        if r:
            # If it exists, use an UPDATE.
            table.update().values(data=raw).where(key_col == key).execute()
        else:
            # Otherwise INSERT.
            table.insert().values(key=key, data=raw).execute()

    def __getitem__(self, key):
        """Return the deserialized value stored under *key*.

        Raises KeyError when the key is absent (or its stored blob is
        falsy -- the truthiness test does not distinguish the two).
        """
        raw = select([self.table.c.data], self.table.c.key == key).scalar()
        if raw:
            return self.deserialize(raw)
        else:
            raise KeyError('key %r not found' % key)
def downgrade(migrate_engine):
    """Drop the uuid column from virtual_interfaces."""
    meta = MetaData()
    meta.bind = migrate_engine

    # Reflect the live table, then remove the column added by upgrade().
    vif_table = Table('virtual_interfaces', meta, autoload=True)
    vif_table.drop_column('uuid')
Esempio n. 30
0
def upgrade(migrate_engine):
    """Create the milestone table, link delegateables to milestones, and
    add a milestones flag (defaulted False) to instances.

    NOTE(review): `meta` is not defined in this function -- presumably a
    module-level MetaData shared by these migrations; confirm it exists.
    """
    meta.bind = migrate_engine

    # Reflected so the user/instance FKs below resolve, and so new
    # columns can be attached to the live tables.
    user_table = Table('user', meta, autoload=True)
    instance_table = Table('instance', meta, autoload=True)
    delegateable_table = Table('delegateable', meta, autoload=True)

    milestone_table = Table('milestone', meta,
        Column('id', Integer, primary_key=True),
        Column('instance_id', Integer, ForeignKey('instance.id'), nullable=False),
        Column('creator_id', Integer, ForeignKey('user.id'), nullable=False),
        Column('title', Unicode(255), nullable=True),
        Column('text', UnicodeText(), nullable=True),
        Column('time', DateTime),
        Column('create_time', DateTime, default=datetime.utcnow),
        Column('delete_time', DateTime)
        )
    milestone_table.create()
    # sqlalchemy-migrate: attach the new FK column to delegateable.
    ms_col = Column('milestone_id', Integer, ForeignKey('milestone.id'), nullable=True)
    ms_col.create(delegateable_table)

    ms_bool = Column('milestones', Boolean, default=False)
    ms_bool.create(instance_table)
    # Backfill existing rows; the default only applies to new inserts.
    u = instance_table.update(values={'milestones': False})
    migrate_engine.execute(u)
Esempio n. 31
0
class TaskDB(SplitTableMixin, BaseTaskDB):
    """SQLAlchemy-backed task database using one table per project.

    A single ``Table`` object (``self.table``) is reused for every project:
    its ``.name`` attribute is rewritten before each operation with the
    per-project name from ``self._tablename(project)`` (provided by
    SplitTableMixin — assumed; confirm against the mixin).
    """
    __tablename__ = 'taskdb'

    def __init__(self, url):
        """Connect to the database at *url*, creating the database itself
        if it does not exist yet.
        """
        # '__tablename__' here is only a placeholder; the real table name is
        # assigned per project via ``self.table.name = ...`` elsewhere.
        self.table = Table('__tablename__',
                           MetaData(),
                           Column('taskid',
                                  String(64),
                                  primary_key=True,
                                  nullable=False),
                           Column('project', String(64)),
                           Column('url', String(1024)),
                           Column('status', Integer),
                           Column('schedule', LargeBinary),
                           Column('fetch', LargeBinary),
                           Column('process', LargeBinary),
                           Column('track', LargeBinary),
                           Column('lastcrawltime', Float(32)),
                           Column('updatetime', Float(32)),
                           mysql_engine='InnoDB',
                           mysql_charset='utf8')

        self.url = make_url(url)
        if self.url.database:
            # Try to create the target database via a database-less
            # connection; errors (e.g. it already exists) are best-effort
            # ignored on purpose.
            database = self.url.database
            self.url.database = None
            try:
                engine = create_engine(self.url,
                                       convert_unicode=True,
                                       pool_recycle=3600)
                conn = engine.connect()
                conn.execute("commit")
                conn.execute("CREATE DATABASE %s" % database)
            except sqlalchemy.exc.SQLAlchemyError:
                pass
            self.url.database = database
        self.engine = create_engine(url,
                                    convert_unicode=True,
                                    pool_recycle=3600)

        self._list_project()

    def _create_project(self, project):
        """Create the backing table (and status index) for *project*."""
        # Project names are restricted to word characters because they are
        # embedded into SQL identifiers.
        assert re.match(r'^\w+$', project) is not None
        if project in self.projects:
            return
        self.table.name = self._tablename(project)
        Index('status_%s_index' % self.table.name, self.table.c.status)
        self.table.create(self.engine, checkfirst=True)
        # Indexes are cleared so the shared Table object can be reused for
        # the next project without accumulating stale Index objects.
        self.table.indexes.clear()

    @staticmethod
    def _parse(data):
        """Decode a DB row dict: bytes -> text, JSON blobs -> dicts."""
        for key, value in list(six.iteritems(data)):
            if isinstance(value, six.binary_type):
                data[key] = utils.text(value)
        for each in ('schedule', 'fetch', 'process', 'track'):
            if each in data:
                if data[each]:
                    if isinstance(data[each], bytearray):
                        data[each] = str(data[each])
                    data[each] = json.loads(data[each])
                else:
                    data[each] = {}
        return data

    @staticmethod
    def _stringify(data):
        """Inverse of _parse: serialize the JSON fields to UTF-8 bytes."""
        for each in ('schedule', 'fetch', 'process', 'track'):
            if each in data:
                data[each] = utils.utf8(json.dumps(data[each]))
        return data

    def load_tasks(self, status, project=None, fields=None):
        """Yield tasks with the given *status*, optionally limited to one
        project and a subset of *fields*."""
        if project and project not in self.projects:
            return

        if project:
            projects = [
                project,
            ]
        else:
            projects = self.projects

        # getattr falls back to the raw field name if it is not a column.
        columns = [getattr(self.table.c, f, f)
                   for f in fields] if fields else self.table.c
        for project in projects:
            self.table.name = self._tablename(project)
            for task in self.engine.execute(
                    self.table.select().with_only_columns(columns).where(
                        self.table.c.status == status)):
                yield self._parse(result2dict(columns, task))

    def get_task(self, project, taskid, fields=None):
        """Return a single task dict by *taskid*, or None if not found."""
        if project not in self.projects:
            self._list_project()
        if project not in self.projects:
            return None

        self.table.name = self._tablename(project)
        columns = [getattr(self.table.c, f, f)
                   for f in fields] if fields else self.table.c
        for each in self.engine.execute(
                self.table.select().with_only_columns(columns).limit(1).where(
                    self.table.c.taskid == taskid)):
            return self._parse(result2dict(columns, each))

    def status_count(self, project):
        """Return {status: count} for *project* (empty dict if unknown)."""
        result = dict()
        if project not in self.projects:
            self._list_project()
        if project not in self.projects:
            return result

        self.table.name = self._tablename(project)
        for status, count in self.engine.execute(
                self.table.select().with_only_columns(
                    (self.table.c.status,
                     func.count(1))).group_by(self.table.c.status)):
            result[status] = count
        return result

    def insert(self, project, taskid, obj={}):
        """Insert a new task row; creates the project table on first use.

        NOTE(review): mutable default argument — harmless here because
        ``obj`` is copied before mutation, but worth cleaning up.
        """
        if project not in self.projects:
            self._list_project()
        if project not in self.projects:
            self._create_project(project)
            self._list_project()
        obj = dict(obj)
        obj['taskid'] = taskid
        obj['project'] = project
        obj['updatetime'] = time.time()
        self.table.name = self._tablename(project)
        return self.engine.execute(
            self.table.insert().values(**self._stringify(obj)))

    def update(self, project, taskid, obj={}, **kwargs):
        """Update an existing task row; raises LookupError for an unknown
        project.

        NOTE(review): mutable default argument — harmless here because
        ``obj`` is copied before mutation, but worth cleaning up.
        """
        if project not in self.projects:
            self._list_project()
        if project not in self.projects:
            raise LookupError
        self.table.name = self._tablename(project)
        obj = dict(obj)
        obj.update(kwargs)
        obj['updatetime'] = time.time()
        return self.engine.execute(self.table.update().where(
            self.table.c.taskid == taskid).values(**self._stringify(obj)))
def table_events():
    """Reflect and return the ``events`` table from the shared metadata."""
    events = Table('events', metadata, autoload=True)
    return events
def table_stocks_mktdata():
    """Reflect and return the ``stocks_mktdata`` table from the shared
    metadata."""
    mktdata = Table('stocks_mktdata', metadata, autoload=True)
    return mktdata
Esempio n. 34
0
# encoding: utf-8

from sqlalchemy import types, Table, Column
import vdm.sqlalchemy
import datetime

from ckan.model import meta, core, types as _types, domain_object

# Stores individual JSON-patch operations applied to CKAN model objects.
jsonpatch_table = Table(
    'jsonpatch',
    meta.metadata,
    Column('id', types.UnicodeText, primary_key=True,
           default=_types.make_uuid),
    # Name of the model the patch applies to.
    Column('model_name', types.UnicodeText, nullable=False),
    # Id of the specific object being patched.
    Column('object_id', types.UnicodeText, nullable=False),
    Column('operation', _types.JsonDictType),
    Column('scope', types.UnicodeText),
    # Application order of patches for the same object.
    Column('ordinal', types.Integer, nullable=False, default=0),
    Column('timestamp',
           types.DateTime,
           nullable=False,
           default=datetime.datetime.utcnow),
    Column('data', _types.JsonDictType),
)

# Add vdm state/revision support (soft delete + history table).
vdm.sqlalchemy.make_table_stateful(jsonpatch_table)
jsonpatch_revision_table = core.make_revisioned_table(jsonpatch_table)


class JSONPatch(vdm.sqlalchemy.RevisionedObjectMixin,
                vdm.sqlalchemy.StatefulObjectMixin,
                domain_object.DomainObject):
 # NOTE(review): this ``setUp`` body looks pasted in from a test fixture
 # (1-space indent, references METRO_ENGINE_PG) — verify it belongs here.
 def setUp(self):
     # Reflects the transport.files table against the PG engine and
     # prepares a far-future sentinel date.
     meta = MetaData(bind=METRO_ENGINE_PG)
     self.table = Table('files', meta, autoload=True, schema='transport')
     self.some_date = date(year=2100, month=1, day=1)
def downgrade(migrate_engine):
    """Revert ``volume_usage_cache.user_id`` to a 36-character string."""
    metadata = MetaData(bind=migrate_engine)
    usage_table = Table('volume_usage_cache', metadata, autoload=True)
    user_id_column = getattr(usage_table.c, 'user_id')
    user_id_column.alter(type=String(36))
Esempio n. 37
0
import asyncio

from aiomysql.sa import create_engine, result
from sqlalchemy import MetaData, Table, Column, BigInteger, String, Boolean, BLOB, Integer

# Scraping jobs submitted to the system.
jobs_table = Table(
    'jobs',
    MetaData(),
    Column('id', BigInteger, primary_key=True, nullable=False),
    Column('target', String(50), nullable=False),
    Column('url', String(2048), nullable=True),
    Column('query', String(2048), nullable=True),
    Column('geo_location', String(150), nullable=True),
    Column('domain', String(9), nullable=True),
    Column('parse', Boolean),
    Column('status', String(50)),
)

# Per-page results produced by a job (job_id references jobs.id, though no
# FK constraint is declared here).
job_results_table = Table(
    'job_results',
    MetaData(),
    Column('id', BigInteger, primary_key=True, nullable=False),
    Column('internal_id', BigInteger, nullable=False),
    Column('job_id', BigInteger, nullable=False),
    Column('content', BLOB),
    Column('page', Integer),
    Column('url', String(2048)),
)


class Session:
Esempio n. 38
0
import psycopg2
from sqlalchemy import create_engine, Table, Column, Integer, String, MetaData, ForeignKey
from sqlalchemy.orm import sessionmaker

engine = create_engine(
    'postgresql://*****:*****@localhost:5432/questrade_api', )

metadata = MetaData()
users = Table('users', metadata, Column('id', Integer, primary_key=True),
              Column('name', String))

metadata.create_all(engine)

conn = psycopg2.connect(dbname='test',
                        user='******',
                        host='localhost',
                        password='******')
create_table_query = '''CREATE TABLE mobile3
          (ID INT PRIMARY KEY     NOT NULL,
          MODEL           TEXT    NOT NULL,
          PRICE         REAL); '''
cursor = conn.cursor()
cursor.execute(create_table_query)
conn.commit()
cursor.close()
conn.close()
Esempio n. 39
0
import enum
import sys
from datetime import datetime

from sqlalchemy import (Column, DateTime, Enum, ForeignKey, Integer, Table,
                        Unicode, UniqueConstraint)
from sqlalchemy.orm import backref, relationship

from base import Base

# Many-to-many: posts <-> tags.
association_table_post_tag = Table(
    'association_post_tag', Base.metadata,
    Column('post_id', Integer, ForeignKey('post.id')),
    Column('tag_id', Integer, ForeignKey('tag.id')))

# Many-to-many: users <-> tags.
association_table_tag_user = Table(
    'association_tag_user', Base.metadata,
    Column('user_id', Integer, ForeignKey('user.id')),
    Column('tag_id', Integer, ForeignKey('tag.id')))

# Many-to-many: users <-> groups.
association_table_group_user = Table(
    'association_group_user', Base.metadata,
    Column('user_id', Integer, ForeignKey('user.id')),
    Column('group_id', Integer, ForeignKey('group.id')))


class Group(Base):
    """Declarative model for the ``group`` table."""

    __tablename__ = 'group'

    id = Column(Integer, primary_key=True)  # surrogate primary key
Esempio n. 40
0
def match(db_uri=None, in_dir=None):
    """ Check and fix matches of bibitem strings to MAG IDs

    For every bibitem already mapped to a MAG paper ID, look up all MAG
    papers with the same (normalized) title, keep the candidates whose
    author names appear in the bibitem string, and rewrite the mapping to
    the best candidate when it differs from the stored one.

    Exactly one of *db_uri* / *in_dir* must be given; *in_dir* points to a
    directory containing ``metadata.db`` (SQLite).
    """

    if not (db_uri or in_dir):
        print('need either DB URI or input directory path')
        return False
    if in_dir:
        db_path = os.path.join(in_dir, 'metadata.db')
        db_uri = 'sqlite:///{}'.format(os.path.abspath(db_path))
    print('Setting up preliminary bibitem DB connection')
    engine = create_engine(db_uri)

    print('Querying bibitem DB')
    bibitem_tuples = engine.execute(
        ('select bibitem.uuid, in_doc, bibitem_string, mag_id from bibitem'
         ' join bibitemmagidmap on bibitem.uuid = bibitemmagidmap.uuid'
         '')).fetchall()

    Base.metadata.create_all(engine)
    Base.metadata.bind = engine
    DBSession = sessionmaker(bind=engine)
    session = DBSession()

    # set up MAG DB
    print('setting up MAG DB')
    MAGBase = declarative_base()

    mag_db_uri = 'postgresql+psycopg2://mag:1maG$@localhost:5432/MAG'
    mag_engine = create_engine(
        mag_db_uri, connect_args={'options': '-c statement_timeout=60000'})
    MAGBase.metadata.create_all(mag_engine)
    MAGBase.metadata.bind = mag_engine
    MAGDBSession = sessionmaker(bind=mag_engine)
    mag_session = MAGDBSession()

    # Fix: this Table was reflected twice with identical arguments on the
    # same MetaData, which makes SQLAlchemy raise InvalidRequestError
    # ("Table 'papers' is already defined") on the second call.
    MAGPaper = Table('papers',
                     MAGBase.metadata,
                     autoload=True,
                     autoload_with=mag_engine)
    # /set up MAG DB

    total = len(bibitem_tuples)
    num_changed = 0
    for bi_idx, bibitem_tuple in enumerate(bibitem_tuples):
        # for each matched bibitem
        uuid = bibitem_tuple[0]
        in_doc = bibitem_tuple[1]
        bibitem_string = bibitem_tuple[2]
        mid = bibitem_tuple[3]
        bibitem_string_normalized = mag_normalize(bibitem_string)
        # get all papers w/ identical title
        candidates = MAG_same_title_papers(mag_engine, mid)
        if len(candidates) < 2:
            # only the already-matched paper has this title: nothing to fix
            continue
        # Keep candidates for which at least one author name occurs in the
        # normalized bibitem string.
        good_candidates = []
        for c in candidates:
            author_names = MAG_paper_authors(mag_engine, c[0])
            for name in author_names:
                if name in bibitem_string_normalized:
                    good_candidates.append(c)
                    break
        if len(good_candidates) == 0:
            continue
        elif len(good_candidates) == 1:
            choice = good_candidates[0]
        else:
            # Several plausible papers: pick the one ranked highest by the
            # second tuple field (presumably citation count — TODO confirm).
            good_candidates = sorted(good_candidates,
                                     key=itemgetter(1),
                                     reverse=True)
            choice = good_candidates[0]
        new_mid = str(choice[0])
        magIDmap_db = session.query(BibitemMAGIDMap).\
            filter_by(uuid=uuid).first()
        if magIDmap_db.mag_id != new_mid:
            num_changed += 1
            magIDmap_db.mag_id = new_mid
            session.flush()
        if bi_idx % 1000 == 0:
            # periodic progress report + commit
            print('{}/{} ({} updated)'.format(bi_idx, total, num_changed))
            session.commit()
    return 'done'
Esempio n. 41
0
def zoeken(m_email):
    """Show the "cluster -> articles" search dialog and dispatch to
    toonArtikelen with the chosen cluster and search options.

    Fix: the ``if not data[1] or data[1][0] == ' '`` validation block was
    duplicated verbatim, so invalid input popped the error dialog and
    re-entered the dialog twice.  One copy removed; behavior otherwise
    unchanged.
    """
    import validZt

    class Widget(QDialog):
        """Input dialog: cluster number, search mode, search term."""
        def __init__(self, parent=None):
            super(Widget, self).__init__(parent)
            self.setWindowTitle("Cluster -> Artikelen bereik")
            self.setWindowIcon(QIcon('./images/logos/logo.jpg'))

            self.setFont(QFont('Arial', 10))

            # Cluster number: forced uppercase, pattern letter L-T + letter
            # + 5 digits.
            self.Clusternummer = QLabel()
            clEdit = QLineEdit()
            clEdit.setFixedWidth(210)
            font = QFont("Arial",10)
            font.setCapitalization(QFont.AllUppercase)
            clEdit.setFont(font)
            reg_ex = QRegExp('^[L-Tl-t]{1}[A-Za-z]{1}[0-9]{5}')
            input_validator = QRegExpValidator(reg_ex, clEdit)
            clEdit.setValidator(input_validator)
            clEdit.textChanged.connect(self.clChanged)

            # Search-mode combo box (all / by number / by description).
            self.Keuze = QLabel()
            k0Edit = QComboBox()
            k0Edit.setFixedWidth(320)
            k0Edit.setFont(QFont("Arial",10))
            k0Edit.setStyleSheet("color: black;  background-color: #F8F7EE")
            k0Edit.addItem('                  Zoeken Artikelen')
            k0Edit.addItem('1. Alle Artikelen')
            k0Edit.addItem('2. Filter op artikelnummers')
            k0Edit.addItem('3. Filter op artikelomschrijving')
            k0Edit.activated[str].connect(self.k0Changed)

            # Free-text search term, max 20 characters.
            self.Zoekterm = QLabel()
            zktermEdit = QLineEdit()
            zktermEdit.setFixedWidth(210)
            zktermEdit.setFont(QFont("Arial",10))
            reg_ex = QRegExp('^.{0,20}$')
            input_validator = QRegExpValidator(reg_ex, zktermEdit)
            zktermEdit.setValidator(input_validator)
            zktermEdit.textChanged.connect(self.zktermChanged)

            grid = QGridLayout()
            grid.setSpacing(20)

            lbl = QLabel()
            pixmap = QPixmap('./images/logos/verbinding.jpg')
            lbl.setPixmap(pixmap)
            grid.addWidget(lbl , 1, 0, 1, 2)

            lbl2 = QLabel('Clusternummer')
            lbl2.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
            grid.addWidget(lbl2, 2, 0)
            grid.addWidget(clEdit, 2, 1)

            grid.addWidget(k0Edit, 3, 0, 1, 2, Qt.AlignRight)
            lbl1 = QLabel('Zoekterm')
            lbl1.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
            grid.addWidget(lbl1, 4, 0)
            grid.addWidget(zktermEdit, 4, 1)

            self.setLayout(grid)
            self.setGeometry(500, 300, 150, 150)

            grid.addWidget(QLabel('\u00A9 2017 all rights reserved [email protected]'), 6, 0, 1, 3)

            logo = QLabel()
            pixmap = QPixmap('./images/logos/logo.jpg')
            logo.setPixmap(pixmap)
            grid.addWidget(logo , 1, 1, 1, 1, Qt.AlignRight)

            applyBtn = QPushButton('Zoeken')
            applyBtn.clicked.connect(self.accept)

            grid.addWidget(applyBtn, 5, 1 , 1 ,1, Qt.AlignRight)
            applyBtn.setFont(QFont("Arial",10))
            applyBtn.setFixedWidth(100)
            applyBtn.setStyleSheet("color: black;  background-color: gainsboro")

            cancelBtn = QPushButton('Sluiten')
            cancelBtn.clicked.connect(lambda: windowSluit(self, m_email))

            grid.addWidget(cancelBtn, 5, 1, 1, 2)
            cancelBtn.setFont(QFont("Arial",10))
            cancelBtn.setFixedWidth(100)
            cancelBtn.setStyleSheet("color: black;  background-color: gainsboro")

        def clChanged(self, text):
            self.Clusternummer.setText(text)

        def k0Changed(self, text):
            self.Keuze.setText(text)

        def zktermChanged(self, text):
            self.Zoekterm.setText(text)

        def returnClusternummer(self):
            return self.Clusternummer.text()

        def returnKeuze(self):
            return self.Keuze.text()

        def returnZoekterm(self):
            return self.Zoekterm.text()

        @staticmethod
        def getData(parent=None):
            dialog = Widget(parent)
            dialog.exec_()
            return [dialog.returnClusternummer(), dialog.returnKeuze(),\
                    dialog.returnZoekterm()]

    window = Widget()
    data = window.getData()
    zoekterm = ''
    keuze = ''
    metadata = MetaData()
    iclusters = Table('iclusters', metadata,
        Column('iclusterID', Integer, primary_key=True),
        Column('omschrijving', String))
    engine = create_engine('postgresql+psycopg2://postgres@localhost/bisystem')
    con = engine.connect()
    # Look up the entered cluster; on failure show an error and re-enter
    # the dialog.  NOTE(review): execution falls through after the
    # recursive call, so momschr/clusternr may be unbound — verify.
    selcl = select([iclusters]).where(iclusters.c.iclusterID == str(data[0]).upper())
    rpcl = con.execute(selcl).first()
    if rpcl:
        momschr = rpcl[1]
    else:
        foutCluster()
        zoeken(m_email)
    if data[0]:
        clusternr = str(data[0]).upper()
    # Validate the chosen search mode (was duplicated; see docstring).
    if not data[1] or data[1][0] == ' ':
        ongInvoer()
        zoeken(m_email)
    elif data[1][0] == '1':
        keuze = '1'
    elif data[1][0] == '2' and validZt.zt(data[2], 2):
        keuze = '2'
        zoekterm = data[2]
    elif data[1][0] == '3' and data[1]:
        keuze = '3'
        zoekterm = data[2]
    else:
        ongInvoer()
        zoeken(m_email)
    toonArtikelen(keuze, zoekterm, m_email, momschr, clusternr)
Esempio n. 42
0
def init_tables():
    """Drop and recreate the filings/companies tables after an explicit
    user confirmation; returns the SQLAlchemy engine.

    WARNING: destructive — all existing data in the listed tables is lost.
    """
    # Keep prompting until the user explicitly confirms the reset.
    confirmed = False
    while not confirmed:
        answer = input(
            "Are you sure you want to reinitialize the db? THIS WILL WIPE THE CURRENT DB. ALL DATA WILL BE LOST! [Y/N]:"
        )
        if answer in ('Y', 'y'):
            confirmed = True
            db_cred = f"{DB_USERNAME}:{DB_PASSWORD}@{DB_HOST}/{DB_NAME}"
            engine = create_engine(f"postgresql+psycopg2://{db_cred}")
            metadata = MetaData(bind=engine)

    with engine.connect() as conn:
        # Drop any pre-existing tables (CASCADE removes dependent objects).
        for tbl in [
                'companies', 'filings', 'filing_types', 'unscraped_filings'
        ]:
            conn.execute(f"DROP TABLE IF EXISTS {tbl} CASCADE;")

        # Table definitions.
        companies = Table('companies', metadata,
                          Column('permno', Integer(), primary_key=True),
                          Column('ticker', String(10), nullable=False),
                          Column('cik', Integer(), nullable=True),
                          Column('business_name', String(100), nullable=False))

        filing_types = Table('filing_types', metadata,
                             Column('type_id', Integer(), primary_key=True),
                             Column('type', Text(), nullable=False),
                             Column('keep', Integer(), nullable=False))

        filings = Table('filings', metadata,
                        Column('filing_id', Integer(), primary_key=True),
                        Column('cik', Integer(), nullable=False),
                        Column('business_name', Text(), nullable=True),
                        Column('type', Text(), nullable=True),
                        Column('path', Text(), nullable=True),
                        Column('date', Date(), nullable=True),
                        Column('text', Text(), nullable=True),
                        Column('token', TSVECTOR(), nullable=True))

        metadata.create_all(engine)

        # Full-text search support: GIN index on the tsvector column ...
        conn.execute("""CREATE INDEX idx_filing ON filings USING gin(token);""")

        # ... plus a trigger keeping the tsvector in sync with the text.
        trigger_sql = """
        CREATE TRIGGER update_index
        BEFORE UPDATE OR INSERT
        ON filings
        FOR EACH ROW
        EXECUTE PROCEDURE
        tsvector_update_trigger(token, 'pg_catalog.english', text)
        """
        conn.execute(trigger_sql)

        return engine
Esempio n. 43
0
#!/usr/bin/env python
# coding: utf-8

# In[1]:


from datetime import datetime

from sqlalchemy import (MetaData, Table, Column, Integer, Numeric, String,
                       DateTime, ForeignKey, Boolean, create_engine)
metadata = MetaData()

# Cookie inventory: one row per cookie product.
cookies = Table('cookies', metadata,
               Column('cookie_id', Integer(), primary_key=True),
               Column('cookie_name', String(50), index=True),
               Column('cookie_recipe_url', String(255)),
               Column('cookie_sku', String(55)),
                Column('quantity', Integer()),
                Column('unit_cost', Numeric(12, 2))
              )

# Registered users; created_on/updated_on are maintained automatically.
users = Table('users', metadata,
             Column('user_id', Integer(), primary_key=True),
             Column('username', String(15), nullable=False, unique=True),
             Column('email_address', String(255), nullable=False),
             Column('phone', String(20), nullable=False),
             Column('password', String(25), nullable=False),
             Column('created_on', DateTime(), default=datetime.now),
             Column('updated_on', DateTime(), default=datetime.now, onupdate=datetime.now)
         )

orders = Table('orders', metadata,
Esempio n. 44
0
                                "(install with `pip install sqlalchemy`)")

log = logging.getLogger(__name__)

# Alembic schema revision this model set corresponds to.
SCHEMA_VERSION = "263a45963c72"
# Task lifecycle states.
TASK_PENDING = "pending"
TASK_RUNNING = "running"
TASK_COMPLETED = "completed"
TASK_RECOVERED = "recovered"
TASK_REPORTED = "reported"
TASK_FAILED_ANALYSIS = "failed_analysis"
TASK_FAILED_PROCESSING = "failed_processing"

# Secondary table used in association Machine - Tag.
machines_tags = Table("machines_tags", Base.metadata,
    Column("machine_id", Integer, ForeignKey("machines.id")),
    Column("tag_id", Integer, ForeignKey("tags.id"))
)

# Secondary table used in association Task - Tag.
tasks_tags = Table("tasks_tags", Base.metadata,
    Column("task_id", Integer, ForeignKey("tasks.id")),
    Column("tag_id", Integer, ForeignKey("tags.id"))
)

class Machine(Base):
    """Configured virtual machines to be used as guests."""
    __tablename__ = "machines"

    id = Column(Integer(), primary_key=True)  # surrogate key
    name = Column(String(255), nullable=False)   # internal machine name
    label = Column(String(255), nullable=False)  # hypervisor label
Esempio n. 45
0
import json
import random

app = Flask(__name__)
api = Api(app)

engine = sqlalchemy.create_engine("mysql+pymysql://root:[email protected]:3306/word_chain", encoding="utf8",
                                  echo=False)
Base = sqlalchemy.ext.declarative.declarative_base()

# Connect to the database through a Session object.
DBSession = sqlalchemy.orm.sessionmaker(bind=engine)
session = DBSession()

# Many-to-many: word lists <-> words.
word_wordList = Table('wordList_has_word', Base.metadata,
                      Column('wordList_id', Integer, ForeignKey('wordList.id'), primary_key=True),
                      Column('word_id', Integer, ForeignKey('word.id'), primary_key=True)
                      )

# Many-to-many: users <-> word lists they like.
user_wordList = Table('user_like_wordList', Base.metadata,
                      Column('user_id', Integer, ForeignKey('user.id'), primary_key=True),
                      Column('wordList_id', Integer, ForeignKey('wordList.id'), primary_key=True)
                      )


class User(Base):
    """Application user account (visible portion of the model)."""
    __tablename__ = 'user'

    id = sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True, autoincrement=True)
    # NOTE(review): password appears to be stored as plain String(50) —
    # no hashing visible here; verify elsewhere.
    account = sqlalchemy.Column("account", sqlalchemy.String(50), nullable=False)
    password = sqlalchemy.Column("password", sqlalchemy.String(50), nullable=False)
    nickname = sqlalchemy.Column("nickname", sqlalchemy.String(50), nullable=False)
Esempio n. 46
0
def toonArtikelen(keuze,zoekterm, m_email, momschr, clusternr):
    """Show the article list matching *keuze*/*zoekterm* in a table view;
    clicking an article number opens a quantity dialog that adds the
    article to cluster *clusternr* in the bisystem database.
    """
    class MyWindow(QDialog):
        # Resizable dialog hosting the article table view.
        def __init__(self, data_list, header, *args):
            QWidget.__init__(self, *args,)
            self.setGeometry(100, 50, 1350, 900)
            self.setWindowTitle('Cluster Artikelen')
            self.setWindowIcon(QIcon('./images/logos/logo.jpg')) 
            self.setWindowFlags(self.windowFlags()| Qt.WindowSystemMenuHint |
                              Qt.WindowMinMaxButtonsHint) 
            table_model = MyTableModel(self, data_list, header)
            table_view = QTableView()
            table_view.setModel(table_model)
            font = QFont("Arial", 10)
            table_view.setFont(font)
            table_view.resizeColumnsToContents()
            table_view.setSelectionBehavior(QTableView.SelectRows)
            table_view.clicked.connect(showSelection)
            layout = QVBoxLayout(self)
            layout.addWidget(table_view)
            self.setLayout(layout)
    
    class MyTableModel(QAbstractTableModel):
        # Read-only model over a list of row tuples; numbers are
        # right-aligned and floats rendered with 2 decimals.
        def __init__(self, parent, mylist, header, *args):
            QAbstractTableModel.__init__(self, parent, *args)
            self.mylist = mylist
            self.header = header
        def rowCount(self, parent):
            return len(self.mylist)
        def columnCount(self, parent):
            return len(self.mylist[0])
        def data(self, index, role):
            veld = self.mylist[index.row()][index.column()]
            if not index.isValid():
                return None
            elif role == Qt.TextAlignmentRole and (type(veld) == float or type(veld) == int):
                return Qt.AlignRight | Qt.AlignVCenter
            elif role != Qt.DisplayRole:
                return None
            if type(veld) == float:
                return '{:12.2f}'.format(veld)
            else:
                return veld
        def headerData(self, col, orientation, role):
            if orientation == Qt.Horizontal and role == Qt.DisplayRole:
                return self.header[col]
            return None
     
    header = ['Artikelnr', 'Omschrijving', 'Prijs', 'Voorraad', 'Eenheid',\
          'MinVrd', 'BestGr', 'Locatie', 'Groep', 'Categorie', 'Afmeting']

    metadata = MetaData()

    artikelen = Table('artikelen', metadata,
        Column('artikelID', Integer(), primary_key=True),
        Column('artikelomschrijving', String),
        Column('artikelprijs', Float),
        Column('art_voorraad', Float),
        Column('art_eenheid', String(20)),
        Column('art_min_voorraad', Float),
        Column('art_bestelgrootte', Float),
        Column('locatie_magazijn', String(10)),
        Column('artikelgroep', String),
        Column('categorie', String(10)),
        Column('afmeting', String))     
    engine = create_engine('postgresql+psycopg2://postgres@localhost/bisystem')
    con = engine.connect()
    # Build the SELECT according to the chosen search mode:
    # '1' = all, '2' = article-number prefix range, '3' = description ilike.
    if keuze == '1':
        sel = select([artikelen]).order_by(artikelen.c.artikelID)
    elif keuze == '2':
        # Pad the numeric prefix to the full 9-digit ID range
        # (e.g. '21' -> 210000000 .. 219999999).
        zoekterm = zoekterm+''
        aanv = '200000000'
        suppl = '299999999'
        zoekbegin = (zoekterm+aanv)[0:9]
        zoekeind = (zoekterm+suppl)[0:9]
        sel = select([artikelen]).where(and_(artikelen.c.artikelID >= int(zoekbegin),\
                     artikelen.c.artikelID <= int(zoekeind)))\
                    .order_by(artikelen.c.artikelID)
    elif keuze == '3':
        sel = select([artikelen]).where(artikelen.c.artikelomschrijving.ilike('%'+zoekterm+'%'))\
                              .order_by(artikelen.c.artikelID)
    else:
        ongInvoer()
        zoeken(m_email)
    
    # NOTE(review): the query is executed twice (once to test for a row,
    # once for iteration); execution also falls through after the
    # recursive zoeken() call above — verify intended.
    if con.execute(sel).fetchone():
        rp = con.execute(sel)
    else:
        geenRecord()
        zoeken(m_email)
    
    data_list=[]
    for row in rp:
        data_list += [(row)] 
    
    def showSelection(idx):
        # Only a click in column 0 (article number) opens the add dialog.
        artikelnr = idx.data()
        if idx.column() == 0:
            class MainWindow(QDialog):
                # Quantity-entry dialog for the clicked article.
                def __init__(self):
                    QDialog.__init__(self)
                       
                    grid = QGridLayout()
                    grid.setSpacing(20)
                                
                    self.lbl = QLabel()
                    self.pixmap = QPixmap('./images/logos/verbinding.jpg')
                    self.lbl.setPixmap(self.pixmap)
                    grid.addWidget(self.lbl , 0, 1, 1, 2)
            
                    self.logo = QLabel()
                    self.pixmap = QPixmap('./images/logos/logo.jpg')
                    self.logo.setPixmap(self.pixmap)
                    grid.addWidget(self.logo , 0, 2, 1, 1, Qt.AlignRight)
                                  
                    grid.addWidget(QLabel('Clusternummer               '+clusternr+\
                                        '\n'+momschr[:35]), 1, 1, 1, 3)
                        
                    self.setFont(QFont('Arial', 10))
                    grid.addWidget(QLabel('Artikelnummer               '+str(artikelnr)), 3, 1, 1, 3)
                                  
                    self.setWindowTitle("Clusters Samenstellen")
                    self.setWindowIcon(QIcon('./images/logos/logo.jpg'))
                    self.setFont(QFont('Arial', 10))
               
                    # Quantity input restricted to (signed) decimal numbers.
                    self.Hoeveelheid = QLabel(self)
                    self.Hoeveelheid.setText('Hoeveelheid ')
                    self.hoev = QLineEdit(self)
                    self.hoev.setFixedWidth(210)
                    reg_ex = QRegExp("^[-+]?[0-9]*\.?[0-9]+$")
                    input_validator = QRegExpValidator(reg_ex, self.hoev)
                    self.hoev.setValidator(input_validator)
                    
                    grid.addWidget(self.Hoeveelheid, 4, 1)
                    grid.addWidget(self.hoev, 4, 2)
                    
                    grid.addWidget(QLabel('\u00A9 2017 all rights reserved\n     [email protected]'), 6, 0, 1, 3, Qt.AlignCenter)
                    
                    self.applyBtn = QPushButton('Invoeren', self)
                    self.applyBtn.clicked.connect(self.clickMethod)
                    grid.addWidget(self.applyBtn, 5, 2, 1, 1, Qt.AlignRight)
                    self.applyBtn.setFont(QFont("Arial",10))
                    self.applyBtn.setFixedWidth(100)
                    self.applyBtn.setStyleSheet("color: black;  background-color: gainsboro")
                    
                    self.cancelBtn = QPushButton('Sluiten')
                    self.cancelBtn.clicked.connect(self.close)
                    grid.addWidget(self.cancelBtn, 5, 1, 1, 2, Qt.AlignCenter) 
                    self.cancelBtn.setFont(QFont("Arial",10))
                    self.cancelBtn.setFixedWidth(100)
                    self.cancelBtn.setStyleSheet("color: black;  background-color: gainsboro")
                        
                    self.setLayout(grid)
                    self.setGeometry(500, 100, 150, 150)
                                   
                def clickMethod(self):
                    # Validate the entered quantity; empty or zero is a no-op.
                    mhoev = self.hoev.text()
                    if mhoev == '' or mhoev == '0':
                        return
                    mhoev = float(str(mhoev))
                    
                    metadata = MetaData()
                    
                    icluster_artikelen = Table('icluster_artikelen', metadata,
                        Column('icluster_artID', Integer, primary_key=True),
                        Column('iclusterID', None, ForeignKey('iclusters.iclusterID')),
                        Column('artikelID', None, ForeignKey('artikelen.artikelID')),
                        Column('hoeveelheid', Float))
                    artikelen = Table('artikelen', metadata,
                        Column('artikelID', Integer(), primary_key=True),
                        Column('artikelprijs', Float))
                    iclusters = Table('iclusters', metadata,
                        Column('iclusterID', Integer(), primary_key=True),
                        Column('materialen', Float))
                    engine = create_engine('postgresql+psycopg2://postgres@localhost/bisystem')
                    con = engine.connect()
                    # Does this article already belong to the cluster?
                    selclart = select([icluster_artikelen]).where(and_(icluster_artikelen\
                      .c.iclusterID == clusternr, icluster_artikelen.c.artikelID == artikelnr))
                    rpclart = con.execute(selclart).first()
                    selart = select([artikelen]).where(artikelen.c.artikelID == artikelnr)
                    rpart = con.execute(selart).first()
                    martprijs = rpart[1]
                    # Add price * quantity to the cluster's material total.
                    updcl = update(iclusters).where(iclusters.c.iclusterID == clusternr).\
                      values(materialen = iclusters.c.materialen + martprijs*mhoev)
                    con.execute(updcl)
                    if rpclart:
                        # Existing link row: overwrite the quantity.
                        updclart = update(icluster_artikelen).where(and_(icluster_artikelen.c.\
                         iclusterID == clusternr, icluster_artikelen.c.artikelID == artikelnr))\
                         .values(hoeveelheid = mhoev)
                        con.execute(updclart)
                        calcBestaat()
                    else:
                        # New link row: allocate the next id via MAX()+1.
                        # NOTE(review): MAX()+1 is race-prone under
                        # concurrent writers — a sequence would be safer.
                        mclartnr = (con.execute(select([func.max(icluster_artikelen.c.\
                            icluster_artID, type_=Integer).label('mclartnr')])).scalar())
                        mclartnr += 1
                        insclart = insert(icluster_artikelen).values(icluster_artID = mclartnr,\
                        iclusterID = clusternr, artikelID = artikelnr, hoeveelheid = mhoev)
                        con.execute(insclart)
                        invoerOK()
                    self.accept()
                    
            mainWin = MainWindow()
            mainWin.exec_()
 
    win = MyWindow(data_list, header)
    win.exec_()
Esempio n. 47
0
    String,
    Table,
    Text,
)
from sqlalchemy.orm import mapper, relationship
from src.models.books import Books
from src.models.reviews import Reviews
from src.models.users import Users

metadata = MetaData()

# Core table definition for books; all bibliographic fields are required.
# Presumably mapped to the imported Books model elsewhere in this module —
# confirm at the mapper() call site.
books = Table(
    "books",
    metadata,
    Column("id", Integer, primary_key=True, autoincrement=True),
    Column("isbn", String, nullable=False),
    Column("title", String, nullable=False),
    Column("author", String, nullable=False),
    Column("year", Integer, nullable=False),
)

# Account table: usernames are unique and passwords are stored hashed.
# "confirmed" defaults to False — presumably flipped by a confirmation
# flow not visible in this module; verify against the Users mapper.
users = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True, autoincrement=True),
    Column("username", String, nullable=False, unique=True),
    Column("password_hash", String, nullable=False),
    Column("confirmed", Boolean, default=False),
)

reviews = Table(
from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey
from sqlalchemy.orm import mapper, relationship, create_session
import sets

# this example illustrates a polymorphic load of two classes

# In-memory SQLite engine bound directly to the MetaData (legacy "bound
# metadata" style); echo all emitted SQL for demonstration purposes.
metadata = MetaData('sqlite://')
metadata.bind.echo = True

# a table to store companies
companies = Table('companies', metadata, 
   Column('company_id', Integer, primary_key=True),
   Column('name', String(50)))

# we will define an inheritance relationship between the table "people" and "engineers",
# and a second inheritance relationship between the table "people" and "managers".
# "type" is presumably the polymorphic discriminator column — confirm at the
# mapper configuration.
people = Table('people', metadata, 
   Column('person_id', Integer, primary_key=True),
   Column('company_id', Integer, ForeignKey('companies.company_id')),
   Column('name', String(50)),
   Column('type', String(30)))
   
# joined-table inheritance: engineers share people's primary key.
engineers = Table('engineers', metadata, 
   Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True),
   Column('status', String(30)),
   Column('engineer_name', String(50)),
   Column('primary_language', String(50)),
  )
   
managers = Table('managers', metadata, 
   Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True),
Esempio n. 49
0
from xml.etree import ElementTree

# In-memory SQLite database for this example; MetaData is bound to the
# engine (legacy "bound metadata" style, so meta.create_all() needs no bind).
engine = create_engine('sqlite://')
meta = MetaData(engine)


# Comparator for the PickleType column: ElementTree objects are mutable,
# so PickleType needs an explicit equality check to detect changes.
def are_elements_equal(x, y):
    """Return whether the two pickled elements compare equal."""
    result = x == y
    return result


# stores a top level record of an XML document.
# the "element" column will store the ElementTree document as a BLOB
# (PickleType), using are_elements_equal above to detect in-place mutation.
documents = Table('documents', meta,
                  Column('document_id', Integer, primary_key=True),
                  Column('filename', String(30), unique=True),
                  Column('element', PickleType(comparator=are_elements_equal)))

# emit CREATE TABLE statements for everything registered on `meta`.
meta.create_all()


# Plain (non-declarative) document class, mapped classically below.
class Document(object):
    """A named XML document: a filename plus an ElementTree root element."""

    def __init__(self, name, element):
        # keep the parsed tree and its display name on the instance
        self.element = element
        self.filename = name


# setup mapper.
# Classical (imperative) mapping: ties the plain Document class to the
# documents table without using the declarative system.
mapper(Document, documents)
Esempio n. 50
0
def create_tables(meta, con):
    """Define the ``coins`` table on *meta* and return it.

    NOTE(review): despite the name, this function only *defines* the table
    object; it never emits DDL. Call ``meta.create_all(bind=con)`` (or use
    the returned Table) if the table must actually exist in the database.
    The ``con`` parameter is currently unused — kept for interface
    compatibility with callers.
    """
    # NOTE(review): 'name' is typed Integer while every other text-like
    # column is String — this looks like a swapped type; confirm against
    # the actual schema before changing it.
    coins = Table('coins', meta, Column('id', String, primary_key=True),
                  Column('symbol', String), Column('name', Integer),
                  Column('webpage', String), Column('github', String))
    # Return the table so callers can create it or reference its columns
    # (previously the object was silently discarded).
    return coins
Esempio n. 51
0
class Novel(Base):
    """Declarative model reflected from the existing NOVEL table."""
    # BUG FIX: ``auto_increment`` is not a valid Table keyword argument —
    # SQLAlchemy only accepts dialect-prefixed kwargs (e.g.
    # ``mysql_auto_increment``) and raises TypeError for unknown ones.
    # Reflection (autoload=True) already picks up the primary key's
    # autoincrement behaviour, so the flag is simply removed.
    __table__ = Table('NOVEL', metadata, autoload=True)
Esempio n. 52
0
def upgrade(migrate_engine):
    """Migrate sequencer details out of ``request_type.datatx_info``.

    Adds a ``sequencer_id`` foreign-key column to ``request_type``, copies
    each request type's ``datatx_info`` dict into the ``sequencer`` table
    (deduplicated via ``get_sequencer_id``, with a generated form
    definition), and finally drops the now-redundant ``datatx_info``
    column. Bails out early if either required table cannot be reflected.
    """
    metadata.bind = migrate_engine
    # echo the migration's module docstring for operator visibility
    print(__doc__)
    metadata.reflect()
    try:
        RequestType_table = Table("request_type", metadata, autoload=True)
    except NoSuchTableError:
        RequestType_table = None
        log.debug("Failed loading table 'request_type'")
    if RequestType_table is None:
        return
    # load the sequencer table
    try:
        Sequencer_table = Table("sequencer", metadata, autoload=True)
    except NoSuchTableError:
        Sequencer_table = None
        log.debug("Failed loading table 'sequencer'")
    if Sequencer_table is None:
        return
    # create foreign key field to the sequencer table in the request_type table
    try:
        col = Column("sequencer_id",
                     Integer,
                     ForeignKey("sequencer.id"),
                     nullable=True)
        col.create(RequestType_table)
        assert col is RequestType_table.c.sequencer_id
    except Exception:
        # best-effort: log and continue so the data copy below still runs
        log.exception(
            "Creating column 'sequencer_id' in the 'request_type' table failed."
        )
    # copy the sequencer information contained in the 'datatx_info' column
    # of the request_type table to the form values referenced in the sequencer table
    cmd = "SELECT id, name, datatx_info FROM request_type ORDER BY id ASC"
    result = migrate_engine.execute(cmd)
    results_list = result.fetchall()
    # Proceed only if request_types exists
    if len(results_list):
        # In this migration script the all the contents of the datatx_info are stored as form_values
        # with a pointer to the sequencer table. This way the sequencer information can be customized
        # by the admin and is no longer restricted to host, username, password, data directory.
        # For the existing request_types in the database, we add a new form_definition
        # with these 4 fields. Then we populate the sequencer table with unique datatx_info
        # column from the existing request_types.
        sequencer_form_definition_id = create_sequencer_form_definition(
            migrate_engine)
        sequencer_index = 1
        for row in results_list:
            request_type_id = row[0]
            sequencer_info = str(row[2])  # datatx_info column
            # skip if sequencer_info is empty
            if not sequencer_info.strip() or sequencer_info in [
                    'None', 'null'
            ]:
                continue
            sequencer_info = loads(sequencer_info.strip())
            # proceed only if sequencer_info is a valid dict
            if sequencer_info and isinstance(sequencer_info, dict):
                # check if this sequencer has already been added to the sequencer table
                sequencer_id = get_sequencer_id(migrate_engine, sequencer_info)
                if not sequencer_id:
                    # add to the sequencer table
                    sequencer_id = add_sequencer(migrate_engine,
                                                 sequencer_index,
                                                 sequencer_form_definition_id,
                                                 sequencer_info)
                # now update the sequencer_id column in request_type table
                update_sequencer_id_in_request_type(migrate_engine,
                                                    request_type_id,
                                                    sequencer_id)
                sequencer_index = sequencer_index + 1

    # Finally delete the 'datatx_info' column from the request_type table
    try:
        RequestType_table.c.datatx_info.drop()
    except Exception:
        log.exception(
            "Deleting column 'datatx_info' in the 'request_type' table failed."
        )
Esempio n. 53
0
        return Markup(
            '<a href="' + url_for(vn) + '" class="btn btn-sm btn-primary" data-toggle="tooltip" rel="tooltip"'+
            'title="Print">' +
            '<i class="fa fa-edit"></i>' +
            '</a>')
    id = Column(Integer, primary_key=True, autoincrement=True)
    law_firm = Column(ForeignKey(u'lawfirm.id'), index=True)
    barnumber = Column(String(20))

    lawfirm = relationship(u'Lawfirm', primaryjoin='Attorney.law_firm == Lawfirm.id', backref=u'attorneys')
    hearing = relationship(u'Hearing', secondary='attorney_hearing', backref=u'attorneys')


# Association table implementing the many-to-many link between attorneys
# and hearings; used as ``secondary`` by the Attorney.hearing relationship
# defined above. Composite primary key prevents duplicate pairs.
attorney_hearing = Table(
    'attorney_hearing', Model.metadata,
    Column('attorney', ForeignKey(u'attorney.id'), primary_key=True, nullable=False),
    Column('hearing', ForeignKey(u'hearing.id'), primary_key=True, nullable=False, index=True)
)


class Bail(AuditMixin, Model):
    """Bail granted at a hearing for a defendant, including payment details."""
    __tablename__ = 'bail'

    id = Column(Integer, primary_key=True, autoincrement=True)
    # the hearing and defendant this bail record belongs to
    hearing = Column(ForeignKey(u'hearing.id'), nullable=False, index=True)
    defendant = Column(ForeignKey(u'defendant.id'), nullable=False, index=True)
    # monetary amount with 2 decimal places
    amountgranted = Column(Numeric(12, 2), nullable=False)
    noofsureties = Column(Integer, nullable=False)
    # payment status and receipt details; paydate is required even when
    # unpaid — presumably filled with a placeholder; confirm with callers
    paid = Column(Boolean, nullable=False)
    paydate = Column(Date, nullable=False)
    receiptno = Column(String(100), nullable=False)
Esempio n. 54
0
class Contents(Base):
    """Declarative model reflected from the existing CONTENTS table."""
    # BUG FIX: ``auto_increment`` is not a valid Table keyword argument —
    # SQLAlchemy only accepts dialect-prefixed kwargs (e.g.
    # ``mysql_auto_increment``) and raises TypeError for unknown ones.
    # Reflection (autoload=True) already picks up the primary key's
    # autoincrement behaviour, so the flag is simply removed.
    __table__ = Table('CONTENTS', metadata, autoload=True)
Esempio n. 55
0
        obj, made = get_or_create(session, cls, id=id, database_id=database_id)
        return obj

    def __repr__(self):
        """Debug representation: accession id and owning database name."""
        accession = self.id
        database_name = self.database.name
        return "<ReferenceAccessionNumber {} {}>".format(accession, database_name)


class HasReferenceAccessionNumber(object):
    """Mixin adding a many-to-many ``references`` relationship.

    Each concrete subclass gets its own association table, named
    ``<tablename>_ReferenceAccessionNumber``, linking the entity to
    ReferenceAccessionNumber rows by (accession_code, database_id).
    """
    @declared_attr
    def references(cls):
        # Per-subclass association table; entity rows cascade-delete so
        # links disappear with their owning entity.
        reference_number_association = Table(
            "%s_ReferenceAccessionNumber" % cls.__tablename__, cls.metadata,
            Column("accession_code", String(64), primary_key=True),
            Column("database_id", Integer, primary_key=True),
            Column("entity_id",
                   Integer,
                   ForeignKey("%s.id" % cls.__tablename__, ondelete="CASCADE"),
                   primary_key=True),
            # composite FK to ReferenceAccessionNumber's compound key
            ForeignKeyConstraint(["accession_code", "database_id"], [
                "ReferenceAccessionNumber.id",
                "ReferenceAccessionNumber.database_id"
            ]))
        # expose the table on the class for direct queries
        cls.ReferenceAccessionAssocationTable = reference_number_association
        return relationship(ReferenceAccessionNumber,
                            secondary=reference_number_association)


# Single-column scratch table on the shared Base metadata; its purpose is
# not visible in this module — presumably a store of template numbers used
# elsewhere; confirm at usage sites.
TemplateNumberStore = Table("TemplateNumberStore", Base.metadata,
                            Column("value", Integer))
Esempio n. 56
0
	name = Column(String(50))

#****************************************************
# Define association tables and models for theme-album and mood-album relations.
# The table allows for db.relationship.secondary
# The model allows for /admin view

class ThemeAssociation(Base):
	"""Model view of a theme<->album association row (for the /admin view)."""
	__tablename__ = 'theme_associations'
	# composite primary key: one row per (album, theme) pair
	album_id = Column(String(50), primary_key=True,)
	theme_name = Column(String(50), primary_key=True)


# Core Table registered for the same 'theme_associations' table as the
# ThemeAssociation model above; extend_existing=True permits the second
# registration, and this object is suitable for relationship(secondary=...).
theme_associations = Table('theme_associations', Base.metadata,
	Column('album_id', String(50), ForeignKey('album.id'), primary_key=True),
	Column('theme_name', String(50), ForeignKey('theme.name'), primary_key=True),
	extend_existing=True
)


class MoodAssociation(Base):
	"""Model view of a mood<->album association row (for the /admin view)."""
	__tablename__ = 'mood_associations'
	# composite primary key: one row per (album, mood) pair
	album_id = Column(String(50), primary_key=True)
	mood_name = Column(String(50), primary_key=True)

# Core Table registered for the same 'mood_associations' table as the
# MoodAssociation model above; extend_existing=True permits the second
# registration, and this object is suitable for relationship(secondary=...).
mood_associations = Table('mood_associations', Base.metadata,
	Column('album_id', String(50), ForeignKey('album.id'), primary_key=True),
	Column('mood_name', String(50), ForeignKey('mood.name'), primary_key=True),
	extend_existing=True
)
Esempio n. 57
0
    CategoryId = Column(Integer, nullable=False)
    QuantityPerUnit = Column(String(8000))
    UnitPrice = Column(DECIMAL, nullable=False)
    UnitsInStock = Column(Integer, nullable=False)
    UnitsOnOrder = Column(Integer, nullable=False)
    ReorderLevel = Column(Integer, nullable=False)
    Discontinued = Column(Integer, nullable=False)


# Denormalized product/category/supplier result set — presumably a database
# view given the "_V" suffix (confirm against the schema). Declared as a
# plain Table rather than a model class because it has no primary key.
t_ProductDetails_V = Table('ProductDetails_V', metadata, Column('Id', Integer),
                           Column('ProductName', String(8000)),
                           Column('SupplierId', Integer),
                           Column('CategoryId', Integer),
                           Column('QuantityPerUnit', String(8000)),
                           Column('UnitPrice', DECIMAL),
                           Column('UnitsInStock', Integer),
                           Column('UnitsOnOrder', Integer),
                           Column('ReorderLevel', Integer),
                           Column('Discontinued', Integer),
                           Column('CategoryName', String(8000)),
                           Column('CategoryDescription', String(8000)),
                           Column('SupplierName', String(8000)),
                           Column('SupplierRegion', String(8000)))


class Region(BaseMixin, Model):
    """Region lookup table: surrogate Id plus a free-form description."""
    __tablename__ = 'Region'

    Id = Column(Integer, primary_key=True)
    RegionDescription = Column(String(8000))

Esempio n. 58
0
def initialize():
    """(Re)create the weibo crawler tables and prime the session engines.

    Drops the ORM-mapped ``WeiboStatusItem``/``MostRecentWeibo`` tables,
    re-creates the ``user_weibo_status`` and ``user_most_recent_weibo``
    tables if they do not already exist, then populates the crawler
    session-manager engines.

    NOTE(review): the connection URL is hard-coded and ``drop()`` raises if
    a table is absent — confirm both mapped tables always exist before this
    runs, or guard the drops with ``checkfirst``.
    """
    engine = create_engine('mysql://[email protected]:3306/weibo')
    metadata = MetaData(engine)

    # Drop the ORM-mapped tables so they are rebuilt from scratch below.
    WeiboStatusItem.__table__.drop(engine)
    MostRecentWeibo.__table__.drop(engine)

    if not engine.dialect.has_table(
            engine, 'user_weibo_status'):  # If table don't exist, Create.

        # One row per crawled weibo status.
        Table(
            'user_weibo_status',
            metadata,
            Column('status_id', VARCHAR(60), primary_key=True),
            Column('user_id', VARCHAR(60)),
            # BUG FIX: 'user_name' was declared twice; the duplicate
            # silently replaced the first definition, so it is declared
            # exactly once now.
            Column('user_name', VARCHAR(60)),
            Column('creation_time', VARCHAR(60), nullable=False, default=''),
            Column('text', TEXT, nullable=True, default=None),
            Column('reposts_count', INTEGER),
            Column('attitudes_count', INTEGER),
            Column('comments_count', INTEGER),
            Column('source', TEXT),
        )

    if not engine.dialect.has_table(
            engine, 'user_most_recent_weibo'):  # If table don't exist, Create.

        # One row per user holding that user's latest status.
        Table('user_most_recent_weibo', metadata,
              Column('user_id', VARCHAR(60), primary_key=True),
              Column('user_name', VARCHAR(60)), Column('status_id',
                                                       VARCHAR(60)),
              Column('creation_time', VARCHAR(60), nullable=False, default=''),
              Column('text', TEXT, nullable=True, default=None))

    # Issue CREATE TABLE for everything registered above (skips existing).
    metadata.create_all()

    mgr = CrawlerSessionManager(db_config)

    mgr.populate_engines()

    return
Esempio n. 59
0
def make_sql_table(table,
                   table_name,
                   dialect=None,
                   db_schema=None,
                   constraints=True,
                   unique_constraint=(),
                   connection=None,
                   min_col_len=1,
                   col_len_multiplier=1):
    """
    Generates a SQL alchemy table from an agate table.

    :param table: source ``agate.Table`` whose columns and data drive the
        generated schema.
    :param table_name: name of the SQL table to generate.
    :param dialect: target dialect name (e.g. ``'mysql'``); adjusts type
        mapping and constraint sizing.
    :param db_schema: optional database schema for the table.
    :param constraints: when True, derive length/precision/nullability
        constraints from the data.
    :param unique_constraint: iterable of column names to wrap in a single
        UNIQUE constraint. (BUG FIX: was a mutable default ``[]``; now an
        immutable empty tuple with identical truthiness/iteration.)
    :param connection: SQLAlchemy connection/engine bound to the MetaData.
    :param min_col_len: lower bound for derived VARCHAR lengths.
    :param col_len_multiplier: safety multiplier applied to the observed
        maximum text length.
    """
    metadata = MetaData(connection)
    sql_table = Table(table_name, metadata, schema=db_schema)

    # Dialect-specific overrides for the generic agate->SQL type map.
    SQL_TYPE_MAP[agate.Boolean] = BOOLEAN_MAP.get(dialect, BOOLEAN)
    SQL_TYPE_MAP[agate.Number] = NUMBER_MAP.get(dialect, DECIMAL)
    SQL_TYPE_MAP[agate.TimeDelta] = INTERVAL_MAP.get(dialect, Interval)

    for column_name, column in table.columns.items():
        sql_column_type = None
        sql_type_kwargs = {}
        sql_column_kwargs = {}

        if constraints:
            if isinstance(column.data_type, agate.Text) and dialect == 'mysql':
                # Size VARCHAR to the observed maximum, padded by the
                # multiplier; fall back to TEXT past MySQL's row limit.
                length = table.aggregate(agate.MaxLength(
                    column_name)) * decimal.Decimal(col_len_multiplier)
                if length > 21844:
                    # @see https://dev.mysql.com/doc/refman/5.7/en/string-type-overview.html
                    sql_column_type = TEXT
                else:
                    # If length is zero, SQLAlchemy may raise "VARCHAR requires a length on dialect mysql".
                    sql_type_kwargs[
                        'length'] = length if length >= min_col_len else min_col_len

            # PostgreSQL and SQLite don't have scale default 0.
            # @see https://www.postgresql.org/docs/9.2/static/datatype-numeric.html
            # @see https://www.sqlite.org/datatype3.html
            if isinstance(
                    column.data_type,
                    agate.Number) and dialect in ('mssql', 'mysql', 'oracle'):
                # MySQL has precision range 1-65 and default 10, scale default 0.
                # @see https://dev.mysql.com/doc/refman/5.7/en/fixed-point-types.html
                # Oracle has precision range 1-38 and default 38, scale default 0.
                # @see https://docs.oracle.com/cd/B28359_01/server.111/b28318/datatype.htm#CNCPT1832
                # SQL Server has range 1-38 and default 18, scale default 0.
                # @see https://docs.microsoft.com/en-us/sql/t-sql/data-types/decimal-and-numeric-transact-sql
                sql_type_kwargs['precision'] = 38
                sql_type_kwargs['scale'] = table.aggregate(
                    agate.MaxPrecision(column_name))

            # Avoid errors due to NO_ZERO_DATE.
            # @see http://dev.mysql.com/doc/refman/5.7/en/sql-mode.html#sqlmode_no_zero_date
            if not isinstance(column.data_type, agate.DateTime):
                sql_column_kwargs['nullable'] = table.aggregate(
                    agate.HasNulls(column_name))

        sql_table.append_column(
            make_sql_column(column_name, column, sql_type_kwargs,
                            sql_column_kwargs, sql_column_type))

    if unique_constraint:
        sql_table.append_constraint(UniqueConstraint(*unique_constraint))

    return sql_table
Esempio n. 60
0
        msg = "<Tag file:{!r} entity:{!r} value:{!r}>"
        return msg.format(self.file_path, self.entity_name, self.value)

    @reconstructor
    def _init_on_load(self):
        """Rehydrate ``value``/``dtype`` from the persisted ``_dtype``/``_value``.

        Runs automatically when the object is loaded from the database
        (SQLAlchemy ``reconstructor`` hook).

        :raises ValueError: if the stored ``_dtype`` is not one of
            'str', 'float', 'int', 'bool', or 'json'.
        """
        if self._dtype not in ('str', 'float', 'int', 'bool', 'json'):
            raise ValueError("Invalid dtype '{}'. Must be one of 'int', "
                             "'float', 'bool', 'str', or 'json'.".format(
                                 self._dtype))
        if self._dtype == 'json':
            self.value = json.loads(self._value)
            self.dtype = 'json'
        else:
            # SECURITY/IDIOM FIX: map the whitelisted dtype name to the
            # corresponding builtin instead of eval()-ing it — identical
            # behaviour for the allowed names, no dynamic code execution.
            converters = {'str': str, 'float': float, 'int': int, 'bool': bool}
            self.dtype = converters[self._dtype]
            self.value = self.dtype(self._value)


class FileAssociation(Base):
    """Directed association between two files, keyed by (src, dst, kind)."""

    __tablename__ = 'associations'

    # composite primary key: source path, destination path, and the
    # association kind — allows multiple kinds of link between the same pair
    src = Column(String, ForeignKey('files.path'), primary_key=True)
    dst = Column(String, ForeignKey('files.path'), primary_key=True)
    kind = Column(String, primary_key=True)


# Association objects
# Many-to-many link table between configs and entities, joined by name on
# both sides; no surrogate primary key is declared.
config_to_entity_map = Table(
    'config_to_entity_map', Base.metadata,
    Column('config', String, ForeignKey('configs.name')),
    Column('entity', String, ForeignKey('entities.name')))