예제 #1
0
    def test_tokens(self):
        """Verify %(schema)s / %(table)s / %(fullname)s token expansion,
        both derived from the table and overridden via ``context``."""
        m = MetaData()
        plain = Table('t', m, Column('id', Integer))
        with_schema = Table('t', m, Column('id', Integer), schema='s')
        quoted = Table('t t', m, Column('id', Integer))
        quoted_schema = Table('t t', m, Column('id', Integer),
                              schema='s s')
        dialect = self.mock_engine().dialect

        # Tokens are taken from the table the DDL is compiled against.
        ddl = DDL('%(schema)s-%(table)s-%(fullname)s')
        for tbl, expected in ((plain, '-t-t'),
                              (with_schema, 's-t-s.t'),
                              (quoted, '-"t t"-"t t"'),
                              (quoted_schema, '"s s"-"t t"-"s s"."t t"')):
            self.assert_compile(ddl.against(tbl), expected, dialect=dialect)

        # overrides are used piece-meal and verbatim.

        ddl = DDL('%(schema)s-%(table)s-%(fullname)s-%(bonus)s',
                  context={'schema': 'S S', 'table': 'T T', 'bonus': 'b'
                  })
        for tbl, expected in ((plain, 'S S-T T-t-b'),
                              (with_schema, 'S S-T T-s.t-b'),
                              (quoted, 'S S-T T-"t t"-b'),
                              (quoted_schema, 'S S-T T-"s s"."t t"-b')):
            self.assert_compile(ddl.against(tbl), expected, dialect=dialect)
예제 #2
0
    def test_ddl_execute(self):
        """A DDL('SELECT 1') runs through every supported execute() spelling,
        and raises UnboundExecutionError when no bind is available.
        """
        engine = create_engine('sqlite:///')
        cx = engine.connect()
        table = self.users
        ddl = DDL('SELECT 1')

        # Every engine/connection/ddl execute() form, with and without a
        # schema item, should run the statement and return its row.
        for py in ('engine.execute(ddl)',
                   'engine.execute(ddl, table)',
                   'cx.execute(ddl)',
                   'cx.execute(ddl, table)',
                   'ddl.execute(engine)',
                   'ddl.execute(engine, table)',
                   'ddl.execute(cx)',
                   'ddl.execute(cx, table)'):
            r = eval(py)
            assert list(r) == [(1,)], py

        # Without an explicit bind argument or a .bind, execution must fail.
        for py in ('ddl.execute()',
                   'ddl.execute(schema_item=table)'):
            try:
                r = eval(py)
                assert False
            except tsa.exc.UnboundExecutionError:
                pass

        # Once .bind is assigned, the bind-less forms succeed.
        for bind in engine, cx:
            ddl.bind = bind
            for py in ('ddl.execute()',
                       'ddl.execute(schema_item=table)'):
                r = eval(py)
                assert list(r) == [(1,)], py
예제 #3
0
    def test_ddl_execute(self):
        """A DDL('SELECT 1') runs through every supported execute() spelling,
        and raises UnboundExecutionError when no bind is available.
        """
        engine = create_engine("sqlite:///")
        cx = engine.connect()
        table = self.users
        ddl = DDL("SELECT 1")

        # Every engine/connection/ddl execute() form, with and without a
        # target, should run the statement and return its row.
        for py in (
            "engine.execute(ddl)",
            "engine.execute(ddl, table)",
            "cx.execute(ddl)",
            "cx.execute(ddl, table)",
            "ddl.execute(engine)",
            "ddl.execute(engine, table)",
            "ddl.execute(cx)",
            "ddl.execute(cx, table)",
        ):
            r = eval(py)
            assert list(r) == [(1,)], py

        # Without an explicit bind argument or a .bind, execution must fail.
        for py in ("ddl.execute()", "ddl.execute(target=table)"):
            try:
                r = eval(py)
                assert False
            except tsa.exc.UnboundExecutionError:
                pass

        # Once .bind is assigned, the bind-less forms succeed.
        for bind in engine, cx:
            ddl.bind = bind
            for py in ("ddl.execute()", "ddl.execute(target=table)"):
                r = eval(py)
                assert list(r) == [(1,)], py
예제 #4
0
    def test_tokens(self):
        """Token substitution in DDL strings, derived from the table or
        overridden piece-meal via the context dict."""
        m = MetaData()
        plain = Table("t", m, Column("id", Integer))
        with_schema = Table("t", m, Column("id", Integer), schema="s")
        quoted = Table("t t", m, Column("id", Integer))
        quoted_schema = Table("t t", m, Column("id", Integer), schema="s s")
        dialect = self.mock_engine().dialect

        # Tokens derive from the target table; odd names get quoted.
        ddl = DDL("%(schema)s-%(table)s-%(fullname)s")
        cases = [
            (plain, "-t-t"),
            (with_schema, "s-t-s.t"),
            (quoted, '-"t t"-"t t"'),
            (quoted_schema, '"s s"-"t t"-"s s"."t t"'),
        ]
        for tbl, expected in cases:
            self.assert_compile(ddl.against(tbl), expected, dialect=dialect)

        # overrides are used piece-meal and verbatim.

        ddl = DDL(
            "%(schema)s-%(table)s-%(fullname)s-%(bonus)s",
            context={"schema": "S S", "table": "T T", "bonus": "b"},
        )
        cases = [
            (plain, "S S-T T-t-b"),
            (with_schema, "S S-T T-s.t-b"),
            (quoted, 'S S-T T-"t t"-b'),
            (quoted_schema, 'S S-T T-"s s"."t t"-b'),
        ]
        for tbl, expected in cases:
            self.assert_compile(ddl.against(tbl), expected, dialect=dialect)
예제 #5
0
    def test_metadata(self):
        """DDL listeners attached to MetaData fire only during their own
        create/drop phase."""
        metadata, engine = self.metadata, self.engine

        # One statement per lifecycle hook.
        for hook, stmt in (('before_create', 'mxyzptlk'),
                           ('after_create', 'klptzyxm'),
                           ('before_drop', 'xyzzy'),
                           ('after_drop', 'fnord')):
            event.listen(metadata, hook, DDL(stmt))

        metadata.create_all()
        executed = [str(x) for x in engine.mock]
        assert 'mxyzptlk' in executed
        assert 'klptzyxm' in executed
        assert 'xyzzy' not in executed
        assert 'fnord' not in executed
        del engine.mock[:]
        metadata.drop_all()
        executed = [str(x) for x in engine.mock]
        assert 'mxyzptlk' not in executed
        assert 'klptzyxm' not in executed
        assert 'xyzzy' in executed
        assert 'fnord' in executed
예제 #6
0
    def test_metadata(self):
        """Create/drop DDL hooks on MetaData fire at the matching phase
        only."""
        metadata, engine = self.metadata, self.engine

        event.listen(metadata, "before_create", DDL("mxyzptlk"))
        event.listen(metadata, "after_create", DDL("klptzyxm"))
        event.listen(metadata, "before_drop", DDL("xyzzy"))
        event.listen(metadata, "after_drop", DDL("fnord"))

        metadata.create_all()
        # Only the create-phase statements were emitted.
        seen = set(str(x) for x in engine.mock)
        assert {"mxyzptlk", "klptzyxm"} <= seen
        assert not {"xyzzy", "fnord"} & seen
        del engine.mock[:]
        metadata.drop_all()
        # Only the drop-phase statements were emitted.
        seen = set(str(x) for x in engine.mock)
        assert not {"mxyzptlk", "klptzyxm"} & seen
        assert {"xyzzy", "fnord"} <= seen
예제 #7
0
    def test_filter(self):
        """execute_if() filters by dialect name or by callable predicate."""
        cx = self.mock_engine()

        tbl = Table("t", MetaData(), Column("id", Integer))
        target = cx.name

        # Unconditional DDL always executes.
        assert DDL("")._should_execute(tbl, cx)
        # Dialect filter: a matching name passes, a mismatch blocks.
        assert DDL("").execute_if(dialect=target)._should_execute(tbl, cx)
        assert not (
            DDL("").execute_if(dialect="bogus")._should_execute(tbl, cx)
        )
        # Callable filter: its return value decides execution.
        always = lambda d, y, z, **kw: True
        assert DDL("").execute_if(callable_=always)._should_execute(tbl, cx)
        not_bogus = lambda d, y, z, **kw: z.engine.name != "bogus"
        assert (
            DDL("").execute_if(callable_=not_bogus)._should_execute(tbl, cx)
        )
예제 #8
0
    def test_metadata_deprecated(self):
        """Legacy DDL.execute_at() hooks behave like the modern event
        listeners: each statement runs only in its own phase."""
        metadata, engine = self.metadata, self.engine

        for when, stmt in (("before-create", "mxyzptlk"),
                           ("after-create", "klptzyxm"),
                           ("before-drop", "xyzzy"),
                           ("after-drop", "fnord")):
            DDL(stmt).execute_at(when, metadata)

        metadata.create_all()
        emitted = [str(x) for x in engine.mock]
        for stmt in ("mxyzptlk", "klptzyxm"):
            assert stmt in emitted
        for stmt in ("xyzzy", "fnord"):
            assert stmt not in emitted
        del engine.mock[:]
        metadata.drop_all()
        emitted = [str(x) for x in engine.mock]
        for stmt in ("mxyzptlk", "klptzyxm"):
            assert stmt not in emitted
        for stmt in ("xyzzy", "fnord"):
            assert stmt in emitted
예제 #9
0
 def fts_virtual_table(self) -> DDL:
     """Build the CREATE VIRTUAL TABLE ... USING fts5 statement as a DDL.

     The %-style placeholders are filled here, up front, so the returned
     DDL carries a fully rendered statement.
     """
     sql = """
     CREATE VIRTUAL TABLE IF NOT EXISTS %(name)s
     USING fts5(%(columns)s, content=%(view_name)s, content_rowid=%(rowid_name)s)
     """ % {
         'name': self.idx_name,
         'columns': ', '.join(c.key for c in self.columns),
         'view_name': self.view_name,
         'rowid_name': self.rowid_c.key,
     }
     return DDL(sql)
예제 #10
0
    def test_metadata_deprecated(self):
        """Deprecated DDL.execute_at() still fires at the right phases."""
        metadata, engine = self.metadata, self.engine

        for stmt, when in (('mxyzptlk', 'before-create'),
                           ('klptzyxm', 'after-create'),
                           ('xyzzy', 'before-drop'),
                           ('fnord', 'after-drop')):
            DDL(stmt).execute_at(when, metadata)

        metadata.create_all()
        mock_sql = [str(x) for x in engine.mock]
        assert 'mxyzptlk' in mock_sql and 'klptzyxm' in mock_sql
        assert 'xyzzy' not in mock_sql and 'fnord' not in mock_sql
        del engine.mock[:]
        metadata.drop_all()
        mock_sql = [str(x) for x in engine.mock]
        assert 'mxyzptlk' not in mock_sql and 'klptzyxm' not in mock_sql
        assert 'xyzzy' in mock_sql and 'fnord' in mock_sql
예제 #11
0
    def test_filter_deprecated(self):
        """The deprecated DDL(on=...) filter still gates execution, and each
        use of ``on`` emits a deprecation warning.
        """
        cx = self.engine

        tbl = Table("t", MetaData(), Column("id", Integer))
        target = cx.name

        # No filter: always executes (and no warning expected).
        assert DDL("")._should_execute_deprecated("x", tbl, cx)
        # String filter: a matching dialect name passes...
        with testing.expect_deprecated(".* is deprecated .*"):
            assert DDL("", on=target)._should_execute_deprecated("x", tbl, cx)
        # ...a non-matching one blocks execution.
        with testing.expect_deprecated(".* is deprecated .*"):
            assert not DDL("", on="bogus")._should_execute_deprecated(
                "x", tbl, cx
            )
        # Callable filter: its boolean result decides; the last argument
        # exposes .engine (it is the bind, per the z.engine.name check below).
        with testing.expect_deprecated(".* is deprecated .*"):
            assert DDL(
                "", on=lambda d, x, y, z: True
            )._should_execute_deprecated("x", tbl, cx)
        with testing.expect_deprecated(".* is deprecated .*"):
            assert DDL(
                "", on=lambda d, x, y, z: z.engine.name != "bogus"
            )._should_execute_deprecated("x", tbl, cx)
예제 #12
0
def init():
  """Initializes the service.

  Creates the Elasticsearch indexes (recreating them under the 'test'
  config), applies their mappings and settings, then creates the
  application database, registering the bit_count() SQL function before
  the tables are built.
  """

  def _load_json(path):
    # Context manager so the handle is closed promptly; the original
    # bare open() calls leaked file descriptors.
    with open(path) as f:
      return json.load(f)

  # Create ES indexes.
  es = Elasticsearch(app.config['ELASTICSEARCH_URL'])
  for key in ['ELASTICSEARCH_GLOSSARY', 'ELASTICSEARCH_SIMILARITY']:
    try:
      if config_name == 'test':
        # Start tests from a clean slate; a missing index is fine (404).
        es.indices.delete(index=app.config[key], ignore=[400, 404])
      es.indices.create(index=app.config[key])
    except TransportError as e:
      # ignore already existing index
      if e.error == 'resource_already_exists_exception':
        pass
      else:
        raise
  es.indices.put_mapping(
    doc_type='_doc',
    body=_load_json('./elasticsearch/alegre_glossary.json'),
    index=app.config['ELASTICSEARCH_GLOSSARY']
  )
  es.indices.put_mapping(
    doc_type='_doc',
    body=_load_json('./elasticsearch/alegre_similarity.json'),
    index=app.config['ELASTICSEARCH_SIMILARITY']
  )
  # The similarity index is closed while its settings are applied, then
  # reopened.
  es.indices.close(index=app.config['ELASTICSEARCH_SIMILARITY'])
  es.indices.put_settings(
    body=_load_json('./elasticsearch/alegre_similarity_settings.json'),
    index=app.config['ELASTICSEARCH_SIMILARITY']
  )
  es.indices.open(index=app.config['ELASTICSEARCH_SIMILARITY'])

  # Create database.
  with app.app_context():
    if not database_exists(db.engine.url):
      create_database(db.engine.url)

    if config_name == 'test':
      db.drop_all()

    # bit_count() counts the set bits of a bigint; it is registered to run
    # before create_all so it exists before any dependent DDL.
    sqlalchemy.event.listen(
      db.metadata,
      'before_create',
      DDL("""
        CREATE OR REPLACE FUNCTION bit_count(value bigint)
        RETURNS integer
        AS $$ SELECT length(replace(value::bit(64)::text,'0','')); $$
        LANGUAGE SQL IMMUTABLE STRICT;
      """)
    )

    db.create_all()
예제 #13
0
    def test_platform_escape(self):
        """test the escaping of % characters in the DDL construct."""

        compiler = testing.db.dialect.statement_compiler(
            testing.db.dialect, None)
        default_from = compiler.default_from()

        # Plain text() passes % through untouched.
        via_text = testing.db.execute(
            text("select 'foo%something'" + default_from)).scalar()
        eq_(via_text, 'foo%something')

        # DDL() interpolates %-tokens, so a literal % must be doubled.
        via_ddl = testing.db.execute(
            DDL("select 'foo%%something'" + default_from)).scalar()
        eq_(via_ddl, 'foo%something')
예제 #14
0
 def define_temp_tables(cls, metadata):
     # Temporary table in schema 'tmp', carrying a unique constraint and
     # an index so both can be reflected.
     user_tmp = Table(
         "user_tmp", metadata,
         Column("id", sa.INT, primary_key=True),
         Column('name', sa.VARCHAR(50)),
         Column('foo', sa.INT),
         sa.UniqueConstraint('name', name='user_tmp_uq'),
         sa.Index("user_tmp_ix", "foo"),
         prefixes=["TEMPORARY"],
         schema='tmp',
     )
     # Companion view only where the backend supports reflecting views
     # and creating temporary views; dropped before the table goes away.
     if testing.requires.view_reflection.enabled and \
             testing.requires.temporary_views.enabled:
         event.listen(
             user_tmp, "after_create",
             DDL("create view user_tmp_v as "
                 "select * from tmp.user_tmp")
         )
         event.listen(
             user_tmp, "before_drop",
             DDL("drop view user_tmp_v")
         )
예제 #15
0
    def _create_model(self, model):
        """Build and register a declarative model from a mixin class.

        Mixins declaring ``__inherit_from__`` are deferred until their base
        is registered. Optional ``__create_sql__`` / ``__drop_sql__``
        templates are attached as metadata DDL events.

        :param model: a mixin class (not an already-declarative class).
        :return: ``(model, name)`` tuple, or ``None`` when deferred.
        :raises ImproperlyConfigured: if *model* is already declarative.
        """
        model_name = model.__name__
        meta = type(self._base_declarative)
        if isinstance(model, meta):
            raise ImproperlyConfigured('Cannot register declarative classes '
                                       'only mixins allowed')
        base = getattr(model, '__inherit_from__', None)
        if base:
            if base not in self._declarative_register:
                # Parent not registered yet: queue this model until it is.
                models = self._bases.get(base)
                if not models:
                    self._bases[base] = models = []
                models.append(model)
                return
            else:
                base = self._declarative_register[base]
        else:
            base = self._base_declarative

        #
        # Create SqlAlchemy Model
        model = meta(model_name, (model, base), {})
        create = getattr(model, '__create_sql__', None)
        name = model_name.lower()
        if create:
            # BUG FIX: str.format() takes keyword arguments; passing the
            # mapping positionally (format({'name': name})) left "{name}"
            # placeholders unfillable and raised KeyError.
            event.listen(self.metadata,
                         'after_create',
                         DDL(create.format(name=name)))
            drop = getattr(model, '__drop_sql__', None)
            if not drop:
                logger.warning('Model %s has create statement but not drop. '
                               'To mute this warning add a __drop_sql__ '
                               'statement in the model class', name)
            else:
                event.listen(self.metadata,
                             'before_drop',
                             DDL(drop.format(name=name)))

        return model, name
예제 #16
0
    def test_ddl_execute(self):
        """A DDL('SELECT 1') executes on a Connection, with or without a
        target table argument."""
        engine = create_engine("sqlite:///")
        cx = engine.connect()
        table = self.users
        ddl = DDL("SELECT 1")

        for args in ((ddl,), (ddl, table)):
            result = cx.execute(*args)
            eq_(list(result), [(1, )])
예제 #17
0
 def _search_index_ddl(cls):
     """
     Returns the ddl for creating the actual search index.
     """
     tablename = cls.__tablename__
     vector_column = cls._get_search_option('search_vector_name')
     index_name = cls._get_search_option('search_index_name').format(
         table=tablename)
     statement = """
         CREATE INDEX {search_index_name} ON {table}
         USING gin({search_vector_name})
         """.format(table=quote_identifier(tablename),
                    search_index_name=index_name,
                    search_vector_name=vector_column)
     return DDL(statement)
예제 #18
0
def _setup_mysql_fulltext_indexes():
    """Register MySQL-only FULLTEXT index creation on the fulltext table.

    For every (name, columns) entry in ``_fulltext_indexes`` an
    'after_create' DDL is attached; execute_if() restricts it to the
    mysql dialect so other backends are unaffected. The doubled
    ``%%(table)s`` survives the local %-interpolation and is filled in
    later by the DDL itself.
    """
    # .items() replaces the Python 2-only .iteritems(), keeping this
    # module importable on Python 3 (it works identically on Python 2).
    for name, cols in _fulltext_indexes.items():
        sql = (
            'ALTER TABLE %%(table)s '
            'ADD FULLTEXT INDEX media_fulltext_%(name)s (%(cols)s)'
        ) % {
            'name': name,
            'cols': ', '.join(col.name for col in cols)
        }
        event.listen(
            media_fulltext,
            u'after_create',
            DDL(sql).execute_if(dialect=u'mysql')
        )
예제 #19
0
def CreateComment(element, comment):
    """
    Returns a DDL to comment on a column. This function can only be used in the
    context of an SQLAlchemy listen event or any other function that supports the
    SQLAlchemy %(fullname)s string formatting.
    """
    # NOTE(review): *comment* is interpolated verbatim; a single quote in it
    # would break the statement — sanitize or escape upstream.
    if isinstance(element, Table):
        statement = "COMMENT ON TABLE %(fullname)s IS '{0}'".format(comment)

    elif isinstance(element, Column):
        statement = "COMMENT ON COLUMN %(fullname)s.{0} IS '{1}'".format(
            element.name, comment)

    else:
        # Previously an unsupported element fell through and triggered an
        # opaque NameError on `statement`; fail explicitly instead.
        raise TypeError(
            'CreateComment() expects a Table or Column, got %r' % (element,))

    return DDL(statement)
예제 #20
0
    def define_temp_tables(cls, metadata):
        # HANA gets a GLOBAL TEMPORARY table with PRESERVE ROWS on commit;
        # every other backend uses a plain TEMPORARY prefix.
        if testing.against("hana"):
            kw = {
                'prefixes': ["GLOBAL TEMPORARY"],
                'oracle_on_commit': 'PRESERVE ROWS'
            }
        else:
            kw = {
                'prefixes': ["TEMPORARY"],
            }

        # Temporary table carrying a unique constraint and an index so
        # both can be reflected by the tests.
        user_tmp = Table("user_tmp", metadata,
                         Column("id", sa.INT, primary_key=True),
                         Column('name', sa.VARCHAR(50)), Column('foo', sa.INT),
                         sa.UniqueConstraint('name', name='user_tmp_uq'),
                         sa.Index("user_tmp_ix", "foo"), **kw)
        # Companion temporary view only where the backend supports
        # reflecting views and creating temporary ones.
        if testing.requires.view_reflection.enabled and \
                testing.requires.temporary_views.enabled:
            event.listen(
                user_tmp, "after_create",
                DDL("create temporary view user_tmp_v as "
                    "select * from user_tmp"))
            event.listen(user_tmp, "before_drop", DDL("drop view user_tmp_v"))
예제 #21
0
def create_partition_insert_trigger(table,
                                    interval=25000,
                                    column='biomolecule_id',
                                    part_label='biomol'):
    """
    Creates a trigger on the given (partitioned) table to insert entries into their correct partition based on biomolecule_id

    :param table: the partitioned Table to attach the trigger to.
    :param interval: partition width, passed to the trigger as TG_ARGV[0].
    :param column: column used to choose the partition ({col_id} below).
    :param part_label: label embedded in the partition table names.
    """

    # Two layers of templating are at work in this string:
    #  * .format() fills {col_id}/{part_label} now;
    #  * %(schema)s / %(table)s / %(fullname)s are left for DDL to fill
    #    at event time, which is why literal % appears doubled as %%.
    create_func = """
        CREATE OR REPLACE FUNCTION %(schema)s.%(table)s_insert_trigger()
        RETURNS TRIGGER AS $$
        DECLARE
            intvl integer;
            part_bound integer;
        BEGIN
            intvl = TG_ARGV[0]::integer;
            IF NEW.{col_id} %% intvl = 0 THEN
                part_bound := NEW.{col_id};
            ELSE
                part_bound := intvl * ((NEW.{col_id} / intvl) + 1);
            END IF;
            EXECUTE format('INSERT INTO %(fullname)s_{part_label}_le_%%s VALUES ($1.*);', part_bound) USING NEW;
            RETURN NULL;
        END;
        $$
        LANGUAGE plpgsql;
        """.format(col_id=column, part_label=part_label)

    # The trigger calls the function above with the partition interval.
    create_trigger = """
        CREATE TRIGGER insert_%(table)s_trigger
        BEFORE INSERT ON %(fullname)s
        FOR EACH ROW EXECUTE PROCEDURE %(schema)s.%(table)s_insert_trigger(%(intvl)s);
        """

    # Function first, then trigger; 'intvl' is supplied via DDL context.
    event.listen(table, 'after_create', DDL(create_func))
    event.listen(table, 'after_create',
                 DDL(create_trigger, context={'intvl': interval}))
예제 #22
0
    def test_tokens(self):
        """DDL token expansion via _expand(), with and without context
        overrides."""
        m = MetaData()
        bind = self.mock_engine()
        plain = Table('t', m, Column('id', Integer))
        schema_tbl = Table('t', m, Column('id', Integer), schema='s')
        quoted = Table('t t', m, Column('id', Integer))
        quoted_schema = Table('t t', m, Column('id', Integer), schema='s s')

        ddl = DDL('%(schema)s-%(table)s-%(fullname)s')

        for tbl, expected in ((plain, '-t-t'),
                              (schema_tbl, 's-t-s.t'),
                              (quoted, '-"t t"-"t t"'),
                              (quoted_schema, '"s s"-"t t"-"s s"."t t"')):
            self.assertEquals(ddl._expand(tbl, bind), expected)

        # overrides are used piece-meal and verbatim.
        ddl = DDL('%(schema)s-%(table)s-%(fullname)s-%(bonus)s',
                  context={'schema':'S S', 'table': 'T T', 'bonus': 'b'})
        for tbl, expected in ((plain, 'S S-T T-t-b'),
                              (schema_tbl, 'S S-T T-s.t-b'),
                              (quoted, 'S S-T T-"t t"-b'),
                              (quoted_schema, 'S S-T T-"s s"."t t"-b')):
            self.assertEquals(ddl._expand(tbl, bind), expected)
    def build_fulltext(cls):
        """
        build up fulltext index after table is created
        """
        # Only applies to classes that directly mix in FullText.
        if FullText not in cls.__bases__:
            return
        assert cls.__fulltext_columns__, "Model:{0.__name__} No FullText columns defined".format(
            cls)

        # Register an after_create DDL building the fulltext index over the
        # declared columns (identifiers escaped via escape_quote).
        event.listen(
            cls.__table__, 'after_create',
            DDL(
                MYSQL_BUILD_INDEX_QUERY.format(
                    cls, ", ".join(
                        (escape_quote(c) for c in cls.__fulltext_columns__)))))
예제 #24
0
def with_collkey_ddl():  # pragma: no cover
    """Register creation of collkey function.

    Can be called at module level in db initialization scripts to create the collkey
    function. Once a session is bound to an engine collkey can be used to create indexes
    or in order_by clauses, e.g.::

        Index('ducet', collkey(common.Value.name)).create(DBSession.bind)
    """
    # The function is implemented in C (collkey_icu.so), so it is only
    # installed on PostgreSQL; execute_if() skips every other dialect.
    event.listen(
        Base.metadata, 'before_create',
        DDL("""
CREATE OR REPLACE FUNCTION collkey (text, text, bool, int4, bool) RETURNS bytea
    LANGUAGE 'c' IMMUTABLE STRICT AS
    '$libdir/collkey_icu.so',
    'pgsqlext_collkey';
""").execute_if(dialect='postgresql'))
예제 #25
0
    def _search_vector_ddl(cls):
        """
        Returns the ddl for the search vector.
        """
        vector_name = cls._get_search_option('search_vector_name')
        statement = (
            """
            ALTER TABLE {table}
            ADD COLUMN {search_vector_name} tsvector
            """
            .format(
                table=quote_identifier(cls.__tablename__),
                search_vector_name=vector_name
            )
        )
        return DDL(statement)
예제 #26
0
파일: rack.py 프로젝트: papagr/TheLMA
def _setup_sqlite_ddl(table):
    """
    Barcode default for SQLite using a trigger and the ROWID as the sequence

    It does not conform to how the legacy DB is setup but testing on sqlite
    should not fail. Since we do not plan to use SQLite as the production
    database the DDL below serves only to support development/testing.
    """
    # NOTE(review): DDL(on=...) and execute_at() are the legacy pre-event
    # SQLAlchemy API (deprecated in modern releases) — consider
    # event.listen(table, 'after_create', DDL(...).execute_if(dialect='sqlite')).
    # The trigger left-pads the ROWID with zeros into an 8-digit barcode.
    DDL("""
    CREATE TRIGGER set_rack_barcode AFTER INSERT ON rack
    BEGIN
      UPDATE rack
        SET barcode =
          SUBSTR("00000000", length(new.rowid), 8-length(new.rowid)) ||
          new.rowid
        WHERE rowid = new.rowid;
    END;
    """,
        on='sqlite').execute_at('after-create', table)
예제 #27
0
    def _search_trigger_ddl(cls):
        """
        Returns the ddl for creating an automatically updated search trigger.
        """
        tablename = cls.__tablename__
        vector_name = cls._get_search_option('search_vector_name')
        trigger_name = cls._get_search_option(
            'search_trigger_name').format(table=tablename)

        # tsvector_update_trigger(vector_col, 'catalog', searchable cols...)
        arguments = [vector_name,
                     "'%s'" % cls._get_search_option('catalog')]
        arguments += cls.__searchable_columns__

        statement = """
            CREATE TRIGGER {search_trigger_name}
            BEFORE UPDATE OR INSERT ON {table}
            FOR EACH ROW EXECUTE PROCEDURE
            tsvector_update_trigger({arguments})
            """.format(search_trigger_name=trigger_name,
                       table=quote_identifier(tablename),
                       arguments=', '.join(arguments))
        return DDL(statement)
예제 #28
0
    def attach_ddl_listeners(self):
        """Attach create/drop DDL listeners for each processed tsvector
        column, replacing any listeners registered earlier."""
        # Remove all previously added listeners, so that same listener don't
        # get added twice in situations where class configuration happens in
        # multiple phases (issue #31).
        self.remove_listeners()

        for column in self.processed_columns:
            # This sets up the trigger that keeps the tsvector column up to
            # date.
            if column.type.columns:
                table = column.table
                # A dedicated search function (and its drop counterpart) is
                # only needed when weights are configured or a vectorizer
                # produces the tsvector.
                if (self.option(column, 'weights')
                        or vectorizer.contains_tsvector(column)):
                    self.add_listener((table, 'after_create',
                                       self.search_function_ddl(column)))
                    self.add_listener(
                        (table, 'after_drop',
                         DDL(str(DropSearchFunctionSQL(column)))))
                self.add_listener(
                    (table, 'after_create', self.search_trigger_ddl(column)))
예제 #29
0
    def test_platform_escape(self):
        """test the escaping of % characters in the DDL construct."""

        compiler = testing.db.dialect.statement_compiler(
            testing.db.dialect, None)
        default_from = compiler.default_from()

        # We're abusing the DDL()
        # construct here by pushing a SELECT through it
        # so that we can verify the round trip.
        # the DDL() will trigger autocommit, which prohibits
        # some DBAPIs from returning results (pyodbc), so we
        # run in an explicit transaction.
        with testing.db.begin() as conn:
            via_text = conn.execute(
                text("select 'foo%something'" + default_from)).scalar()
            eq_(via_text, 'foo%something')

            via_ddl = conn.execute(
                DDL("select 'foo%%something'" + default_from)).scalar()
            eq_(via_ddl, 'foo%something')
def engine(db):
    """Session-wide database engine"""
    engine = db.engine
    # PostGIS and the crmp schema must exist before any tables are built.
    engine.execute("create extension postgis")
    engine.execute(CreateSchema('crmp'))
    pycds.Base.metadata.create_all(bind=engine)
    # TODO: Is this CREATE OR REPLACE FUNCTION necessary?
    # NOTE(review): DaysInMonth() is presumably referenced by the weather
    # anomaly objects created just below — confirm before removing.
    sqlalchemy.event.listen(
        pycds.weather_anomaly.Base.metadata,
        'before_create',
        DDL('''
                CREATE OR REPLACE FUNCTION crmp.DaysInMonth(date) RETURNS double precision AS
                $$
                    SELECT EXTRACT(DAY FROM CAST(date_trunc('month', $1) + interval '1 month' - interval '1 day'
                    as timestamp));
                $$ LANGUAGE sql;
            ''')
    )
    pycds.weather_anomaly.Base.metadata.create_all(bind=engine)

    # Yield so the engine stays alive for the whole test session.
    yield engine
예제 #31
0
    def search_index_ddl(self, column):
        """
        Returns the ddl for creating the actual search index.

        :param column: TSVectorType typed SQLAlchemy column object
        """
        tablename = column.table.name
        index_name = self.option(column, 'search_index_name').format(
            table=tablename,
            column=column.name
        )
        statement = (
            """
            CREATE INDEX {search_index_name} ON {table}
            USING gin({search_vector_name})
            """
            .format(
                table=quote_identifier(tablename),
                search_index_name=index_name,
                search_vector_name=column.name
            )
        )
        return DDL(statement)
예제 #32
0
def install_trigram_indice_on_column(table, column):
	"""Create a GIN trigram index on *table*.*column* if it is missing."""
	idx_name = '{table}_{column}_trigram_idx'.format(table = table.__tablename__, column = column)
	create_idx_sql = '''
	CREATE INDEX
		{idx_name}
	ON
		{table}
	USING
		gin ({column} gin_trgm_ops)'''.format(idx_name=idx_name, table=table.__tablename__, column=column)

	try:
		# Casting the name to regclass raises if the index does not exist.
		db.engine.execute('''SELECT '{schema}.{idx}'::regclass;'''.format(schema='public', idx=idx_name))
		print("Do not need to create index", idx_name)
	except sqlalchemy.exc.ProgrammingError:
		# index doesn't exist, need to create it.
		print("Creating index {idx} on table {tbl}".format(idx=idx_name, tbl=table.__tablename__))
		db.engine.execute(
				DDL(
					create_idx_sql
				)
			)
예제 #33
0
def add_reports():
    """Handle the Add Report form: persist a new report row, then render
    the report page."""
    form = AddReportForm()

    if form.validate_on_submit():
        stu_usn = form.stu_usn.data
        st2_name = form.st2_name.data
        dept4 = form.dept4.data
        sem4 = form.sem4.data

        attpercentage = form.attpercentage.data

        new_report = report(stu_usn, st2_name, dept4, sem4, attpercentage)
        db.session.add(new_report)
        db.session.commit()

    return render_template('report.html', form=form)

    # NOTE(review): everything below is unreachable — it follows an
    # unconditional return. If registering this trigger is intended, the
    # code must be moved above the return (and probably out of the request
    # handler entirely, since after_create fires at table-creation time).
    update_subject_dept = DDL('''\
    CREATE TRIGGER update_subject_dept UPDATE OF tc_dept ON TEACHERS
    BEGIN
    UPDATE SUBJECT SET dept3 = 'eee' WHERE sub_id=4;
    END;''')
    event.listen(teacher.__table__, 'after_create', update_subject_dept)
예제 #34
0
파일: View.py 프로젝트: timseed/SQLORM_View
def create_view(name, selectable, metadata=Meta):
    """Build a Table stand-in for a database view and wire up its
    create/drop events on *metadata*.

    Returns the stand-in Table whose columns mirror *selectable*.
    """
    _mt = MetaData()  # temp metadata just for initial Table object creation
    t = Table(name, metadata)  # the actual mat view class is bound to metadata
    for c in selectable.c:
        t.append_column(Column(c.name, c.type, primary_key=True))
        print(f"View {name} adding Col {c.name}")

    # If the selectable exposes no primary key, synthesize one spanning
    # all of its columns.
    if not (any([c.primary_key for c in selectable.c])):
        t.append_constraint(
            PrimaryKeyConstraint(*[c.name for c in selectable.c]))
        # NOTE(review): `c` is the last loop variable here, so only the
        # final column's name is printed — probably not intended.
        print(f"Adding PK Constraint {c.name}")

    event.listen(metadata, 'after_create', CreateView(name, selectable))

    # Indexes attached to the stand-in are created by hand once the view
    # itself exists.
    @event.listens_for(metadata, 'after_create')
    def create_indexes(target, connection, **kw):
        print("After Create Fired")
        for idx in t.indexes:
            print("Creating Index")
            idx.create(connection)

    # Drop the view (if present) before metadata teardown.
    event.listen(metadata, 'before_drop', DDL('DROP VIEW IF EXISTS ' + name))

    return t
예제 #35
0
파일: models.py 프로젝트: BlueNexus/evesrp
            direction = Decimal(1)
        else:
            # A modifier already on a request is being voided
            direction = Decimal(-1)
        if isinstance(modifier, AbsoluteModifier):
            absolute += direction * modifier.value
        elif isinstance(modifier, RelativeModifier):
            relative += direction * modifier.value
        payout = (srp_request.base_payout + absolute) * \
                (Decimal(1) + relative)
        srp_request.payout = PrettyDecimal(payout)


# The next few lines are responsible for adding a full text search index on the
# Request.details column for MySQL. The %(table)s token is expanded by DDL
# at event time, and execute_if() limits both hooks to the mysql dialect.
_create_fts = DDL('CREATE FULLTEXT INDEX ix_%(table)s_details_fulltext '
                       'ON %(table)s (details);')
_drop_fts = DDL('DROP INDEX ix_%(table)s_details_fulltext ON %(table)s')


# Build the index right after the table is created...
event.listen(
        Request.__table__,
        'after_create',
        _create_fts.execute_if(dialect='mysql')
)


# ...and remove it before the table is dropped.
event.listen(
        Request.__table__,
        'before_drop',
        _drop_fts.execute_if(dialect='mysql')
)
예제 #36
0
파일: session.py 프로젝트: vigilo/models
def DDL(statement, when=None, obj=None, bind=None, dialect=None, context=None):
    """
    Build and run a DDL object using the API matching the version of
    SQLAlchemy currently in use (0.5/0.6 legacy API vs. 0.7+ events).

    This function supports two mutually-exclusive usages:
    -   Deferred: execute the statement when an event fires (for
        example when a given table of the model is created), by
        passing the ``when`` and ``obj`` parameters.
    -   Immediate: execute the statement right away, by passing only
        the ``bind`` parameter.

    @param statement: SQL command touching the database schema
        (hence, a DDL statement).
    @type statement: C{str}
    @param when: Name of the event to wait for before executing the
        DDL, e.g. "before-create".
    @type when: C{str}
    @param obj: Target object of the DDL to execute (usually a Table).
    @param bind: Database connection (Session or Metadata), used when
        the DDL must be executed immediately.
    @param dialect: Name of an SQL dialect supported by SQLAlchemy.
        The DDL is only executed if the dialect used by the database
        matches the one given here.
    @type dialect: C{str}
    @param context: Context passed to the DDL (interpolated) at
        execution time.
    @type context: C{dict}
    """
    # Exactly one of the two usages must be selected:
    # - deferred execution (when/obj given, no bind), or
    # - immediate execution (bind given, no when).
    # BUGFIX: the previous check was a tautology (always true) and never
    # enforced the documented mutual exclusivity.
    assert (bind is None) != (when is None), \
        "pass either 'bind' (immediate) or 'when' (deferred), not both"
    if context is None:
        context = {}

    # SQLAlchemy 0.5/0.6: no event API available.
    if SaEvent is None:
        # The "postgres" and "postgresql" dialects are defined as
        # aliases of one another (sha:7d7d6c2), so either spelling
        # must be accepted.  BUGFIX: the second alias was previously
        # misspelled 'postresql', so the canonical name never matched.
        if dialect in ('postgres', 'postgresql'):
            dialect = ('postgres', 'postgresql')
        if isinstance(dialect, (list, tuple)):
            # @HACK: SQLAlchemy 0.5.x passes 3 positional arguments
            # (event, target & bind) while 0.6.x passes 4 (self, event,
            # target & bind).  Only 'bind' matters, and it is always the
            # last argument in both versions.
            condition = lambda *args, **kwargs: args[-1].engine.name in dialect
        else:
            condition = dialect
        ddl = SaDDL(statement, on=condition, context=context)
        if when is None:
            ddl.execute(bind)
        else:
            # Words in event names are separated by '-' in this
            # version of SQLAlchemy.
            ddl.execute_at(when.replace('_', '-'), obj)
    # SQLAlchemy 0.7+
    else:
        ddl = SaDDL(statement, context=context)
        if when is None:
            ddl.execute(bind)
        else:
            # Words in event names are separated by '_' in this
            # version of SQLAlchemy.
            SaEvent.listen(obj, when.replace('-', '_'),
                         ddl.execute_if(dialect=dialect))
예제 #37
0
파일: db.py 프로젝트: thlor/portalmonitor
    def __init__(self,
                 db='portalwatch',
                 host="localhost",
                 port=5432,
                 password=None,
                 user='******',
                 debug=False):
        """Open a pooled connection to the portalwatch PostgreSQL database
        and prepare the partitioning DDL (trigger functions and triggers)
        for the datasets, resourcesinfo and resourcescrawllog tables.

        The DDL objects built here are only stored on the instance; this
        constructor does not execute them against the database.
        """

        # Define our connection string
        self.log = log.new()

        # Assemble a "postgresql://user:password@host:port/db" URL,
        # omitting any component that was not supplied.
        # NOTE(review): user/password are concatenated verbatim; special
        # characters would need URL-escaping -- confirm callers never
        # pass such credentials.
        conn_string = "postgresql://"
        if user:
            conn_string += user
        if password:
            conn_string += ":" + password
        if host:
            conn_string += "@" + host
        if port:
            conn_string += ":" + str(port)
        conn_string += "/" + db
        log.info("Connecting DB")

        self.engine = create_engine(conn_string,
                                    pool_size=20,
                                    client_encoding='utf8',
                                    echo=debug)
        # Presumably guards pooled connections against reuse across
        # forked worker processes -- see add_engine_pidguard's definition.
        add_engine_pidguard(self.engine)
        #add_query_time_logging(self.engine)
        # register_after_fork(self.engine, self.engine.dispose)
        log.info("Connected DB")
        # self.engine.connect()

        self.session_factory = sessionmaker(
            bind=self.engine)  # , expire_on_commit=False

        # self.session = self.Session()

        # Trigger function partitioning the datasets table by snapshot:
        # each insert on the parent is redirected into a per-snapshot
        # child table, which is created on first use together with its
        # indexes and constraints.  Returning NULL suppresses the insert
        # on the parent table itself.
        self.dataset_insert_function = DDL("""
            CREATE OR REPLACE FUNCTION dataset_insert_function()
            RETURNS TRIGGER AS $$

            DECLARE
                _snapshot smallint;
                _table_name text;

            BEGIN
                _snapshot := NEW.snapshot;
                _table_name := '""" + tab_datasets + """_' || _snapshot;

                    PERFORM 1 FROM pg_tables WHERE tablename = _table_name;

                      IF NOT FOUND THEN
                        EXECUTE
                          'CREATE TABLE '
                          || quote_ident(_table_name)
                          || ' (CHECK ("snapshot" = '
                          || _snapshot::smallint
                          || ')) INHERITS (""" + tab_datasets + """)';

                        -- Indexes are defined per child, so we assign a default index that uses the partition columns
                        EXECUTE 'CREATE INDEX ' || quote_ident(_table_name||'_org') || ' ON '||quote_ident(_table_name) || ' (organisation)';
                        EXECUTE 'CREATE INDEX ' || quote_ident(_table_name||'_sn')  || ' ON '||quote_ident(_table_name) || ' (snapshot)';
                        EXECUTE 'CREATE INDEX ' || quote_ident(_table_name||'_pid') || ' ON '||quote_ident(_table_name) || ' (portalid)';
                        EXECUTE 'ALTER TABLE '  || quote_ident(_table_name)|| ' ADD CONSTRAINT ' || quote_ident(_table_name||'_pkey') || ' PRIMARY KEY (id, snapshot, portalid)';
                        EXECUTE 'ALTER TABLE '  || quote_ident(_table_name)|| ' ADD CONSTRAINT ' || quote_ident(_table_name||'_md5_fkey') || ' FOREIGN KEY (md5) REFERENCES datasetsdata (md5) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE NO ACTION';
                        EXECUTE 'ALTER TABLE '  || quote_ident(_table_name)|| ' ADD CONSTRAINT ' || quote_ident(_table_name||'_portalid_fkey') || ' FOREIGN KEY (portalid, snapshot) REFERENCES portalsnapshot (portalid, snapshot) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE NO ACTION';


                      END IF;

                      EXECUTE
                        'INSERT INTO '
                        || quote_ident(_table_name)
                        || ' VALUES ($1.*)'
                      USING NEW;

                      RETURN NULL;
                END;

                $$ LANGUAGE plpgsql;
                """)
        # Row-level trigger that routes every insert on the parent
        # datasets table through the partitioning function above.
        self.dataset_insert_trigger = DDL("""
            CREATE TRIGGER dataset_insert_trigger
            BEFORE INSERT ON """ + tab_datasets + """
                FOR EACH ROW EXECUTE PROCEDURE dataset_insert_function();
                """)
        # Same partitioning scheme for the resourcesinfo table
        # (per-snapshot child tables created on first insert).
        self.resourcesinfo_insert_function = DDL("""
            CREATE OR REPLACE FUNCTION resourcesinfo_insert_function()
            RETURNS TRIGGER AS $$

            DECLARE
                _snapshot smallint;
                _table_name text;

            BEGIN
                _snapshot := NEW.snapshot;
                _table_name := '""" + tab_resourcesinfo + """_' || _snapshot;

                    PERFORM 1 FROM pg_tables WHERE tablename = _table_name;

                      IF NOT FOUND THEN
                        EXECUTE
                          'CREATE TABLE '
                          || quote_ident(_table_name)
                          || ' (CHECK ("snapshot" = '
                          || _snapshot::smallint
                          || ')) INHERITS (""" + tab_resourcesinfo + """)';

                        -- Indexes are defined per child, so we assign a default index that uses the partition columns
                        EXECUTE 'ALTER TABLE '  || quote_ident(_table_name)|| ' ADD CONSTRAINT ' || quote_ident(_table_name||'_pkey') || ' PRIMARY KEY (uri, snapshot)';
                        EXECUTE 'CREATE INDEX ' || quote_ident(_table_name||'_status') || ' ON '||quote_ident(_table_name) || ' (status)';
                        EXECUTE 'CREATE INDEX ' || quote_ident(_table_name||'_uri') || ' ON '||quote_ident(_table_name) || ' (uri)';
                      END IF;

                      EXECUTE
                        'INSERT INTO '
                        || quote_ident(_table_name)
                        || ' VALUES ($1.*)'
                      USING NEW;



                      RETURN NULL;
                END;

                $$ LANGUAGE plpgsql;
                """)
        # Row-level trigger wiring the resourcesinfo partitioning function.
        self.resourcesinfo_insert_trigger = DDL("""
            CREATE TRIGGER resourcesinfo_insert_trigger
            BEFORE INSERT ON """ + tab_resourcesinfo + """
                FOR EACH ROW EXECUTE PROCEDURE resourcesinfo_insert_function();
                """)

        # Same partitioning scheme for the resourcescrawllog table.
        self.resourcescrawllog_insert_function = DDL("""
            CREATE OR REPLACE FUNCTION resourcescrawllog_insert_function()
            RETURNS TRIGGER AS $$

            DECLARE
                _snapshot smallint;
                _table_name text;

            BEGIN
                _snapshot := NEW.snapshot;
                _table_name := '""" + tab_resourcescrawllog +
                                                     """_' || _snapshot;

                    PERFORM 1 FROM pg_tables WHERE tablename = _table_name;

                      IF NOT FOUND THEN
                        EXECUTE
                          'CREATE TABLE '
                          || quote_ident(_table_name)
                          || ' (CHECK ("snapshot" = '
                          || _snapshot::smallint
                          || ')) INHERITS (""" + tab_resourcescrawllog + """)';

                        -- Indexes are defined per child, so we assign a default index that uses the partition columns
                        EXECUTE 'ALTER TABLE '  || quote_ident(_table_name)|| ' ADD CONSTRAINT ' || quote_ident(_table_name||'_pkey') || ' PRIMARY KEY (uri, snapshot, timestamp)';
                        EXECUTE 'CREATE INDEX ' || quote_ident(_table_name||'_status') || ' ON '||quote_ident(_table_name) || ' (status)';
                        EXECUTE 'CREATE INDEX ' || quote_ident(_table_name||'_uri') || ' ON '||quote_ident(_table_name) || ' (uri)';
                        EXECUTE 'CREATE INDEX ' || quote_ident(_table_name||'_domain') || ' ON '||quote_ident(_table_name) || ' (domain)';
                      END IF;

                      EXECUTE
                        'INSERT INTO '
                        || quote_ident(_table_name)
                        || ' VALUES ($1.*)'
                      USING NEW;



                      RETURN NULL;
                END;

                $$ LANGUAGE plpgsql;
                """)
        # Row-level trigger wiring the resourcescrawllog partitioning function.
        self.resourcescrawllog_insert_trigger = DDL("""
            CREATE TRIGGER resourcescrawllog_insert_trigger
            BEFORE INSERT ON """ + tab_resourcescrawllog + """
                FOR EACH ROW EXECUTE PROCEDURE resourcescrawllog_insert_function();
                """)
예제 #38
0
파일: models.py 프로젝트: boweiliu/evesrp
            relative = Decimal(0)
        # The modifier that's changed isn't reflected yet in the database, so we
        # apply it here.
        if isinstance(value, Request):
            # A modifier being added to the Request
            if modifier.voided:
                # The modifier being added is already void
                return
            direction = Decimal(1)
        else:
            # A modifier already on a request is being voided
            direction = Decimal(-1)
        if isinstance(modifier, AbsoluteModifier):
            absolute += direction * modifier.value
        elif isinstance(modifier, RelativeModifier):
            relative += direction * modifier.value
        payout = (srp_request.base_payout + absolute) * (Decimal(1) + relative)
        srp_request.payout = PrettyDecimal(payout)


# The next few lines are responsible for adding a full text search index on the
# Request.details column for MySQL.
# %(table)s is interpolated by SQLAlchemy with the event target's table name
# when the DDL statement is executed.
_create_fts = DDL("CREATE FULLTEXT INDEX ix_%(table)s_details_fulltext " "ON %(table)s (details);")
_drop_fts = DDL("DROP INDEX ix_%(table)s_details_fulltext ON %(table)s")


# Create the index right after the Request table is created; execute_if
# restricts this to the MySQL dialect (other backends silently skip it).
event.listen(Request.__table__, "after_create", _create_fts.execute_if(dialect="mysql"))


# Drop the index just before the Request table is dropped (MySQL only).
event.listen(Request.__table__, "before_drop", _drop_fts.execute_if(dialect="mysql"))