Example 1
 def __init__(self, **kw):
     # DBAPI BUG:
     # named parameters don't work:
     # "Parameters must be given as a sequence."
     # https://bugs.launchpad.net/myconnpy/+bug/454782
     kw["paramstyle"] = "format"
     MySQLDialect.__init__(self, **kw)
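
For orientation, here is a minimal sketch (not taken from the project above) of what forcing paramstyle to "format" means at the DBAPI level; "cursor" is assumed to be a cursor on an already-open connection, and the table and column names are made up.

# Hypothetical sketch: the two placeholder styles as seen by the DBAPI driver.

# pyformat / named placeholders with a mapping -- the style the buggy driver
# version rejected with "Parameters must be given as a sequence.":
cursor.execute("SELECT id FROM users WHERE name = %(name)s", {"name": "alice"})

# format-style positional placeholders with a sequence -- what forcing
# paramstyle = "format" produces instead:
cursor.execute("SELECT id FROM users WHERE name = %s", ("alice",))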
Example 2
 def get_view_names(self, connection, schema=None, **kw):
     return MySQLDialect.get_view_names(
         self,
         connection.connect().execution_options(_oursql_plain_query=True),
         schema=schema,
         **kw
     )
Example 3
    def test_with_extra_params(self):
        metadata = MetaData()
        table = Table('table', metadata,
            Column('id', Integer, primary_key=True),
            Column('name', Unicode(16), nullable=False, unique=True),
            Column('extra', Unicode(16), nullable=False),
        )
        clause = ConditionalInsert(table, {table.c.name: 'asdf'},
                {table.c.extra: 'something'})

        #pylint: disable=E1120
        compiled = clause.compile(dialect=MySQLDialect())
        self.assertEquals(str(compiled),
                'INSERT INTO `table` (name, extra)\n'
                'SELECT %s, %s\n'
                'FROM DUAL\n'
                'WHERE NOT (EXISTS (SELECT 1 \n'
                'FROM `table` \n'
                'WHERE `table`.name = %s FOR UPDATE))')
        self.assertEquals(compiled.positiontup, ['name', 'extra', 'name_1'])
        self.assertEquals(compiled.params, {'name': 'asdf',
                'extra': 'something', 'name_1': 'asdf'})

        #pylint: disable=E1120
        compiled = clause.compile(dialect=SQLiteDialect())
        self.assertEquals(str(compiled),
                'INSERT INTO "table" (name, extra)\n'
                'SELECT ?, ?\n'
                'WHERE NOT (EXISTS (SELECT 1 \n'
                'FROM "table" \n'
                'WHERE "table".name = ?))')
        self.assertEquals(compiled.positiontup, ['name', 'extra', 'name_1'])
        self.assertEquals(compiled.params, {'name': 'asdf',
                'extra': 'something', 'name_1': 'asdf'})
Example 4
    def test_unique_params_only(self):
        metadata = MetaData()
        table = Table(
            'table',
            metadata,
            Column('id', Integer, primary_key=True),
            Column('name', Unicode(16), nullable=False, unique=True),
        )
        clause = ConditionalInsert(table, {table.c.name: 'asdf'})

        # there is a bug in upstream in pylint so we have to disable it for
        # SQLAlchemy 0.9.
        # https://bitbucket.org/logilab/astroid/issue/39/support-for-sqlalchemy
        #pylint: disable=E1120
        compiled = clause.compile(dialect=MySQLDialect())
        self.assertEquals(
            str(compiled), 'INSERT INTO `table` (name)\n'
            'SELECT %s\n'
            'FROM DUAL\n'
            'WHERE NOT (EXISTS (SELECT 1 \n'
            'FROM `table` \n'
            'WHERE `table`.name = %s FOR UPDATE))')
        self.assertEquals(compiled.positiontup, ['name', 'name_1'])
        self.assertEquals(compiled.params, {'name': 'asdf', 'name_1': 'asdf'})

        #pylint: disable=E1120
        compiled = clause.compile(dialect=SQLiteDialect())
        self.assertEquals(
            str(compiled), 'INSERT INTO "table" (name)\n'
            'SELECT ?\n'
            'WHERE NOT (EXISTS (SELECT 1 \n'
            'FROM "table" \n'
            'WHERE "table".name = ?))')
        self.assertEquals(compiled.positiontup, ['name', 'name_1'])
        self.assertEquals(compiled.params, {'name': 'asdf', 'name_1': 'asdf'})
Example 5
 def test_load_dialect_impl(self):
     dialect = MySQLDialect()
     impl = self.sqltype.load_dialect_impl(dialect)
     self.assertNotEqual(types.Text, type(impl))
     dialect = SQLiteDialect()
     impl = self.sqltype.load_dialect_impl(dialect)
     self.assertEqual(types.Text, type(impl))
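
The test above only checks which implementation type comes back per dialect. As a rough sketch of the kind of TypeDecorator that would satisfy it, assuming (not taken from the example) that the MySQL-specific implementation is LONGTEXT and that the class name is invented:

from sqlalchemy import types
from sqlalchemy.dialects.mysql import LONGTEXT


class BigText(types.TypeDecorator):
    """Hypothetical type: a MySQL-specific implementation on MySQL, plain Text elsewhere."""

    impl = types.Text

    def load_dialect_impl(self, dialect):
        # Pick the concrete implementation based on the dialect in use.
        if dialect.name == "mysql":
            return dialect.type_descriptor(LONGTEXT())
        return dialect.type_descriptor(types.Text())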
Example 6
class Db_MySql(BaseDb):
    """Functionality for using sqlalchemy to connect to a MySQL database."""

    CANONICAL_SCHEME = "mysql"
    INTERNAL_SCHEME = "mysql+pymysql"

    preparer = MySQLIdentifierPreparer(MySQLDialect())

    @classmethod
    def create_engine(cls, msurl):
        def _on_checkout(mysql_conn, connection_record, connection_proxy):
            dbcur = mysql_conn.cursor()
            # +00:00 is UTC, but unlike UTC, it works even without a timezone DB.
            dbcur.execute("SET time_zone='+00:00';")
            dbcur.execute("SET sql_mode = 'ANSI_QUOTES';")

        url = urlsplit(msurl)
        if url.scheme != cls.CANONICAL_SCHEME:
            raise ValueError("Expecting mysql://")
        url_path = url.path or "/"  # Empty path doesn't work with non-empty query.
        url_query = cls._append_to_query(url.query, {"program_name": "kart"})
        msurl = urlunsplit(
            [cls.INTERNAL_SCHEME, url.netloc, url_path, url_query, ""])

        engine = sqlalchemy.create_engine(msurl, poolclass=cls._pool_class())
        sqlalchemy.event.listen(engine, "checkout", _on_checkout)

        return engine

    @classmethod
    def list_tables(cls, sess, db_schema=None):
        # TODO - include titles.
        if db_schema is not None:
            r = sess.execute(
                sqlalchemy.text("""
                    SELECT TABLE_NAME
                    FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = :db_schema
                    ORDER BY TABLE_NAME;
                    """),
                {"db_schema": db_schema},
            )
            return {row["TABLE_NAME"]: None for row in r}
        else:
            r = sess.execute(
                sqlalchemy.text("""
                    SELECT TABLE_SCHEMA, TABLE_NAME
                    FROM INFORMATION_SCHEMA.TABLES
                    ORDER BY TABLE_SCHEMA, TABLE_NAME;
                    """))
            return {
                f"{row['TABLE_SCHEMA']}.{row['TABLE_NAME']}": None
                for row in r
            }

    @classmethod
    def drop_all_in_schema(cls, sess, db_schema):
        """Drops all tables and routines in schema db_schema."""
        for thing in ("table", "routine"):
            cls._drop_things_in_schema(cls, sess, db_schema, thing)
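
A hypothetical usage sketch of the class above, assuming pymysql is installed, a reachable server, and that a plain sessionmaker session is an acceptable sess; the URL and schema name are made up.

# Hypothetical usage: build an engine from a mysql:// URL, open a session,
# and list the tables in one schema.
from sqlalchemy.orm import sessionmaker

engine = Db_MySql.create_engine("mysql://user:password@localhost:3306/mydb")
Session = sessionmaker(bind=engine)
sess = Session()
try:
    tables = Db_MySql.list_tables(sess, db_schema="mydb")
    print(sorted(tables))
finally:
    sess.close()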
Example 7
 def test_get_spatial_dialect(self):
     spatial_dialect = DialectManager.get_spatial_dialect(PGDialect_psycopg2())
     ok_(isinstance(spatial_dialect, PGSpatialDialect))
     ok_(isinstance(DialectManager.get_spatial_dialect(MySQLDialect()), MySQLSpatialDialect))
     ok_(isinstance(DialectManager.get_spatial_dialect(SQLiteDialect()), SQLiteSpatialDialect))
     ok_(isinstance(DialectManager.get_spatial_dialect(OracleDialect()), OracleSpatialDialect))
     ok_(isinstance(DialectManager.get_spatial_dialect(MSDialect()), MSSpatialDialect))
     spatial_dialect2 = DialectManager.get_spatial_dialect(PGDialect_psycopg2())
     ok_(spatial_dialect is spatial_dialect2, "only one instance per dialect should be created")
Example 8
 def _show_create_table(self, connection, table, charset=None,
                        full_name=None):
     return MySQLDialect._show_create_table(
         self,
         connection.contextual_connect(close_with_result=True)
             .execution_options(_oursql_plain_query=True),
         table, charset, full_name)
Example 9
def unit_tests() -> None:
    from sqlalchemy.dialects.mssql.base import MSDialect
    from sqlalchemy.dialects.mysql.base import MySQLDialect
    d_mssql = MSDialect()
    d_mysql = MySQLDialect()
    col1 = Column('hello', BigInteger, nullable=True)
    col2 = Column('world', BigInteger,
                  autoincrement=True)  # does NOT generate IDENTITY
    col3 = make_bigint_autoincrement_column('you', d_mssql)
    metadata = MetaData()
    t = Table('mytable', metadata)
    t.append_column(col1)
    t.append_column(col2)
    t.append_column(col3)

    print("Checking Column -> DDL: SQL Server (mssql)")
    test_assert(column_creation_ddl(col1, d_mssql), "hello BIGINT NULL")
    test_assert(column_creation_ddl(col2, d_mssql), "world BIGINT NULL")
    test_assert(column_creation_ddl(col3, d_mssql),
                "you BIGINT NOT NULL IDENTITY(1,1)")

    print("Checking Column -> DDL: MySQL (mysql)")
    test_assert(column_creation_ddl(col1, d_mysql), "hello BIGINT")
    test_assert(column_creation_ddl(col2, d_mysql), "world BIGINT")
    # not col3; unsupported

    print("Checking SQL type -> SQL Alchemy type")
    to_check = [
        # mssql
        ("BIGINT", d_mssql),
        ("NVARCHAR(32)", d_mssql),
        ("NVARCHAR(MAX)", d_mssql),
        ('NVARCHAR(160) COLLATE "Latin1_General_CI_AS"', d_mssql),
        # mysql
        ("BIGINT", d_mssql),
        ("LONGTEXT", d_mysql),
    ]
    for coltype, dialect in to_check:
        print("... {} -> dialect {} -> {}".format(
            repr(coltype),
            repr(dialect.name),
            repr(get_sqla_coltype_from_dialect_str(coltype, dialect))))
Example 10
 def initialize(self, connection):
     return MySQLDialect.initialize(
         self, connection.execution_options(_oursql_plain_query=True))
Example 11
 def get_schema_names(self, connection, **kw):
     return MySQLDialect.get_schema_names(
         self,
         connection.connect().execution_options(_oursql_plain_query=True),
         **kw
     )
Example 12
 def has_table(self, connection, table_name, schema=None):
     return MySQLDialect.has_table(
         self,
         connection.connect().execution_options(_oursql_plain_query=True),
         table_name, schema)
Example 13
 def table_names(self, connection, schema):
     return MySQLDialect.table_names(
         self,
         connection.connect().execution_options(_oursql_plain_query=True),
         schema
     )
Example 14
def _get_crate_dd_row(
    column: Union[Column, CamcopsColumn, None],
    recipient: "ExportRecipientInfo",
    dest_dialect: Dialect = None,
    src_db: str = "camcops",
    default_indexlen: int = 100,
) -> Dict:
    """
    Args:
        column:
            A column specification (or ``None`` to create a dummy dictionary).
        recipient:
            a :class:`camcops_server.cc_modules.cc_exportrecipientinfo.ExportRecipientInfo`
        dest_dialect:
            The SQL dialect of the destination database. If ``None``, then
            MySQL is used as the default.
        src_db:
            Value to be used for the "src_db" field.
        default_indexlen:
            Default index length for fields that require one.

    Returns:
        An :class:`OrderedDict` with information for a CRATE data dictionary
        row.
    """  # noqa
    dest_dialect = dest_dialect or MySQLDialect()
    exempt_from_anonymisation = False
    identifies_patient = False
    identifies_respondent = False
    force_include = False
    if column is None:
        # Dummy row
        colname = None
        tablename = None
        comment = None
        coltype = None
        needs_scrubbing = False
        desttype = None
        destsqltype = None
    else:
        colname = column.name
        tablename = column.table.name
        comment = column.comment
        coltype = coltype_as_typeengine(column.type)
        is_free_text = is_sqlatype_text_of_length_at_least(
            coltype, min_length=MIN_STRING_LENGTH_TO_CONSIDER_SCRUBBING)

        if isinstance(column, CamcopsColumn):
            exempt_from_anonymisation = column.exempt_from_anonymisation
            identifies_patient = column.identifies_patient
            force_include = column.include_in_anon_staging_db

        needs_scrubbing = is_free_text and not exempt_from_anonymisation
        desttype = convert_sqla_type_for_dialect(
            coltype=coltype,
            dialect=dest_dialect,
            strip_collation=True,
            expand_for_scrubbing=needs_scrubbing,
        )
        destsqltype = desttype.compile(dialect=dest_dialect)

    # src_flags
    src_flags = []  # type: List[str]
    primary_key = colname == FN_PK
    if primary_key:
        src_flags.extend(["K", "C"])
    primary_pid = (
        recipient.db_patient_id_per_row
        and recipient.primary_idnum  # otherwise just in PatientIdNum
        and colname == extra_id_colname(recipient.primary_idnum))
    if primary_pid:
        src_flags.append("P")
    defines_primary_pids = False  # no single unique table for this...
    if defines_primary_pids:
        src_flags.append("*")
    master_pid = False  # not supported for now
    if master_pid:
        src_flags.append("M")

    # scrub_src
    if identifies_patient and tablename == Patient.__tablename__:
        scrub_src = "patient"
    elif identifies_respondent:
        scrub_src = "thirdparty"
    else:
        scrub_src = None

    # scrub_method
    scrub_method = None  # default is fine

    # Include in output?
    include = (force_include or primary_key or primary_pid or master_pid
               or not (identifies_patient or identifies_respondent))

    # alter_method
    if needs_scrubbing:
        alter_method = "scrub"
    elif tablename == Patient.__tablename__ and colname == Patient.dob.name:
        alter_method = "truncate_date"
    else:
        alter_method = None

    # Indexing
    crate_index = None
    crate_indexlen = None
    if column is not None and column.index:
        crate_index = "U" if column.unique else "I"
        if does_sqlatype_require_index_len(desttype):
            crate_indexlen = default_indexlen

    return OrderedDict([
        ("src_db", src_db),
        ("src_table", tablename),
        ("src_field", colname),
        ("src_datatype", str(coltype)),
        ("src_flags", "".join(src_flags) if src_flags else None),
        ("scrub_src", scrub_src),
        ("scrub_method", scrub_method),
        ("decision", "include" if include else "OMIT"),
        ("inclusion_values", None),
        ("exclusion_values", None),
        ("alter_method", alter_method),
        ("dest_table", tablename),
        ("dest_field", colname),
        ("dest_datatype", destsqltype),
        ("index", crate_index),
        ("indexlen", crate_indexlen),
        ("comment", comment),
    ])
Example 15
def _get_cris_dd_row(
    column: Union[Column, CamcopsColumn, None],
    recipient: "ExportRecipientInfo",
    dest_dialect: Dialect = None,
) -> Dict:
    """
    Args:
        column:
            A column specification (or ``None`` to create a dummy dictionary).
        recipient:
            a :class:`camcops_server.cc_modules.cc_exportrecipientinfo.ExportRecipientInfo`
        dest_dialect:
            The SQL dialect of the destination database. If ``None``, then
            MySQL is used as the default.

    Returns:
        An :class:`OrderedDict` with information for a CRIS data dictionary
        row.
    """
    dest_dialect = dest_dialect or MySQLDialect()  # MSDialect() for SQL Server
    valid_values = None
    if column is None:
        # Dummy row
        colname = None
        tablename = None
        taskname = None
        comment = None
        feft = None
        security_status = None
        finaltype = None
        tlfa = None
        size = None
    else:
        colname = column.name
        tablename = column.table.name
        taskname = tablename
        comment = column.comment
        coltype = coltype_as_typeengine(column.type)
        is_free_text = is_sqlatype_text_of_length_at_least(
            coltype, min_length=MIN_STRING_LENGTH_TO_CONSIDER_SCRUBBING)
        exempt_from_anonymisation = False
        identifies_patient = False

        if isinstance(column, CamcopsColumn):
            exempt_from_anonymisation = column.exempt_from_anonymisation
            identifies_patient = column.identifies_patient
            if column.permitted_value_checker:
                valid_values = (
                    column.permitted_value_checker.permitted_values_csv())

        needs_scrubbing = is_free_text and not exempt_from_anonymisation

        # Tag list - fields anon
        tlfa = "Y" if needs_scrubbing else ""

        # Destination SQL type
        desttype = convert_sqla_type_for_dialect(
            coltype=coltype,
            dialect=dest_dialect,
            strip_collation=True,
            expand_for_scrubbing=needs_scrubbing,
        )
        destsqltype = desttype.compile(dialect=dest_dialect)
        finaltype, size = _get_type_size_as_text_from_sqltype(destsqltype)

        # Security status
        system_id = colname == TABLET_ID_FIELD or colname.endswith("_id")
        patient_idnum_field = colname.startswith(EXTRA_IDNUM_FIELD_PREFIX)
        internal_field = colname.startswith("_")
        if identifies_patient and (tablename == Patient.__tablename__
                                   and colname == Patient.dob.name):
            security_status = 3  # truncate (e.g. DOB, postcode)
        elif identifies_patient and tablename == Patient.__tablename__:
            security_status = 2  # use to scrub
        elif system_id or internal_field or identifies_patient:
            security_status = 1  # drop (e.g. for pointless internal keys)
        else:
            security_status = 4  # bring through

        # Front end field type
        if system_id or patient_idnum_field:
            feft = 34  # patient ID; other internal keys
        elif is_sqlatype_date(coltype):
            feft = 4  # dates
        elif is_free_text:
            feft = 3  # giant free text, I think
        elif valid_values is not None:
            feft = 2  # picklist
        else:
            feft = 1  # text, numbers

    return OrderedDict([
        ("Tab", "CamCOPS"),
        ("Form name", taskname),
        ("CRIS tree label", colname),
        ("Source system table name", tablename),
        ("SQL column name", colname),
        ("Front end field type", feft),
        ("Valid values", valid_values),
        ("Result column name", colname),
        ("Family doc tab name", ""),
        ("Family doc form name", ""),
        ("Security status", security_status),
        ("Exclude", ""),
        ("End SQL Type", finaltype),
        ("Header field (Y/N)", ""),
        ("Header field name", ""),
        ("Header field active (Y/N)", ""),
        ("View name", ""),
        ("Exclude from family doc", ""),
        ("Tag list - fields anon", tlfa),
        ("Anon type", ""),  # formerly "Additional info"
        ("Form start date", ""),
        ("Form end date", ""),
        ("Source", ""),
        ("Size", size),
        ("Header logic", ""),
        ("Patient/contact", ""),
        ("Comments", comment),
    ])
Example 16
 def _show_create_table(self, connection, table, charset=None,
                        full_name=None):
     return MySQLDialect._show_create_table(
         self,
         connection.contextual_connect(
             close_with_result=True).execution_options(_oursql_plain_query=True),
         table, charset, full_name)
Example 17
from pybigquery.sqlalchemy_bigquery import BigQueryDialect
from snowflake.sqlalchemy.snowdialect import SnowflakeDialect
from sqlalchemy.dialects.mysql.base import MySQLDialect
from sqlalchemy.dialects.postgresql.base import PGDialect

from panoramic.cli.husky.service.types.enums import HuskyQueryRuntime

RUNTIME_DIALECTS = {
    HuskyQueryRuntime.snowflake: SnowflakeDialect(),
    HuskyQueryRuntime.bigquery: BigQueryDialect(),
    HuskyQueryRuntime.mysql: MySQLDialect(),
    HuskyQueryRuntime.postgres: PGDialect(),
}
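
A brief sketch (not from the source above, and assuming SQLAlchemy 1.4-style select()) of how such a dialect map is typically consumed: compile one expression against the dialect registered for a runtime to obtain runtime-specific SQL text.

# Hypothetical usage: render the same expression as MySQL SQL and Postgres SQL.
from sqlalchemy import column, select, table

stmt = select(column("id"), column("name")).select_from(table("events"))

mysql_sql = str(stmt.compile(dialect=RUNTIME_DIALECTS[HuskyQueryRuntime.mysql]))
pg_sql = str(stmt.compile(dialect=RUNTIME_DIALECTS[HuskyQueryRuntime.postgres]))
print(mysql_sql)
print(pg_sql)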
Example 18
 def __init__(self, **kw):
     # deal with http://code.google.com/p/pyodbc/issues/detail?id=25
     kw.setdefault('convert_unicode', True)
     MySQLDialect.__init__(self, **kw)
     PyODBCConnector.__init__(self, **kw)