    def visit_create_table(self, create):
        """Render CREATE EXTERNAL TABLE DDL for the given CreateTable
        element (relies on SQLAlchemy's ``exc`` and ``util`` helpers)."""
        table = create.element
        preparer = self.preparer

        text = '\nCREATE EXTERNAL '
        text += 'TABLE ' + preparer.format_table(table) + ' '
        text += '('

        separator = '\n'
        for create_column in create.columns:
            column = create_column.element
            try:
                processed = self.process(create_column)
                if processed is not None:
                    text += separator
                    separator = ", \n"
                    text += "\t" + processed
            except exc.CompileError as ce:
                util.raise_from_cause(
                    exc.CompileError(
                        util.u("(in table '{0}', column '{1}'): {2}").format(
                            table.description, column.name, ce.args[0])))

        const = self.create_table_constraints(
            table,
            _include_foreign_key_constraints=(
                create.include_foreign_key_constraints),
        )
        if const:
            text += separator + "\t" + const

        text += "\n)\n%s\n\n" % self.post_create_table(table)
        return text
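
A minimal usage sketch (not part of the original listing; the table definition is invented for illustration) of how a DDLCompiler method like the one above gets exercised: compiling a CreateTable construct routes through the dialect's visit_create_table(), so the customized CREATE ... TABLE text can be inspected as a string.

# str() compiles against the default dialect; .compile(dialect=...) would
# target the dialect that defines the compiler shown above.
from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.schema import CreateTable

metadata = MetaData()
events = Table(
    "events", metadata,
    Column("id", Integer),
    Column("payload", String(200)),
)
print(str(CreateTable(events)))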
Example #2
def connect():
    try:
        return dialect.connect(*cargs, **cparams)
    except Exception as e:
        invalidated = dialect.is_disconnect(e, None, None)
        util.raise_from_cause(
            exc.DBAPIError.instance(
                None, None, e, dialect.dbapi.Error,
                connection_invalidated=invalidated,
            )
        )
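
For reference, a self-contained sketch (the exception classes here are invented stand-ins, not SQLAlchemy's) of what util.raise_from_cause accomplishes in the handler above: the wrapping exception is raised with the original driver error attached as its cause, which on Python 3 is the ``raise wrapper from original`` pattern.

class FakeDriverError(Exception):
    """Stand-in for a DBAPI driver exception."""

class WrappedError(Exception):
    """Stand-in for exc.DBAPIError."""

try:
    try:
        raise FakeDriverError("connection refused")
    except FakeDriverError as original:
        # util.raise_from_cause(wrapper) is SQLAlchemy's version-portable
        # spelling of this re-raise-with-cause step.
        raise WrappedError("could not connect") from original
except WrappedError as wrapped:
    print(type(wrapped.__cause__).__name__)  # -> FakeDriverError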
Example #3
def connect():
    try:
        return dialect.connect(*cargs, **cparams)
    except dialect.dbapi.Error as e:
        invalidated = dialect.is_disconnect(e, None, None)
        util.raise_from_cause(
            exc.DBAPIError.instance(
                None, None, e, dialect.dbapi.Error,
                connection_invalidated=invalidated,
            )
        )
    def visit_create_table(self, create):
        """
        Overrides the ``visit_create_table()`` method on the DDLCompiler base class to implement Hive-specific
        logic, such as creating external tables, removing foreign key and unique constraints (since those are not
        supported by Hive), and making sure the partition columns are not duplicated in the column definitions.
        """
        table = create.element
        preparer = self.preparer

        text = "\nCREATE "
        if table._prefixes:
            text += " ".join(table._prefixes) + " "
        text += "EXTERNAL TABLE " + preparer.format_table(table) + " "

        create_table_suffix = self.create_table_suffix(table)
        if create_table_suffix:
            text += create_table_suffix + " "

        text += "("

        separator = "\n"

        partition_keys = table.dialect_options['hive']._non_defaults.get(
            'partition_by', [])
        partition_cols = []

        for create_column in create.columns:
            column = create_column.element
            try:
                processed = self.process(create_column, first_pk=False)
                if processed is not None:
                    if column.name in partition_keys:
                        partition_cols.append(processed)
                        continue
                    text += separator
                    separator = ", \n"
                    text += "\t" + processed
            except exc.CompileError as ce:
                util.raise_from_cause(
                    exc.CompileError(
                        util.u("(in table '%s', column '%s'): %s") %
                        (table.description, column.name, ce.args[0])))

        hive_options = table.dialect_options['hive']._non_defaults
        hive_options['partition_by'] = partition_cols

        text += "\n)%s\n\n" % self.post_create_table(table)
        return text
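
The compiler above pulls its partition keys from table.dialect_options['hive']. A runnable illustration of that mechanism using SQLAlchemy's built-in mysql dialect (the Hive dialect itself is not assumed to be installed): dialect-prefixed Table keyword arguments surface under table.dialect_options['<dialect name>'].

from sqlalchemy import Column, Integer, MetaData, Table

metadata = MetaData()
t = Table(
    "events", metadata,
    Column("id", Integer),
    # becomes dialect_options['mysql']['engine'], just as a partition_by
    # option would populate dialect_options['hive']['partition_by'] for a
    # dialect registered under the name 'hive'
    mysql_engine="InnoDB",
)
print(t.dialect_options["mysql"]["engine"])  # -> InnoDB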
    def _redshift_visit_create_table(compiler, create):
        """Render ``CREATE TABLE IF NOT EXISTS`` DDL for Redshift,
        specifying the primary key inline with its column when the table
        has only one primary key column."""
        table = create.element
        preparer = compiler.preparer

        text = "\nCREATE "
        if table._prefixes:
            text += " ".join(table._prefixes) + " "
        text += "TABLE IF NOT EXISTS " + preparer.format_table(table) + " "

        create_table_suffix = compiler.create_table_suffix(table)
        if create_table_suffix:
            text += create_table_suffix + " "

        text += "("

        separator = "\n"

        # if only one primary key, specify it along with the column
        first_pk = False
        for create_column in create.columns:
            column = create_column.element
            try:
                processed = compiler.process(create_column,
                                             first_pk=column.primary_key
                                             and not first_pk)
                if processed is not None:
                    text += separator
                    separator = ", \n"
                    text += "\t" + processed
                if column.primary_key:
                    first_pk = True
            except exc.CompileError as ce:
                util.raise_from_cause(
                    exc.CompileError(
                        util.u("(in table '%s', column '%s'): %s") %
                        (table.description, column.name, ce.args[0])))

        const = compiler.create_table_constraints(
            table,
            _include_foreign_key_constraints=(
                create.include_foreign_key_constraints),
        )
        if const:
            text += separator + "\t" + const

        text += "\n)%s\n\n" % compiler.post_create_table(table)
        return text
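
One common way to hook a standalone (compiler, create) function like this in (an assumption about wiring, not something shown in the snippet above) is sqlalchemy.ext.compiler.compiles: its callbacks receive (element, compiler, **kw), so a thin adapter flips the arguments for the 'redshift' dialect name.

from sqlalchemy.ext.compiler import compiles
from sqlalchemy.schema import CreateTable

@compiles(CreateTable, "redshift")
def _compile_create_table(create, compiler, **kw):
    # Hypothetical adapter: delegate to the function defined above whenever
    # a Redshift compiler renders a CreateTable construct.
    return _redshift_visit_create_table(compiler, create)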
    def connect(self, *cargs, **cparams):
        """Connect to the first healthy node in the cluster, discovering the
        node list on first use; if every node raises, the last DBAPI error
        is re-raised wrapped in a DBAPIError."""
        try:
            pool = self.get_pool(*cargs, **cparams)

            def is_healthy(conn):
                # True when the node can see the 'ceilometer' schema's tables
                return bool(self.run_query(
                    conn,
                    "select distinct(table_schema) from "
                    "information_schema.tables where "
                    "table_schema = 'ceilometer'"))

            if not self.nodes:
                conn = pool._dialect.connect(*cargs, **cparams)
                self.nodes = self.retrieve_nodes(conn)
                if not self.nodes:
                    self.nodes.append(cparams['host'])
                conn.close()

            self._conn_exception = None
            for node in self.nodes:
                cparams['host'] = node
                try:
                    conn = pool._dialect.connect(*cargs, **cparams)
                    if is_healthy(conn):
                        return conn
                except pool._dialect.dbapi.Error as e:
                    # remember the failure and try the next node
                    self._conn_exception = e

            if self._conn_exception:
                raise self._conn_exception

            return None
        except pool._dialect.dbapi.Error as e:
            invalidated = pool._dialect.is_disconnect(e, None, None)
            util.raise_from_cause(
                exc.DBAPIError.instance(None,
                                        None,
                                        e,
                                        pool._dialect.dbapi.Error,
                                        connection_invalidated=invalidated),
                sys.exc_info())
Example #8
    async def visit_metadata(self, metadata):
        """Drop the metadata's tables (and standalone sequences) in reverse
        dependency order, emitting the ``before_drop`` and ``after_drop``
        events around the DROP statements."""
        if self.tables is not None:
            tables = self.tables
        else:
            tables = list(metadata.tables.values())

        try:
            unsorted_tables = []
            for t in tables:
                if await self._can_drop_table(t):
                    unsorted_tables.append(t)
            collection = list(
                reversed(
                    sort_tables_and_constraints(
                        unsorted_tables,
                        filter_fn=lambda constraint: (
                            False
                            if not self.dialect.supports_alter
                            or constraint.name is None
                            else None
                        ),
                    )
                )
            )
        except exc.CircularDependencyError as err2:
            if not self.dialect.supports_alter:
                util.warn(
                    "Can't sort tables for DROP; an "
                    "unresolvable foreign key "
                    "dependency exists between tables: %s, and backend does "
                    "not support ALTER.  To restore at least a partial sort, "
                    "apply use_alter=True to ForeignKey and "
                    "ForeignKeyConstraint "
                    "objects involved in the cycle to mark these as known "
                    "cycles that will be ignored." %
                    (", ".join(sorted([t.fullname for t in err2.cycles]))))
                collection = [(t, ()) for t in unsorted_tables]
            else:
                util.raise_from_cause(
                    exc.CircularDependencyError(
                        err2.args[0],
                        err2.cycles,
                        err2.edges,
                        msg="Can't sort tables for DROP; an unresolvable "
                        "foreign key dependency exists between tables: %s."
                        "  Please ensure that the ForeignKey and "
                        "ForeignKeyConstraint objects involved in the "
                        "cycle have names so that they can be dropped "
                        "using DROP CONSTRAINT." %
                        (", ".join(sorted([t.fullname for t in err2.cycles]))),
                    ))

        seq_coll = []
        for s in metadata._sequences.values():
            if s.column is None and await self._can_drop_sequence(s):
                seq_coll.append(s)

        event_collection = [t for (t, fks) in collection if t is not None]

        await _Async(metadata.dispatch.before_drop)(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

        for table, fkcs in collection:
            if table is not None:
                await self.traverse_single(table,
                                           drop_ok=True,
                                           _is_metadata_operation=True)
            else:
                for fkc in fkcs:
                    await self.traverse_single(fkc)

        for seq in seq_coll:
            await self.traverse_single(seq, drop_ok=True)

        await _Async(metadata.dispatch.after_drop)(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )
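
The async example above leans on sort_tables_and_constraints for the drop ordering. A small standalone sketch (the parent/child tables are invented for illustration) of that helper: it returns (table, foreign-key-constraints) pairs in dependency order, which the DROP path then reverses.

from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
from sqlalchemy.schema import sort_tables_and_constraints

metadata = MetaData()
parent = Table("parent", metadata, Column("id", Integer, primary_key=True))
child = Table(
    "child", metadata,
    Column("id", Integer, primary_key=True),
    Column("parent_id", Integer, ForeignKey("parent.id")),
)

for table, fkcs in sort_tables_and_constraints([child, parent]):
    # A final (None, [constraints]) entry can appear when constraints must
    # be handled separately via ALTER; simple FK chains sort inline.
    name = table.name if table is not None else None
    print(name, [fk.name for fk in fkcs])
# parent sorts before child; reversed(), as in the DROP logic above, gives
# the safe drop order.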