Code example #1
    def get_delete_sql(self, oids):
        escaped_catalog_name = escapeDoubleQuoteInSQLString(self._catalog_name)
        escaped_pk_name = escapeDoubleQuoteInSQLString(self._pk_name)

        delete_sql = 'BEGIN;set allow_system_table_mods="dml";delete from {0} where {1} in ({2});COMMIT;'

        return delete_sql.format(escaped_catalog_name, escaped_pk_name, ','.join(str(oid) for oid in oids))
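
A note on the helper used in every example on this page: escapeDoubleQuoteInSQLString itself is not shown. Below is a minimal sketch of its assumed behavior (double any embedded double quote, then wrap the identifier in double quotes), followed by the statement get_delete_sql would then produce; the sample catalog name, column name, and oids are made up for illustration.

def escapeDoubleQuoteInSQLString(string, forceDoubleQuote=True):
    # Assumed behavior: double embedded double quotes so the value is safe as a
    # quoted SQL identifier, and (by default) add the enclosing double quotes.
    string = string.replace('"', '""')
    if forceDoubleQuote:
        string = '"' + string + '"'
    return string

# With that assumption, a checker whose _catalog_name is 'pg_class' and whose
# _pk_name is 'oid' would get back from get_delete_sql([16385, 16386]):
#   BEGIN;set allow_system_table_mods="dml";delete from "pg_class" where "oid" in (16385,16386);COMMIT;
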
Code example #2
File: gprestore_filter.py Project: AnLingm/gpdb
def process_data(dump_schemas, dump_tables, fdin, fdout, change_schema=None, schema_level_restore_list=None):
    schema, table, schema_wo_escaping = None, None, None
    output = False
    #PYTHON PERFORMANCE IS TRICKY .... THIS CODE IS LIKE THIS BECAUSE ITS FAST
    for line in fdin:
        if (line[0] == set_start) and line.startswith(search_path_expr):
            schema = extract_schema(line)
            schema_wo_escaping = removeEscapingDoubleQuoteInSQLString(schema, False)
            if ((dump_schemas and schema_wo_escaping in dump_schemas) or
                (schema_level_restore_list and schema_wo_escaping in schema_level_restore_list)):
                if change_schema:
                    # The new schema name may contain special characters (whitespace, double quotes),
                    # so quote it. If the original schema name is already quoted in the line,
                    # replace the quoted form; otherwise replace the bare name.
                    quoted_schema = '"' + schema + '"'
                    if quoted_schema in line:
                        line = line.replace(quoted_schema, escapeDoubleQuoteInSQLString(change_schema))
                    else:
                        line = line.replace(schema, escapeDoubleQuoteInSQLString(change_schema))
                else:
                    schema = schema_wo_escaping
                fdout.write(line)
        elif (line[0] == copy_start) and line.startswith(copy_expr) and line.endswith(copy_expr_end):
            table = extract_table(line)
            table = removeEscapingDoubleQuoteInSQLString(table, False)
            if (schema_level_restore_list and schema_wo_escaping in schema_level_restore_list) or (dump_tables and (schema_wo_escaping, table) in dump_tables):
                output = True
        elif output and (line[0] == copy_end_start) and line.startswith(copy_end_expr):
            table = None
            output = False
            fdout.write(line)

        if output:
            fdout.write(line)
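
The filter above also leans on several module-level names that the excerpt does not define: the single-character prefix constants that are checked before the more expensive startswith calls, the unescaping helper, and extract_schema. The following is a hypothetical sketch of those dependencies; the real definitions in gprestore_filter.py may differ.

# Hypothetical stand-ins for the module-level names used by process_data.
search_path_expr = 'SET search_path = '
set_start = 'S'            # first character of search_path_expr, checked first for speed
copy_expr = 'COPY '
copy_start = 'C'
copy_expr_end = 'FROM stdin;\n'
copy_end_expr = '\\.'
copy_end_start = '\\'

def removeEscapingDoubleQuoteInSQLString(string, forceDoubleQuote=True):
    # Assumed inverse of escapeDoubleQuoteInSQLString: collapse doubled quotes
    # and, unless forceDoubleQuote is False, re-add the enclosing quotes.
    if string is None:
        return string
    string = string.replace('""', '"')
    if forceDoubleQuote:
        string = '"' + string + '"'
    return string

def extract_schema(line):
    # Assumed parser: pull the (still escaped) schema name out of a
    # 'SET search_path = myschema, pg_catalog;' line.
    return line[len(search_path_expr):].split(',')[0].strip()
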
Code example #3
File: gprestore_filter.py Project: yezhiweicmss/gpdb
def process_data(dump_schemas, dump_tables, fdin, fdout, change_schema=None, schema_level_restore_list=None):
    schema, table, schema_wo_escaping = None, None, None
    output = False
    #PYTHON PERFORMANCE IS TRICKY .... THIS CODE IS LIKE THIS BECAUSE ITS FAST
    for line in fdin:
        if (line[0] == set_start) and line.startswith(search_path_expr):
            schema = extract_schema(line)
            schema_wo_escaping = removeEscapingDoubleQuoteInSQLString(schema, False)
            if ((dump_schemas and schema_wo_escaping in dump_schemas) or
                (schema_level_restore_list and schema_wo_escaping in schema_level_restore_list)):
                if change_schema:
                    # The new schema name may contain special characters (whitespace, double quotes),
                    # so quote it. If the original schema name is already quoted in the line,
                    # replace the quoted form; otherwise replace the bare name.
                    quoted_schema = '"' + schema + '"'
                    if quoted_schema in line:
                        line = line.replace(quoted_schema, escapeDoubleQuoteInSQLString(change_schema))
                    else:
                        line = line.replace(schema, escapeDoubleQuoteInSQLString(change_schema))
                else:
                    schema = schema_wo_escaping
                fdout.write(line)
        elif (line[0] == copy_start) and line.startswith(copy_expr) and line.endswith(copy_expr_end):
            table = extract_table(line)
            table = removeEscapingDoubleQuoteInSQLString(table, False)
            if (schema_level_restore_list and schema_wo_escaping in schema_level_restore_list) or (dump_tables and (schema_wo_escaping, table) in dump_tables):
                output = True
        elif output and (line[0] == copy_end_start) and line.startswith(copy_end_expr):
            table = None
            output = False
            fdout.write(line)

        if output:
            fdout.write(line)
Code example #4
    def get_delete_sql(self, oids):
        escaped_catalog_name = escapeDoubleQuoteInSQLString(self._catalog_name)
        escaped_pk_name = escapeDoubleQuoteInSQLString(self._pk_name)

        delete_sql = 'BEGIN;set allow_system_table_mods="dml";delete from {0} where {1} in ({2});COMMIT;'

        return delete_sql.format(escaped_catalog_name, escaped_pk_name,
                                 ','.join(str(oid) for oid in oids))
Code example #5
def _handle_change_schema(schema_to_replace, change_schema, line):
    if change_schema and len(change_schema) > 0:
        # The new schema name may contain special characters (whitespace, double quotes),
        # so quote it. If the original schema name is already quoted in the line,
        # replace the quoted form; otherwise replace the bare name.
        quoted_schema = '"' + schema_to_replace + '"'
        if quoted_schema in line:
            line = line.replace(quoted_schema,
                                escapeDoubleQuoteInSQLString(change_schema))
        else:
            line = line.replace(schema_to_replace,
                                escapeDoubleQuoteInSQLString(change_schema))
    return line
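
A small usage sketch for the helper above; the input line and both schema names are made up for illustration.

# Hypothetical call: rewrite a dump's search_path line to point at a new schema.
line = 'SET search_path = "old schema", pg_catalog;\n'
line = _handle_change_schema('old schema', 'new schema', line)
# Assuming escapeDoubleQuoteInSQLString quotes its argument as sketched earlier,
# line is now: SET search_path = "new schema", pg_catalog;
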
Code example #6
    def drop_leaked_schemas(self, db_connection):
        leaked_schemas = self.__get_leaked_schemas(db_connection)
        for leaked_schema in leaked_schemas:
            escaped_schema_name = escapeDoubleQuoteInSQLString(leaked_schema)
            db_connection.query('DROP SCHEMA IF EXISTS %s CASCADE;' %
                                (escaped_schema_name))
        return leaked_schemas
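
Escaping matters here because a leaked schema name can itself contain a double quote. A quick illustration of the statement the loop above would issue, under the same quoting assumption (the schema name is made up):

# Hypothetical leaked schema name that contains a double quote.
leaked_schema = 'pg_temp_1"bad'
# escapeDoubleQuoteInSQLString is assumed to turn it into "pg_temp_1""bad",
# so the query sent to the server would be:
#   DROP SCHEMA IF EXISTS "pg_temp_1""bad" CASCADE;
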
Code example #7
    def __init__(self, catalog_table_obj, issues, pk_name):
        self.catalog_table_obj = catalog_table_obj
        catalog_name = self.catalog_table_obj.getTableName()
        self._escaped_catalog_name = escapeDoubleQuoteInSQLString(catalog_name)
        self._issues = issues
        self._pk_name = pk_name
Code example #8
    def __init__(self, catalog_table_obj, issues, pk_name):
        self.catalog_table_obj = catalog_table_obj
        catalog_name = self.catalog_table_obj.getTableName()
        self._escaped_catalog_name = escapeDoubleQuoteInSQLString(catalog_name)
        self._issues = issues
        self._pk_name = pk_name
Code example #9
File: gprestore_filter.py Project: AnLingm/gpdb
def process_schema(dump_schemas, dump_tables, fdin, fdout, change_schema=None, schema_level_restore_list=None):
    """
    Filter the dump file line by line from restore
    dump_schemas: set of schemas to restore
    dump_tables: set of (schema, table) tuple to restore
    fdin: stdin from dump file
    fdout: to write filtered content to stdout
    change_schema: different schema name to restore into
    schema_level_restore_list: list of schemas to restore all tables under them
    """

    schema, table = None, None
    line_buff = ''

    # to help decide whether or not to filter out
    output = False

    # to help exclude SET clause within a function's ddl statement
    function_ddl = False

    further_investigation_required = False
    search_path = True
    passedDropSchemaSection = False

    for line in fdin:
        if search_path and (line[0] == set_start) and line.startswith(search_path_expr):
            further_investigation_required = False
            # schema in set search_path line is already escaped in dump file
            schema = extract_schema(line)
            schema_wo_escaping = removeEscapingDoubleQuoteInSQLString(schema, False)
            if (dump_schemas and schema_wo_escaping in dump_schemas or
                schema_level_restore_list and schema_wo_escaping in schema_level_restore_list):
                if change_schema and len(change_schema) > 0:
                    # The new schema name may contain special characters (whitespace, double quotes),
                    # so quote it. If the original schema name is already quoted in the line,
                    # replace the quoted form; otherwise replace the bare name.
                    quoted_schema = '"' + schema + '"'
                    if quoted_schema in line:
                        line = line.replace(quoted_schema, escapeDoubleQuoteInSQLString(change_schema))
                    else:
                        line = line.replace(schema, escapeDoubleQuoteInSQLString(change_schema))
                output = True
                search_path = False
            else:
                output = False
        # set_assignment must be in the line to filter out dump line: SET SUBPARTITION TEMPLATE
        elif (line[0] == set_start) and line.startswith(set_expr) and set_assignment in line and not function_ddl:
            output = True
        elif (line[0] == drop_start) and line.startswith(drop_expr):
            if line.startswith(drop_table_expr) or line.startswith(drop_external_table_expr):
                if passedDropSchemaSection:
                    output = False
                else:
                    if line.startswith(drop_table_expr):
                        output = check_dropped_table(line, dump_tables, schema_level_restore_list, drop_table_expr)
                    else:
                        output = check_dropped_table(line, dump_tables, schema_level_restore_list, drop_external_table_expr)

            else:
                output = False
        elif line[:2] == comment_start_expr and line.startswith(comment_expr):
            # Parse the line using get_table_info for SCHEMA relation type as well,
            # if type is SCHEMA, then the value of name returned is schema's name, and returned schema is represented by '-'
            name, type, schema = get_table_info(line, comment_expr)
            output = False
            function_ddl = False
            passedDropSchemaSection = True
            if type in ['TABLE', 'EXTERNAL TABLE']:
                further_investigation_required = False
                output = check_valid_table(schema, name, dump_tables, schema_level_restore_list)
                if output:
                    search_path = True
            elif type in ['CONSTRAINT']:
                further_investigation_required = True
                if (dump_schemas and schema in dump_schemas) or (schema_level_restore_list and schema in schema_level_restore_list):
                    line_buff = line 
            elif type in ['ACL']:
                output = check_valid_table(schema, name, dump_tables, schema_level_restore_list)
                if output:
                    search_path = True
            elif type in ['SCHEMA']:
                output = check_valid_schema(name, dump_schemas, schema_level_restore_list)
                if output:
                    search_path = True
            elif type in ['FUNCTION']:
                function_ddl = True
        elif (line[:2] == comment_start_expr) and (line.startswith(comment_data_expr_a) or line.startswith(comment_data_expr_b)):
            passedDropSchemaSection = True
            further_investigation_required = False
            if line.startswith(comment_data_expr_a):
                name, type, schema = get_table_info(line, comment_data_expr_a)
            else:
                name, type, schema = get_table_info(line, comment_data_expr_b)
            if type == 'TABLE DATA':
                output = check_valid_table(schema, name, dump_tables, schema_level_restore_list)
                if output:
                    search_path = True
            else:
                output = False  
        elif further_investigation_required:
            if line.startswith(alter_table_only_expr) or line.startswith(alter_table_expr):
                further_investigation_required = False
                # Get the full qualified table name with the correct split
                if line.startswith(alter_table_only_expr):
                    tablename = get_table_from_alter_table(line, alter_table_only_expr)
                else:
                    tablename = get_table_from_alter_table(line, alter_table_expr)
                tablename = checkAndRemoveEnclosingDoubleQuote(tablename)
                tablename = removeEscapingDoubleQuoteInSQLString(tablename, False)
                output = check_valid_table(schema, tablename, dump_tables, schema_level_restore_list)
                if output:
                    if line_buff:
                        fdout.write(line_buff)
                        line_buff = ''
                    search_path = True
        else:
            further_investigation_required = False

        if output:
            fdout.write(line)
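
process_schema defers the actual include/exclude decisions to check_valid_schema and check_valid_table, which are not part of this excerpt. A plausible sketch of those checks, inferred from how they are called above:

def check_valid_schema(name, dump_schemas, schema_level_restore_list=None):
    # Assumed: a schema is kept if it was requested explicitly or if every
    # table under it is being restored.
    return bool((dump_schemas and name in dump_schemas) or
                (schema_level_restore_list and name in schema_level_restore_list))

def check_valid_table(schema, name, dump_tables, schema_level_restore_list=None):
    # Assumed: a table is kept if its whole schema is being restored or if the
    # (schema, table) pair was requested explicitly.
    return bool((schema_level_restore_list and schema in schema_level_restore_list) or
                (dump_tables and (schema, name) in dump_tables))
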
Code example #10
def process_schema(dump_schemas,
                   dump_tables,
                   fdin,
                   fdout,
                   change_schema=None,
                   schema_level_restore_list=None):
    """
    Filter the dump file line by line from restore
    dump_schemas: set of schemas to restore
    dump_tables: set of (schema, table) tuple to restore
    fdin: stdin from dump file
    fdout: to write filtered content to stdout
    change_schema: different schema name to restore into
    schema_level_restore_list: list of schemas to restore all tables under them
    """

    schema, table = None, None
    line_buff = ''

    # to help decide whether or not to filter out
    output = False

    # to help exclude SET clause within a function's ddl statement
    function_ddl = False

    further_investigation_required = False
    search_path = True
    passedDropSchemaSection = False

    for line in fdin:
        if search_path and (
                line[0] == set_start) and line.startswith(search_path_expr):
            further_investigation_required = False
            # schema in set search_path line is already escaped in dump file
            schema = extract_schema(line)
            schema_wo_escaping = removeEscapingDoubleQuoteInSQLString(
                schema, False)
            if (dump_schemas and schema_wo_escaping in dump_schemas
                    or schema_level_restore_list
                    and schema_wo_escaping in schema_level_restore_list):
                if change_schema and len(change_schema) > 0:
                    # The new schema name may contain special characters (whitespace, double quotes),
                    # so quote it. If the original schema name is already quoted in the line,
                    # replace the quoted form; otherwise replace the bare name.
                    quoted_schema = '"' + schema + '"'
                    if quoted_schema in line:
                        line = line.replace(
                            quoted_schema,
                            escapeDoubleQuoteInSQLString(change_schema))
                    else:
                        line = line.replace(
                            schema,
                            escapeDoubleQuoteInSQLString(change_schema))
                output = True
                search_path = False
            else:
                output = False
        # set_assignment must be in the line to filter out dump line: SET SUBPARTITION TEMPLATE
        elif (line[0] == set_start) and line.startswith(
                set_expr) and set_assignment in line and not function_ddl:
            output = True
        elif (line[0] == drop_start) and line.startswith(drop_expr):
            if line.startswith(drop_table_expr) or line.startswith(
                    drop_external_table_expr):
                if passedDropSchemaSection:
                    output = False
                else:
                    if line.startswith(drop_table_expr):
                        output = check_dropped_table(
                            line, dump_tables, schema_level_restore_list,
                            drop_table_expr)
                    else:
                        output = check_dropped_table(
                            line, dump_tables, schema_level_restore_list,
                            drop_external_table_expr)

            else:
                output = False
        elif line[:2] == comment_start_expr and line.startswith(comment_expr):
            # Parse the line using get_table_info for SCHEMA relation type as well,
            # if type is SCHEMA, then the value of name returned is schema's name, and returned schema is represented by '-'
            name, type, schema = get_table_info(line, comment_expr)
            output = False
            function_ddl = False
            passedDropSchemaSection = True
            if type in ['TABLE', 'EXTERNAL TABLE']:
                further_investigation_required = False
                output = check_valid_table(schema, name, dump_tables,
                                           schema_level_restore_list)
                if output:
                    search_path = True
            elif type in ['CONSTRAINT']:
                further_investigation_required = True
                if (dump_schemas and schema in dump_schemas) or (
                        schema_level_restore_list
                        and schema in schema_level_restore_list):
                    line_buff = line
            elif type in ['ACL']:
                output = check_valid_table(schema, name, dump_tables,
                                           schema_level_restore_list)
                if output:
                    search_path = True
            elif type in ['SCHEMA']:
                output = check_valid_schema(name, dump_schemas,
                                            schema_level_restore_list)
                if output:
                    search_path = True
            elif type in ['FUNCTION']:
                function_ddl = True
        elif (line[:2] == comment_start_expr) and (
                line.startswith(comment_data_expr_a)
                or line.startswith(comment_data_expr_b)):
            passedDropSchemaSection = True
            further_investigation_required = False
            if line.startswith(comment_data_expr_a):
                name, type, schema = get_table_info(line, comment_data_expr_a)
            else:
                name, type, schema = get_table_info(line, comment_data_expr_b)
            if type == 'TABLE DATA':
                output = check_valid_table(schema, name, dump_tables,
                                           schema_level_restore_list)
                if output:
                    search_path = True
            else:
                output = False
        elif further_investigation_required:
            if line.startswith(alter_table_only_expr) or line.startswith(
                    alter_table_expr):
                further_investigation_required = False
                # Get the full qualified table name with the correct split
                if line.startswith(alter_table_only_expr):
                    tablename = get_table_from_alter_table(
                        line, alter_table_only_expr)
                else:
                    tablename = get_table_from_alter_table(
                        line, alter_table_expr)
                tablename = checkAndRemoveEnclosingDoubleQuote(tablename)
                tablename = removeEscapingDoubleQuoteInSQLString(
                    tablename, False)
                output = check_valid_table(schema, tablename, dump_tables,
                                           schema_level_restore_list)
                if output:
                    if line_buff:
                        fdout.write(line_buff)
                        line_buff = ''
                    search_path = True
        else:
            further_investigation_required = False

        if output:
            fdout.write(line)
Code example #11
File: gprestore_filter.py Project: 50wu/gpdb
def process_schema(dump_schemas, dump_tables, fdin, fdout, change_schema=None, schema_level_restore_list=None):
    """
    Filter the dump file line by line from restore
    dump_schemas: set of schemas to restore
    dump_tables: set of (schema, table) tuple to restore
    fdin: stdin from dump file
    fdout: to write filtered content to stdout
    change_schema: different schema name to restore into
    schema_level_restore_list: list of schemas to restore all tables under them
    """

    schema, table = None, None
    line_buff = ''

    # to help decide whether or not to filter out
    output = False

    # to help exclude SET clause within a function's ddl statement
    function_ddl = False

    further_investigation_required = False
    # we need to set search_path to true after every ddl change due to the
    # fact that the schema "set search_path" may change on the next ddl command
    search_path = True
    passedDropSchemaSection = False

    cast_func_schema = None
    change_cast_func_schema = False

    in_block = False

    for line in fdin:
        # NOTE: We are checking the first character before actually verifying
        # the line with "startswith" due to the performance gain.
        if in_block:
            output = True
        elif (line[0] == begin_start) and line.startswith(begin_expr):
            in_block = True
            output = True
        elif (line[0] == end_start) and line.startswith(end_expr):
            in_block = False
            output = True
        elif search_path and (line[0] == set_start) and line.startswith(search_path_expr):
            # NOTE: The goal is to output the correct mapping to the search path
            # for the schema

            further_investigation_required = False
            # schema in set search_path line is already escaped in dump file
            schema = extract_schema(line)
            schema_wo_escaping = removeEscapingDoubleQuoteInSQLString(schema, False)
            if schema == "pg_catalog":
                output = True
            elif (dump_schemas and schema_wo_escaping in dump_schemas or
                schema_level_restore_list and schema_wo_escaping in schema_level_restore_list):
                if change_schema and len(change_schema) > 0:
                    # The new schema name may contain special characters (whitespace, double quotes),
                    # so quote it. If the original schema name is already quoted in the line,
                    # replace the quoted form; otherwise replace the bare name.
                    quoted_schema = '"' + schema + '"'
                    if quoted_schema in line:
                        line = line.replace(quoted_schema, escapeDoubleQuoteInSQLString(change_schema))
                    else:
                        line = line.replace(schema, escapeDoubleQuoteInSQLString(change_schema))
                cast_func_schema = schema # Save the schema in case we need to replace a cast's function's schema later
                output = True
                search_path = False
            else:
                output = False
        # set_assignment must be in the line to filter out dump line: SET SUBPARTITION TEMPLATE
        elif (line[0] == set_start) and line.startswith(set_expr) and set_assignment in line and not function_ddl:
            output = True
        elif (line[0] == drop_start) and line.startswith(drop_expr):
            if line.startswith(drop_table_expr) or line.startswith(drop_external_table_expr):
                if passedDropSchemaSection:
                    output = False
                else:
                    if line.startswith(drop_table_expr):
                        output = check_dropped_table(line, dump_tables, schema_level_restore_list, drop_table_expr)
                    else:
                        output = check_dropped_table(line, dump_tables, schema_level_restore_list,
                                                     drop_external_table_expr)
            else:
                output = False
        elif line[:2] == comment_start_expr and line.startswith(comment_expr):
            # Parse the line using get_table_info for SCHEMA relation type as well,
            # if type is SCHEMA, then the value of name returned is schema's name, and returned schema is represented by '-'
            name, type, schema = get_table_info(line, comment_expr)
            output = False
            function_ddl = False
            passedDropSchemaSection = True

            if type in ['SCHEMA']:
                # Make sure that schemas are created before restoring the desired tables.
                output = check_valid_schema(name, dump_schemas, schema_level_restore_list)
            elif type in ['TABLE', 'EXTERNAL TABLE', 'VIEW', 'SEQUENCE']:
                further_investigation_required = False
                output = check_valid_relname(schema, name, dump_tables, schema_level_restore_list)
            elif type in ['CONSTRAINT']:
                further_investigation_required = True
                if check_valid_schema(schema, dump_schemas, schema_level_restore_list):
                    line_buff = line
            elif type in ['ACL']:
                output = check_valid_relname(schema, name, dump_tables, schema_level_restore_list)
            elif type in ['FUNCTION']:
                function_ddl = True
                output = check_valid_schema(schema, dump_schemas, schema_level_restore_list)
            elif type in ['CAST', 'PROCEDURAL LANGUAGE']: # Restored to pg_catalog, so always filtered in
                output = True
                change_cast_func_schema = True # When changing schemas, we need to ensure that functions used in casts reference the new schema

            if output:
                search_path = True

        elif (line[:2] == comment_start_expr) and (line.startswith(comment_data_expr_a) or line.startswith(comment_data_expr_b)):
            passedDropSchemaSection = True
            further_investigation_required = False
            if line.startswith(comment_data_expr_a):
                name, type, schema = get_table_info(line, comment_data_expr_a)
            else:
                name, type, schema = get_table_info(line, comment_data_expr_b)
            if type == 'TABLE DATA':
                output = check_valid_relname(schema, name, dump_tables, schema_level_restore_list)
                if output:
                    search_path = True
            else:
                output = False
        elif further_investigation_required:
            if line.startswith(alter_table_only_expr) or line.startswith(alter_table_expr):
                further_investigation_required = False

                # Get the full qualified table name with the correct split
                if line.startswith(alter_table_only_expr):
                    tablename = get_table_from_alter_table(line, alter_table_only_expr)
                else:
                    tablename = get_table_from_alter_table(line, alter_table_expr)

                tablename = checkAndRemoveEnclosingDoubleQuote(tablename)
                tablename = removeEscapingDoubleQuoteInSQLString(tablename, False)
                output = check_valid_relname(schema, tablename, dump_tables, schema_level_restore_list)

                if output:
                    if line_buff:
                        fdout.write(line_buff)
                        line_buff = ''
                    search_path = True
        elif change_cast_func_schema:
            if "CREATE CAST" in line and "WITH FUNCTION" in line:
                change_cast_func_schema = False
                if change_schema and len(change_schema) > 0:
                    quoted_schema = '"' + cast_func_schema + '"'
                    if quoted_schema in line:
                        line = line.replace(quoted_schema, escapeDoubleQuoteInSQLString(change_schema))
                    else:
                        line = line.replace(cast_func_schema, escapeDoubleQuoteInSQLString(change_schema))
                cast_func_schema = None
        else:
            further_investigation_required = False

        if output:
            fdout.write(line)
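
This variant (and the ones above) also depends on get_table_info, which reads pg_dump's object-header comments such as "-- Name: foo; Type: TABLE; Schema: public; Owner: gpadmin". A hypothetical sketch of that parsing, assuming comment_expr is the '-- Name: ' prefix:

def get_table_info(line, comment_expr):
    # Hypothetical parser for a pg_dump header comment. For
    #   -- Name: foo; Type: TABLE; Schema: public; Owner: gpadmin
    # it returns ('foo', 'TABLE', 'public').
    fields = line[len(comment_expr):].strip().split('; ')
    if len(fields) < 3:
        return None, None, None
    name = fields[0]
    type = fields[1].partition(': ')[2]
    schema = fields[2].partition(': ')[2]
    return name, type, schema
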
Code example #12
File: leaked_schema_dropper.py Project: 50wu/gpdb
    def drop_leaked_schemas(self, db_connection):
        leaked_schemas = self.__get_leaked_schemas(db_connection)
        for leaked_schema in leaked_schemas:
            escaped_schema_name = escapeDoubleQuoteInSQLString(leaked_schema)
            db_connection.query('DROP SCHEMA IF EXISTS %s CASCADE;' % (escaped_schema_name))
        return leaked_schemas
Code example #13
def process_schema(dump_schemas, dump_tables, fdin, fdout, change_schema_name=None, schema_level_restore_list=None):
    """
    Filter the dump file line by line from restore
    dump_schemas: set of schemas to restore
    dump_tables: set of (schema, table) tuple to restore
    fdin: stdin from dump file
    fdout: to write filtered content to stdout
    change_schema_name: different schema name to restore
    schema_level_restore_list: list of schemas to restore all tables under them
    """
    schema = None
    schema_wo_escaping = None
    type = None
    schema_buff = ""
    output = False
    further_investigation_required = False
    search_path = False
    line_buf = None
    for line in fdin:
        if (line[0] == set_start) and line.startswith(search_path_expr):
            output = False
            further_investigation_required = False
            # schema in set search_path line is already escaped in dump file
            schema = extract_schema(line)
            schema_wo_escaping = removeEscapingDoubleQuoteInSQLString(schema, False)
            if (dump_schemas and schema_wo_escaping in dump_schemas) or (
                schema_level_restore_list and schema_wo_escaping in schema_level_restore_list
            ):
                if change_schema_name and len(change_schema_name) > 0:
                    # The new schema name may contain special characters (whitespace, double quotes),
                    # so quote it. If the original schema name is already quoted in the line,
                    # replace the quoted form; otherwise replace the bare name.
                    quoted_schema = '"' + schema + '"'
                    if quoted_schema in line:
                        line = line.replace(quoted_schema, escapeDoubleQuoteInSQLString(change_schema_name))
                    else:
                        line = line.replace(schema, escapeDoubleQuoteInSQLString(change_schema_name))
                search_path = True
                schema_buff = line
        elif (line[0] == set_start) and line.startswith(set_expr):
            output = True
        elif line[:2] == comment_start_expr:
            if line.startswith(comment_expr):
                type = get_type(line)
            output = False
        elif type and (line[:7] == "CREATE " or line[:8] == "REPLACE "):
            if type == "RULE":
                output = check_table(
                    schema_wo_escaping, line, " TO ", dump_tables, schema_level_restore_list, is_rule=True
                )
            elif type == "INDEX":
                output = check_table(schema_wo_escaping, line, " ON ", dump_tables, schema_level_restore_list)
            elif type == "TRIGGER":
                line_buf = line
                further_investigation_required = True
        elif type and type in ["CONSTRAINT", "FK CONSTRAINT"] and line[:12] == "ALTER TABLE ":
            if line.startswith("ALTER TABLE ONLY"):
                output = check_table(schema_wo_escaping, line, " ONLY ", dump_tables, schema_level_restore_list)
            else:
                output = check_table(schema_wo_escaping, line, " TABLE ", dump_tables, schema_level_restore_list)
        elif further_investigation_required:
            if type == "TRIGGER":
                output = check_table(schema_wo_escaping, line, " ON ", dump_tables, schema_level_restore_list)
                if not output:
                    line_buf = None
                further_investigation_required = False

        if output:
            if search_path:
                fdout.write(schema_buff)
                schema_buff = None
                search_path = False
            if line_buf:
                fdout.write(line_buf)
                line_buf = None
            fdout.write(line)
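
check_table, used throughout this last pair of examples, is also outside the excerpt. A rough sketch of how such a check might resolve the table name that follows the given keyword and test it against the restore sets; the details are guesses and the is_rule flag is accepted but ignored here:

def check_table(schema, line, keyword, dump_tables, schema_level_restore_list=None, is_rule=False):
    # Assumed behavior: anything in a schema on the schema-level list is kept;
    # otherwise take the identifier right after the keyword (' ON ', ' TO ',
    # ' TABLE ', ' ONLY ') and keep the line only if (schema, table) was requested.
    if schema_level_restore_list and schema in schema_level_restore_list:
        return True
    if not dump_tables or keyword not in line:
        return False
    table = line.split(keyword, 1)[1].split()[0].rstrip(';')
    table = table.strip('"').replace('""', '"')   # undo identifier quoting/escaping
    return (schema, table) in dump_tables
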
Code example #14
def process_schema(dump_schemas,
                   dump_tables,
                   fdin,
                   fdout,
                   change_schema_name=None,
                   schema_level_restore_list=None):
    """
    Filter the dump file line by line from restore
    dump_schemas: set of schemas to restore
    dump_tables: set of (schema, table) tuple to restore
    fdin: stdin from dump file
    fdout: to write filtered content to stdout
    change_schema_name: different schema name to restore
    schema_level_restore_list: list of schemas to restore all tables under them
    """
    schema = None
    schema_wo_escaping = None
    type = None
    schema_buff = ''
    output = False
    further_investigation_required = False
    search_path = False
    line_buf = None
    for line in fdin:
        if (line[0] == set_start) and line.startswith(search_path_expr):
            output = False
            further_investigation_required = False
            # schema in set search_path line is already escaped in dump file
            schema = extract_schema(line)
            schema_wo_escaping = removeEscapingDoubleQuoteInSQLString(
                schema, False)
            if ((dump_schemas and schema_wo_escaping in dump_schemas)
                    or (schema_level_restore_list
                        and schema_wo_escaping in schema_level_restore_list)):
                if change_schema_name and len(change_schema_name) > 0:
                    # The new schema name may contain special characters (whitespace, double quotes),
                    # so quote it. If the original schema name is already quoted in the line,
                    # replace the quoted form; otherwise replace the bare name.
                    quoted_schema = '"' + schema + '"'
                    if quoted_schema in line:
                        line = line.replace(
                            quoted_schema,
                            escapeDoubleQuoteInSQLString(change_schema_name))
                    else:
                        line = line.replace(
                            schema,
                            escapeDoubleQuoteInSQLString(change_schema_name))
                search_path = True
                schema_buff = line
        elif (line[0] == set_start) and line.startswith(set_expr):
            output = True
        elif line[:2] == comment_start_expr:
            if line.startswith(comment_expr):
                type = get_type(line)
            output = False
        elif type and (line[:7] == 'CREATE ' or line[:8] == 'REPLACE '):
            if type == 'RULE':
                output = check_table(schema_wo_escaping,
                                     line,
                                     ' TO ',
                                     dump_tables,
                                     schema_level_restore_list,
                                     is_rule=True)
            elif type == 'INDEX':
                output = check_table(schema_wo_escaping, line, ' ON ',
                                     dump_tables, schema_level_restore_list)
            elif type == 'TRIGGER':
                line_buf = line
                further_investigation_required = True
        elif type and type in ['CONSTRAINT', 'FK CONSTRAINT'
                               ] and line[:12] == 'ALTER TABLE ':
            if line.startswith('ALTER TABLE ONLY'):
                output = check_table(schema_wo_escaping, line, ' ONLY ',
                                     dump_tables, schema_level_restore_list)
            else:
                output = check_table(schema_wo_escaping, line, ' TABLE ',
                                     dump_tables, schema_level_restore_list)
        elif further_investigation_required:
            if type == 'TRIGGER':
                output = check_table(schema_wo_escaping, line, ' ON ',
                                     dump_tables, schema_level_restore_list)
                if not output:
                    line_buf = None
                further_investigation_required = False

        if output:
            if search_path:
                fdout.write(schema_buff)
                schema_buff = None
                search_path = False
            if line_buf:
                fdout.write(line_buf)
                line_buf = None
            fdout.write(line)