Example #1
def get_routine_privs(server, db_name, routine_name):
    """ Get the list of grantees and their privileges for a routine.

    server[in]          Instance of Server class, where the query will be
                        executed.
    db_name[in]         Name of the database to which the routine belongs.
    routine_name[in]    Name of the routine to check.

    Returns list of tuples (<GRANTEE>, <SET OF GRANTS>).
    """
    tpl_lst = []
    # Remove backticks if necessary
    if is_quoted_with_backticks(db_name):
        db_name = remove_backtick_quoting(db_name)
    if is_quoted_with_backticks(routine_name):
        routine_name = remove_backtick_quoting(routine_name)

    # Build query
    query = _PROCS_PRIV_QUERY.format(db_name, routine_name)
    res = server.exec_query(query)
    for user, host, grants in res:
        grants = set((grant.upper() for grant in grants.split(',')))
        # remove USAGE privilege since it does nothing.
        grants.discard('USAGE')
        if grants:
            tpl_lst.append(("'{0}'@'{1}'".format(user, host), grants))
    return tpl_lst
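A minimal usage sketch for the routine above; the connected Server instance and the database/procedure names are placeholders, not part of the original example:

# Hypothetical call: list which grantees hold privileges on a stored procedure.
for grantee, grants in get_routine_privs(server, 'util_test', 'p1'):
    print("{0}: {1}".format(grantee, ", ".join(sorted(grants))))
# Each item has the shape ("'user'@'host'", set(['EXECUTE', 'ALTER ROUTINE', ...]))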
Example #2
def get_table_privs(server, db_name, table_name):
    """ Get the list of grantees and their privileges for a specific table.

    server[in]          Instance of Server class, where the query will be
                        executed.
    db_name[in]         Name of the database to which the table belongs.
    table_name[in]      Name of the table to check.

    Returns list of tuples (<Grantee>, <SET OF GRANTS>).
    """
    tpl_lst = []
    # Remove backticks if necessary
    if is_quoted_with_backticks(db_name):
        db_name = remove_backtick_quoting(db_name)
    if is_quoted_with_backticks(table_name):
        table_name = remove_backtick_quoting(table_name)

    # Build query
    query = _TABLE_PRIV_QUERY.format(db_name, table_name)
    res = server.exec_query(query)
    for grantee, grant_option, grants in res:
        grants = set((grant.upper() for grant in grants.split(',')))
        # remove USAGE privilege since it does nothing.
        grants.discard('USAGE')
        if grants:
            if 'Y' in grant_option.upper():
                grants.add('GRANT OPTION')
            tpl_lst.append((grantee, grants))

    return tpl_lst
Example #3
    def get_create_statement(self, db, name, obj_type):
        """Return the create statement for the object

        db[in]             Database name
        name[in]           Name of the object
        obj_type[in]       Object type (string) e.g. DATABASE
                           Note: this is used to form the correct SHOW command

        Returns create statement
        """

        row = None
        q_name = name if is_quoted_with_backticks(name) else quote_with_backticks(name)
        if obj_type == _DATABASE:
            name_str = q_name
        else:
            q_db = db if is_quoted_with_backticks(db) else quote_with_backticks(db)
            name_str = q_db + "." + q_name
        row = self.source.exec_query("SHOW CREATE %s %s" % (obj_type, name_str))

        create_statement = None
        if row:
            if obj_type == _TABLE or obj_type == _VIEW or obj_type == _DATABASE:
                create_statement = row[0][1]
            elif obj_type == _EVENT:
                create_statement = row[0][3]
            else:
                create_statement = row[0][2]
        return create_statement
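A hedged usage sketch; db_inst stands for an already-initialized Database-like object exposing this method, and the object names are illustrative:

# The column holding the CREATE text depends on the object type, as handled
# above: index 1 for TABLE/VIEW/DATABASE, 3 for EVENT, 2 for the rest.
create_tbl = db_inst.get_create_statement('util_test', 't1', 'TABLE')
create_evt = db_inst.get_create_statement('util_test', 'e1', 'EVENT')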
Example #4
def database_diff(server1_val, server2_val, db1, db2, options):
    """Find differences among objects from two databases.
    
    This method compares the object definitions among two databases. If any
    differences are found, the differences are printed in the format chosen
    and the method returns False. A True result is returned only when all
    object definitions match.
    
    The method will stop and return False on the first difference found unless
    the option force is set to True (default = False).
    
    server1_val[in]    a dictionary containing connection information for the
                       first server including:
                       (user, password, host, port, socket)
    server2_val[in]    a dictionary containing connection information for the
                       second server including:
                       (user, password, host, port, socket)
    db1[in]            the first database in the compare
    db2[in]            the second database in the compare
    options[in]        a dictionary containing the options for the operation:
                       (quiet, verbosity, difftype, force)

    Returns bool True if all objects match, False if only a partial match
    """
    from mysql.utilities.common.dbcompare import get_common_objects
    from mysql.utilities.common.dbcompare import server_connect
    
    force = options.get("force", False)

    server1, server2 = server_connect(server1_val, server2_val,
                                      db1, db2, options)
    in_both, in_db1, in_db2 = get_common_objects(server1, server2,
                                                 db1, db2, True, options)
    in_both.sort()
    if (len(in_db1) > 0 or len(in_db2) > 0) and not force:
        return False
    
    # Do the diff for the databases themselves
    result = object_diff(server1, server2, db1, db2, options)
    if result is not None:
        success = False
        if not force:
            return False

    # For each that match, do object diff
    success = True
    for item in in_both:
        obj_name1 = quote_with_backticks(item[1][0]) \
                        if is_quoted_with_backticks(db1) else item[1][0]
        obj_name2 = quote_with_backticks(item[1][0]) \
                        if is_quoted_with_backticks(db2) else item[1][0]
        object1 = "%s.%s" % (db1, obj_name1)
        object2 = "%s.%s" % (db2, obj_name2)
        result = object_diff(server1, server2, object1, object2, options)
        if result is not None:
            success = False
            if not force:
                return False

    return success    
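A sketch of how the function might be driven; the connection values and the difftype value are placeholders, while the option keys follow the docstring above:

server1_val = {'user': 'root', 'password': 'secret', 'host': 'localhost',
               'port': 3306, 'socket': None}
server2_val = dict(server1_val, port=3307)
options = {'quiet': False, 'verbosity': 1, 'difftype': 'unified', 'force': True}
# With force=True the comparison continues past the first difference and the
# return value reports whether every object matched.
all_match = database_diff(server1_val, server2_val, 'db1', 'db2', options)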
Example #5
    def has_privilege(self, db, obj, access, allow_skip_grant_tables=True):
        """Check to see user has a specific access to a db.object.

        db[in]             Name of database
        obj[in]            Name of object
        access[in]         MySQL privilege to check (e.g. SELECT, SUPER, DROP)
        allow_skip_grant_tables[in]  If True, allow silent failure for
                           cases where the server is started with
                           --skip-grant-tables. Default=True

        Returns True if user has access, False if not
        """
        grants_enabled = self.server1.grant_tables_enabled()
        # If grants are disabled and it is Ok to allow skipped grant tables,
        # return True - privileges disabled so user can do anything.
        if allow_skip_grant_tables and not grants_enabled:
            return True
        # Convert privilege to upper cases.
        access = access.upper()

        # Get grant dictionary
        grant_dict = self.get_grants(globals_privs=True, as_dict=True)

        # If self has all privileges for all databases, no need to check,
        # simply return True
        if ("ALL PRIVILEGES" in grant_dict['*']['*'] and
                "GRANT OPTION" in grant_dict['*']['*']):
            return True

        # Quote db and obj with backticks if necessary
        if not is_quoted_with_backticks(db) and db != '*':
            db = quote_with_backticks(db)

        if not is_quoted_with_backticks(obj) and obj != '*':
            obj = quote_with_backticks(obj)

        # USAGE privilege is the same as no privileges,
        # so everyone has it.
        if access == "USAGE":
            return True
        # Even if we have ALL PRIVILEGES grant, we might not have WITH GRANT
        # OPTION privilege.
        # Check server wide grants.
        elif (access in grant_dict['*']['*'] or
                "ALL PRIVILEGES" in grant_dict['*']['*'] and
                access != "GRANT OPTION"):
            return True
        # Check database level grants.
        elif (access in grant_dict[db]['*'] or
                "ALL PRIVILEGES" in grant_dict[db]['*'] and
                access != "GRANT OPTION"):
            return True
        # Check object level grants.
        elif (access in grant_dict[db][obj] or
                "ALL PRIVILEGES" in grant_dict[db][obj] and
                access != "GRANT OPTION"):
            return True
        else:
            return False
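Illustrative calls, assuming user is an instance of the (User-like) class that defines this method:

can_shutdown = user.has_privilege('*', '*', 'SHUTDOWN')       # server-wide check
can_select = user.has_privilege('util_test', 't1', 'SELECT')  # object-level check
always_true = user.has_privilege('*', '*', 'USAGE')           # USAGE grants nothing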
Example #6
    def _parse_grant_statement(statement):
        """ Returns a namedtuple with the parsed GRANT information.

        statement[in] Grant string in the sql format returned by the server.

        Returns named tuple with GRANT information or None.
        """

        grant_parse_re = re.compile(r"""
            GRANT\s(.+)?\sON\s # grant or list of grants
            (?:(?:PROCEDURE\s)|(?:FUNCTION\s))? # optional for routines only
            (?:(?:(\*|`?[^']+`?)\.(\*|`?[^']+`?)) # object where grant applies
            | ('[^']*'@'[^']*')) # For proxy grants user/host
            \sTO\s([^@]+@[\S]+) # grantee
            (?:\sIDENTIFIED\sBY\sPASSWORD(?:\s\'[^\']+\')?)? # optional pwd
            (?:\sREQUIRE\sSSL)? # optional SSL
            (\sWITH\sGRANT\sOPTION)? # optional grant option
            $ # End of grant statement
            """, re.VERBOSE)

        grant_tpl_factory = namedtuple("grant_info", "privileges proxy_user "
                                                     "db object user")
        match = re.match(grant_parse_re, statement)

        if match:
            # quote database name and object name with backticks
            if match.group(1).upper() != 'PROXY':
                db = match.group(2)
                if not is_quoted_with_backticks(db) and db != '*':
                    db = quote_with_backticks(db)
                obj = match.group(3)
                if not is_quoted_with_backticks(obj) and obj != '*':
                    obj = quote_with_backticks(obj)
            else:  # It is a PROXY grant
                db = obj = None
            grants = grant_tpl_factory(
                # privileges
                set([priv.strip() for priv in match.group(1).split(",")]),
                match.group(4),  # proxied user
                db,  # database
                obj,  # object
                match.group(5),  # user
            )
            # If user has grant option, add it to the list of privileges
            if match.group(6) is not None:
                grants.privileges.add("GRANT OPTION")
        else:
            raise UtilError("Unable to parse grant statement "
                            "{0}".format(statement))

        return grants
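A sketch of the parser applied to a typical statement string as returned by SHOW GRANTS; in practice the helper is reached through the class that defines it:

stm = ("GRANT SELECT, INSERT ON `util_test`.* TO 'joe'@'localhost' "
       "WITH GRANT OPTION")
info = _parse_grant_statement(stm)
# info.privileges -> set(['SELECT', 'INSERT', 'GRANT OPTION'])
# info.db         -> '`util_test`'  (backtick-quoted by the parser)
# info.object     -> '*'
# info.user       -> "'joe'@'localhost'"
# info.proxy_user -> None for non-PROXY grants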
Example #7
    def drop(self, server, quiet, db_name=None):
        """Drop the database

        server[in]         A Server object
        quiet[in]          ignore error on drop
        db_name[in]        database name
                           (optional) If omitted, operation is performed
                           on the class instance table name.

        return True = database successfully dropped, False = error
        """

        db = None
        if db_name:
            db = db_name if is_quoted_with_backticks(db_name) else quote_with_backticks(db_name)
        else:
            db = self.q_db_name
        op_ok = False
        if quiet:
            try:
                res = server.exec_query("DROP DATABASE %s" % (db), self.query_options)
                op_ok = True
            except:
                pass
        else:
            res = server.exec_query("DROP DATABASE %s" % (db), self.query_options)
            op_ok = True
        return op_ok
Example #8
def _drop_compare_object(server, db_name, tbl_name):
    """Drop the compare object table
    
    server[in]             Server instance
    db_name[in]            database name
    tbl_name[in]           table name
    """
    # Quote compare table appropriately with backticks
    q_db_name = db_name if is_quoted_with_backticks(db_name) \
                        else quote_with_backticks(db_name)
    if is_quoted_with_backticks(tbl_name):
        q_tbl_name = remove_backtick_quoting(tbl_name)
    else:
        q_tbl_name = tbl_name
    q_tbl_name = quote_with_backticks(
                                _COMPARE_TABLE_NAME.format(tbl=q_tbl_name))

    try:
        server.exec_query(_COMPARE_TABLE_DROP.format(db=q_db_name,
                                                     compare_tbl=q_tbl_name))
    except:
        pass
Example #9
def _drop_compare_object(server, db_name, tbl_name):
    """Drop the compare object table
    
    server[in]             Server instance
    db_name[in]            database name
    tbl_name[in]           table name
    """
    # Quote compare table appropriately with backticks
    q_db_name = db_name if is_quoted_with_backticks(db_name) \
                        else quote_with_backticks(db_name)
    if is_quoted_with_backticks(tbl_name):
        q_tbl_name = remove_backtick_quoting(tbl_name)
    else:
        q_tbl_name = tbl_name
    q_tbl_name = quote_with_backticks(
        _COMPARE_TABLE_NAME.format(tbl=q_tbl_name))

    try:
        server.exec_query(
            _COMPARE_TABLE_DROP.format(db=q_db_name, compare_tbl=q_tbl_name))
    except:
        pass
Example #10
    def get_column_metadata(self, columns=None):
        """Get information about the table for the bulk insert operation.

        This method builds lists that describe the metadata of the table. This
        includes lists for:

          column names
          column format for building VALUES clause
          blob fields - for use in generating INSERT/UPDATE for blobs
          text fields - for use in checking for single quotes

        columns[in]        if None, use EXPLAIN else use column list.
        """

        if columns is None:
            columns = self.server.exec_query("explain %s" % self.q_table)
        stop = len(columns)
        self.column_names = []
        self.q_column_names = []
        col_format_values = [''] * stop
        if columns is not None:
            for col in range(0, stop):
                if is_quoted_with_backticks(columns[col][0]):
                    self.column_names.append(
                        remove_backtick_quoting(columns[col][0]))
                    self.q_column_names.append(columns[col][0])
                else:
                    self.column_names.append(columns[col][0])
                    self.q_column_names.append(
                        quote_with_backticks(columns[col][0]))
                col_type_prefix = columns[col][1][0:4].lower()
                if col_type_prefix in ('varc', 'char', 'enum', 'set('):
                    self.text_columns.append(col)
                    col_format_values[col] = "'%s'"
                elif col_type_prefix in ("blob", "text"):
                    self.blob_columns.append(col)
                    col_format_values[col] = "%s"
                elif col_type_prefix in ("date", "time"):
                    col_format_values[col] = "'%s'"
                else:
                    col_format_values[col] = "%s"
        self.column_format = "%s%s%s" % \
                             (" (", ', '.join(col_format_values), ")")
Example #11
    def create(self, server, db_name=None):
        """Create the database

        server[in]         A Server object
        db_name[in]        database name
                           (optional) If omitted, operation is performed
                           on the class instance table name.

        return True = database successfully created, False = error
        """

        db = None
        if db_name:
            db = db_name if is_quoted_with_backticks(db_name) else quote_with_backticks(db_name)
        else:
            db = self.q_db_name
        op_ok = False
        res = server.exec_query("CREATE DATABASE %s" % (db), self.query_options)
        op_ok = True
        return op_ok
Example #12
    def copy_data(self, destination, cloning=False, new_db=None, connections=1):
        """Retrieve data from a table and copy to another server and database.

        Reads data from a table and inserts the correct INSERT statements into
        the file provided.

        Note: if connections < 1, retrieve the data one row at a time.

        destination[in]    Destination server
        cloning[in]        If True, we are copying on the same server
        new_db[in]         Rename the db to this name
        connections[in]    Number of threads(connections) to use for insert
        """

        if new_db is None:
            new_db = self.q_db_name
        else:
            # If needed, quote the new_db identifier with backticks
            if not is_quoted_with_backticks(new_db):
                new_db = quote_with_backticks(new_db)

        num_conn = int(connections)

        if cloning:
            self._clone_data(new_db)
        else:
            # Read and copy the data
            pthreads = []
            for rows in self.retrieve_rows(num_conn):
                p = self.insert_rows(rows, new_db, destination, num_conn > 1)
                if p is not None:
                    p.start()
                    pthreads.append(p)
    
            if num_conn > 1:
                # Wait for all to finish
                num_complete = 0
                while num_complete < len(pthreads):
                    for p in pthreads:
                        if not p.is_alive():
                            num_complete += 1
Example #13
    def __init__(self, source, name, options={}):
        """Constructor

        source[in]         A Server object
        name[in]           Name of database
        options[in]        Dictionary of options controlling what is included
                           and how operations perform (e.g., verbose)
        """
        self.source = source
        # Keep database identifier considering backtick quotes
        if is_quoted_with_backticks(name):
            self.q_db_name = name
            self.db_name = remove_backtick_quoting(self.q_db_name)
        else:
            self.db_name = name
            self.q_db_name = quote_with_backticks(self.db_name)
        self.verbose = options.get("verbose", False)
        self.skip_tables = options.get("skip_tables", False)
        self.skip_views = options.get("skip_views", False)
        self.skip_triggers = options.get("skip_triggers", False)
        self.skip_procs = options.get("skip_procs", False)
        self.skip_funcs = options.get("skip_funcs", False)
        self.skip_events = options.get("skip_events", False)
        self.skip_grants = options.get("skip_grants", False)
        self.skip_create = options.get("skip_create", False)
        self.skip_data = options.get("skip_data", False)
        self.exclude_patterns = options.get("exclude_patterns", None)
        self.use_regexp = options.get("use_regexp", False)
        self.new_db = None
        self.q_new_db = None
        self.init_called = False
        self.destination = None  # Used for copy mode
        self.cloning = False  # Used for clone mode
        self.query_options = {"fetch": False}  # Used for skipping fetch of rows

        self.objects = []
        self.new_objects = []
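A small sketch of the identifier handling in this constructor (assumed to belong to the Database class); the server object and database names are placeholders:

db1 = Database(server, 'util_test')   # unquoted name
# db1.db_name == 'util_test'    db1.q_db_name == '`util_test`'
db2 = Database(server, '`my db`')     # already backtick-quoted
# db2.db_name == 'my db'        db2.q_db_name == '`my db`'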
Example #14
    def get_object_type(self, object_name):
        """Return the object type of an object
        
        This method attempts to locate the object name among the objects
        in the database. It returns the object type if found or None
        if not found.
        
        object_name[in]    Name of the object to find
        
        Returns (string) object type or None if not found
        """
        object_type = None

        # Remove object backticks if needed
        obj_name = remove_backtick_quoting(object_name) if is_quoted_with_backticks(object_name) else object_name

        res = self.source.exec_query(_OBJTYPE_QUERY % {"db_name": self.db_name, "obj_name": obj_name})

        if res:
            object_type = res[0][0]
            if object_type == "BASE TABLE":
                object_type = "TABLE"

        return object_type
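A brief usage sketch; db_inst is an assumed Database instance and the object name is illustrative:

obj_type = db_inst.get_object_type('t1')  # 'TABLE', 'VIEW', 'PROCEDURE', ... or None
if obj_type is None:
    print("# Object t1 not found in {0}".format(db_inst.db_name))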
Example #15
def _build_create_table(db_name, tbl_name, engine, columns, col_ref={}):
    """Build the CREATE TABLE command for a table.

    This method uses the data from the _read_next() method to build a
    table from its parts as read from a non-SQL formatted file.

    db_name[in]       Database name for the object
    tbl_name[in]      Name of the table
    engine[in]        Storage engine name for the table
    columns[in]       A list of the column definitions for the table
    col_ref[in]       A dictionary of column names/indexes

    Returns (string) the CREATE TABLE statement.
    """
    # Quote db_name and tbl_name with backticks if needed
    if not is_quoted_with_backticks(db_name):
        db_name = quote_with_backticks(db_name)
    if not is_quoted_with_backticks(tbl_name):
        tbl_name = quote_with_backticks(tbl_name)

    create_str = "CREATE TABLE %s.%s (\n" % (db_name, tbl_name)
    stop = len(columns)
    pri_keys = []
    keys = []
    key_str = ""
    col_name_index = col_ref.get("COLUMN_NAME", 0)
    col_type_index = col_ref.get("COLUMN_TYPE", 1)
    is_null_index = col_ref.get("IS_NULLABLE", 2)
    def_index = col_ref.get("COLUMN_DEFAULT", 3)
    col_key_index = col_ref.get("COLUMN_KEY", 4)
    const_name_index = col_ref.get("CONSTRAINT_NAME", 7)
    ref_tbl_index = col_ref.get("REFERENCED_TABLE_NAME", 8)
    ref_col_index = col_ref.get("COL_NAME", 13)
    ref_col_ref = col_ref.get("REFERENCED_COLUMN_NAME", 15)
    constraints = []
    for column in range(0,stop):
        cur_col = columns[column]
        # Quote column name with backticks if needed
        col_name = cur_col[col_name_index]
        if not is_quoted_with_backticks(col_name):
            col_name = quote_with_backticks(col_name)
        create_str = "%s  %s %s" % (create_str, col_name,
                                   cur_col[col_type_index])
        if cur_col[is_null_index].upper() != "YES":
            create_str += " NOT NULL"
        if len(cur_col[def_index]) > 0 and cur_col[def_index].upper() != "NONE":
            create_str += " DEFAULT %s" % cur_col[def_index]
        elif cur_col[is_null_index].upper() == "YES":
            create_str += " DEFAULT NULL"
        if len(cur_col[col_key_index]) > 0:
            if cur_col[col_key_index] == "PRI":
                pri_keys.append(cur_col[col_name_index])
            else:
                keys.append(cur_col[col_name_index])
        if column+1 < stop:
            create_str += ",\n"
    if len(pri_keys) > 0:
        key_list = pri_keys
        key_str = ",\n  PRIMARY KEY("
    elif len(keys) > 0:
        key_list = keys
        # Quote constraint name with backticks if needed
        const_name = cur_col[const_name_index]
        if const_name and not is_quoted_with_backticks(const_name):
            const_name = quote_with_backticks(const_name)
        key_str = ",\n  KEY %s (" % const_name
        constraints.append([const_name, cur_col[ref_tbl_index],
                            cur_col[ref_col_index], cur_col[ref_col_ref]])
    if len(key_str) > 0:
        stop = len(key_list)
        for key in range(0,stop):
            # Quote keys with backticks if needed
            if key_list[key] and not is_quoted_with_backticks(key_list[key]):
                key_list[key] = quote_with_backticks(key_list[key])
            key_str += "%s" % key_list[key]
            if key+1 < stop:
                key_str += ", "
        key_str += ")"
        create_str += key_str
    if len(constraints) > 0:
        for constraint in constraints:
            # Quote identifiers with backticks if needed
            for i, key in enumerate(constraint):
                if key and not is_quoted_with_backticks(key):
                    constraint[i] = quote_with_backticks(key)
            c_str = ("  CONSTRAINT {cstr} FOREIGN KEY ({fk}) REFERENCES "
                     "{ref1} ({ref2})")
            constraint_str = c_str.format(cstr=constraint[0], fk=constraint[2],
                                          ref1=constraint[1],
                                          ref2=constraint[3])
            create_str = "%s,\n%s" % (create_str, constraint_str)
    create_str = "%s\n)" % create_str
    if engine and len(engine) > 0:
        create_str = "%s ENGINE=%s" % (create_str, engine)
    create_str = "%s;" % create_str

    return create_str
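A reduced sketch of the expected input; only the first five column fields (name, type, nullability, default, key) are exercised by this simple case, so short tuples suffice for illustration:

columns = [
    ('id',   'int(11)',     'NO',  '',     'PRI'),
    ('name', 'varchar(32)', 'YES', 'NULL', ''),
]
sql = _build_create_table('util_test', 't1', 'InnoDB', columns)
# sql is roughly:
# CREATE TABLE `util_test`.`t1` (
#   `id` int(11) NOT NULL,
#   `name` varchar(32) DEFAULT NULL,
#   PRIMARY KEY(`id`)
# ) ENGINE=InnoDB;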
Example #16
        # the initial specified string. In general, this identifies the missing
        # use of backticks.
        matched_size = len(orig_db)
        if new_db:
            # add 1 for the separator ':'
            matched_size += 1
            matched_size += len(new_db)
        if matched_size != len(db):
            parser.error(
                PARSE_ERR_DB_PAIR_EXT.format(
                    db_pair=db, db1_label="orig_db", db2_label="new_db", db1_value=orig_db, db2_value=new_db
                )
            )

        # Remove backtick quotes (handled later)
        orig_db = remove_backtick_quoting(orig_db) if is_quoted_with_backticks(orig_db) else orig_db
        new_db = remove_backtick_quoting(new_db) if new_db and is_quoted_with_backticks(new_db) else new_db
        db_entry = (orig_db, new_db)
        db_list.append(db_entry)

    try:
        # Record start time.
        if opt.verbosity >= 3:
            start_copy_time = time.time()

        # Copy databases concurrently for non posix systems (windows).
        if options["multiprocess"] > 1 and os.name != "posix":
            # Create copy databases tasks.
            copy_db_tasks = []
            for db in db_list:
                copy_task = {"source_srv": source_values, "dest_srv": dest_values, "db_list": [db], "options": options}
Example #17
# Parse server connection values
try:
    server_values = parse_connection(opt.server, None, options)
except FormatError:
    _, err, _ = sys.exc_info()
    parser.error("Server connection values invalid: %s." % err)
except UtilError:
    _, err, _ = sys.exc_info()
    parser.error("Server connection values invalid: %s." % err.errmsg)

# Build list of databases to copy
db_list = []
for db in args:
    # Remove backtick quotes (handled later)
    db = remove_backtick_quoting(db) if is_quoted_with_backticks(db) else db
    db_list.append(db)

try:
    # record start time
    if opt.verbosity >= 3:
        start_test = time.time()
        
    # Export all databases specified
    export_databases(server_values, db_list, options)
        
    # record elapsed time
    if opt.verbosity >= 3:
        print_elapsed_time(start_test)

except UtilError:
Example #18
    def get_grants_for_object(self, qualified_obj_name, obj_type_str,
                              global_privs=False):
        """ Retrieves the list of grants that the current user has that that
         have effect over a given object.

        qualified_obj_name[in]   String with the qualified name of the object.
        obj_type_str[in]         String with the type of the object that we are
                                 working with, must be one of 'ROUTINE',
                                 'TABLE' or 'DATABASE'.
        global_privs[in]         If True, the wildcard '%' host privileges
                                 are also taken into account


        This method takes the MySQL privilege hierarchy into account; e.g.,
        if the qualified object is a table, it returns all the grant
        statements for this user regarding that table, the grant statements
        regarding the database that contains the table, and finally any
        global grants that the user might have.

        Returns a list of strings with the grant statements.
        """

        grant_stm_lst = self.get_grants(global_privs)
        obj_name_regexp = re.compile(REGEXP_QUALIFIED_OBJ_NAME)
        m_obj = obj_name_regexp.match(qualified_obj_name)
        grants = []
        if not m_obj:
            raise UtilError("Cannot parse the specified qualified name "
                            "'{0}'".format(qualified_obj_name))
        else:
            db_name, obj_name = m_obj.groups()
            # Quote database and object name if necessary
            if not is_quoted_with_backticks(db_name):
                db_name = quote_with_backticks(db_name)
            if obj_name and obj_name != '*':
                if not is_quoted_with_backticks(obj_name):
                    obj_name = quote_with_backticks(obj_name)

            # For each grant statement look for the ones that apply to this
            # user and object
            for grant_stm in grant_stm_lst:
                grant_tpl = self._parse_grant_statement(grant_stm[0])
                if grant_tpl:
                    # Check if any of the privileges applies to this object
                    # and if it does then check if it inherited from this
                    # statement
                    if filter_grants(grant_tpl.privileges, obj_type_str):
                        # Add global grants
                        if grant_tpl.db == '*':
                            grants.append(grant_stm[0])
                            continue
                        # Add database level grants
                        if grant_tpl.db == db_name and grant_tpl.object == '*':
                            grants.append(grant_stm[0])
                            continue
                        # If it is an object, add existing object level grants
                        # as well.
                        if obj_name:
                            if (grant_tpl.db == db_name and
                                    grant_tpl.object == obj_name):
                                grants.append(grant_stm[0])

        return grants
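A hedged usage sketch; user again stands for the object exposing this method:

# All grant statements that affect a given table: object-level, database-level
# and global grants, optionally including wildcard-host ('%') grants.
stmts = user.get_grants_for_object('`util_test`.`t1`', 'TABLE',
                                   global_privs=True)
for stm in stmts:
    print(stm)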
Example #19
def validate_obj_type_dict(server, obj_type_dict):
    """Validates the dictionary of objects against the specified server

    This function builds a dict with the types of the objects in
    obj_type_dict, filtering out non-existing databases and objects.

    Returns a dictionary with only the existing objects, using object types
    as keys and, as values, lists of tuples (<DB NAME>, <OBJ_NAME>).
    """
    valid_obj_dict = defaultdict(list)
    server_dbs = set(row[0] for row in
                     server.get_all_databases(
                         ignore_internal_dbs=False))
    argument_dbs = set(obj_type_dict.keys())

    # Get non existing_databases and dbs to check
    non_existing_dbs = argument_dbs.difference(server_dbs)
    dbs_to_check = server_dbs.intersection(argument_dbs)

    if non_existing_dbs:
        if len(non_existing_dbs) > 1:
            plurals = ('s', '', 'them')
        else:
            plurals = ('', 'es', 'it')
        print('# WARNING: specified database{0} do{1} not '
              'exist on base server and will be skipped along with '
              'any tables and routines belonging to {2}: '
              '{3}.'.format(plurals[0], plurals[1], plurals[2],
                            ", ".join(non_existing_dbs)))

    # Now for each db that actually exists, get the type of the specified
    # objects
    for db_name in dbs_to_check:
        db = Database(server, db_name)
        # quote database name if necessary
        quoted_db_name = db_name
        if not is_quoted_with_backticks(db_name):
            quoted_db_name = quote_with_backticks(db_name)
        for obj_name in obj_type_dict[db_name]:
            if obj_name is None:
                # We must consider the database itself
                valid_obj_dict[DATABASE_TYPE].append((quoted_db_name,
                                                      quoted_db_name))
            else:
                # get quoted name for obj_name
                quoted_obj_name = obj_name
                if not is_quoted_with_backticks(obj_name):
                    quoted_obj_name = quote_with_backticks(obj_name)

                # Test if the object exists and if it does, test if it
                # is one of the supported object types, else
                # print a warning and skip the object
                obj_type = db.get_object_type(obj_name)
                if obj_type is None:
                    print("# WARNING: specified object does not exist. "
                          "{0}.{1} will be skipped."
                          "".format(quoted_db_name, quoted_obj_name))
                elif 'PROCEDURE' in obj_type or 'FUNCTION' in obj_type:
                    valid_obj_dict[ROUTINE_TYPE].append((quoted_db_name,
                                                         quoted_obj_name))
                elif 'TABLE' in obj_type:
                    valid_obj_dict[TABLE_TYPE].append((quoted_db_name,
                                                       quoted_obj_name))
                else:
                    print('# WARNING: specified object is not supported '
                          '(not a DATABASE, FUNCTION, PROCEDURE or TABLE),'
                          ' as such it will be skipped: {0}.{1}.'
                          ''.format(quoted_db_name, quoted_obj_name))
    return valid_obj_dict
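An illustration of the input and output shapes; the database/object names are placeholders and the *_TYPE keys are the constants assumed to come from the same module:

obj_type_dict = {
    'util_test': ['t1', 'p1', None],   # None stands for the database itself
    'no_such_db': ['t2'],              # reported as missing and skipped
}
valid = validate_obj_type_dict(server, obj_type_dict)
# valid[TABLE_TYPE]    -> [('`util_test`', '`t1`')]
# valid[ROUTINE_TYPE]  -> [('`util_test`', '`p1`')]
# valid[DATABASE_TYPE] -> [('`util_test`', '`util_test`')]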
Example #20
        options.update(get_ssl_dict(opt))
        server_values = parse_connection(opt.server, None, options)
    except FormatError:
        _, err, _ = sys.exc_info()
        parser.error("Server connection values invalid: {0}.".format(err))
    except UtilError:
        _, err, _ = sys.exc_info()
        parser.error("Server connection values invalid: "
                     "{0}.".format(err.errmsg))

    # Build list of databases to copy
    db_list = []
    for db in args:
        # Remove backtick quotes (handled later)
        db = remove_backtick_quoting(db) \
            if is_quoted_with_backticks(db) else db
        db_list.append(db)

    try:
        # record start time
        if opt.verbosity >= 3:
            start_export_time = time.time()

        # Export databases concurrently for non posix systems (windows).
        if options['multiprocess'] > 1 and os.name != 'posix':
            # Create export databases tasks.
            export_db_tasks = []
            for db in db_list:
                export_task = {
                    'srv_con': server_values,
                    'db_list': [db],
Example #21
def _build_create_objects(obj_type, db, definitions):
    """Build the CREATE and GRANT SQL statments for object definitions.

    This method takes the object information read from the file using the
    _read_next() method and constructs SQL definition statements for each
    object. It receives a block of objects and creates a statement for
    each object.

    obj_type[in]      The object type
    db[in]            The database
    definitions[in]   The list of object definition data from the file

    Returns (string[]) - a list of SQL statements for the objects
    """
    create_strings = []
    skip_header = True
    obj_db = ""
    obj_name = ""
    col_list = []
    stop = len(definitions)
    col_ref = {}
    engine = None
    # Now the tricky part.
    for i in range(0, stop):
        if skip_header:
            skip_header = False
            col_ref = _build_column_ref(definitions[i])
            continue
        defn = definitions[i]
        # Read engine from first row and save old value.
        old_engine = engine
        engine = defn[col_ref.get("ENGINE", 2)]
        create_str = ""
        if obj_type == "TABLE":
            if (obj_db == "" and obj_name == ""):
                obj_db = defn[col_ref.get("TABLE_SCHEMA", 0)]
                obj_name = defn[col_ref.get("TABLE_NAME", 1)]
            if (obj_db == defn[col_ref.get("TABLE_SCHEMA",0)] and \
                obj_name == defn[col_ref.get("TABLE_NAME",1)]):
                col_list.append(defn)
            else:
                create_str = _build_create_table(obj_db, obj_name, old_engine,
                                                 col_list, col_ref)
                create_strings.append(create_str)
                obj_db = defn[col_ref.get("TABLE_SCHEMA", 0)]
                obj_name = defn[col_ref.get("TABLE_NAME", 1)]
                col_list = []
                col_list.append(defn)
            # check for end.
            if i + 1 == stop:
                create_str = _build_create_table(obj_db, obj_name, engine,
                                                 col_list, col_ref)
                create_strings.append(create_str)
        elif obj_type == "VIEW":
            # Quote table schema and name with backticks if needed
            if not is_quoted_with_backticks(defn[col_ref.get(
                    "TABLE_SCHEMA", 0)]):
                obj_db = quote_with_backticks(defn[col_ref.get(
                    "TABLE_SCHEMA", 0)])
            else:
                obj_db = defn[col_ref.get("TABLE_SCHEMA", 0)]
            if not is_quoted_with_backticks(defn[col_ref.get("TABLE_NAME",
                                                             1)]):
                obj_name = quote_with_backticks(defn[col_ref.get(
                    "TABLE_NAME", 1)])
            else:
                obj_name = defn[col_ref.get("TABLE_NAME", 1)]
            # Create VIEW statement
            create_str = (
                "CREATE ALGORITHM=UNDEFINED DEFINER={defr} "
                "SQL SECURITY {sec} VIEW {scma}.{tbl} AS {defv}; ").format(
                    defr=defn[col_ref.get("DEFINER", 2)],
                    sec=defn[col_ref.get("SECURITY_TYPE", 3)],
                    scma=obj_db,
                    tbl=obj_name,
                    defv=defn[col_ref.get("VIEW_DEFINITION", 4)])
            create_strings.append(create_str)
        elif obj_type == "TRIGGER":
            # Quote required identifiers with backticks
            obj_db = quote_with_backticks(db) \
                        if not is_quoted_with_backticks(db) else db

            if not is_quoted_with_backticks(defn[col_ref.get(
                    "TRIGGER_NAME", 0)]):
                obj_name = quote_with_backticks(defn[col_ref.get(
                    "TRIGGER_NAME", 0)])
            else:
                obj_name = defn[col_ref.get("TRIGGER_NAME", 0)]

            if not is_quoted_with_backticks(defn[col_ref.get(
                    "EVENT_OBJECT_SCHEMA", 3)]):
                evt_scma = quote_with_backticks(defn[col_ref.get(
                    "EVENT_OBJECT_SCHEMA", 3)])
            else:
                evt_scma = defn[col_ref.get("EVENT_OBJECT_SCHEMA", 3)]

            if not is_quoted_with_backticks(defn[col_ref.get(
                    "EVENT_OBJECT_TABLE", 4)]):
                evt_tbl = quote_with_backticks(defn[col_ref.get(
                    "EVENT_OBJECT_TABLE", 4)])
            else:
                evt_tbl = defn[col_ref.get("EVENT_OBJECT_TABLE", 4)]

            # Create TRIGGER statement
            # Important Note: There is a bug in the server when backticks are
            # used in the trigger statement, i.e. the ACTION_STATEMENT value in
            # INFORMATION_SCHEMA.TRIGGERS is incorrect (see BUG#16291011).
            create_str = (
                "CREATE DEFINER={defr} "
                "TRIGGER {scma}.{trg} {act_t} {evt_m} "
                "ON {evt_s}.{evt_t} FOR EACH {act_o} {act_s};").format(
                    defr=defn[col_ref.get("DEFINER", 1)],
                    scma=obj_db,
                    trg=obj_name,
                    act_t=defn[col_ref.get("ACTION_TIMING", 6)],
                    evt_m=defn[col_ref.get("EVENT_MANIPULATION", 2)],
                    evt_s=evt_scma,
                    evt_t=evt_tbl,
                    act_o=defn[col_ref.get("ACTION_ORIENTATION", 5)],
                    act_s=defn[col_ref.get("ACTION_STATEMENT", 7)])
            create_strings.append(create_str)
        elif obj_type in ("PROCEDURE", "FUNCTION"):
            # Quote required identifiers with backticks
            obj_db = quote_with_backticks(db) \
                        if not is_quoted_with_backticks(db) else db

            if not is_quoted_with_backticks(defn[col_ref.get("NAME", 0)]):
                obj_name = quote_with_backticks(defn[col_ref.get("NAME", 0)])
            else:
                obj_name = defn[col_ref.get("NAME", 0)]

            # Create PROCEDURE or FUNCTION statement
            if obj_type == "FUNCTION":
                func_str = " RETURNS %s" % defn[col_ref.get("RETURNS", 7)]
                if defn[col_ref.get("IS_DETERMINISTI", 3)] == 'YES':
                    func_str = "%s DETERMINISTIC" % func_str
            else:
                func_str = ""
            create_str = ("CREATE DEFINER={defr}"
                          " {type} {scma}.{name}({par_lst})"
                          "{func_ret} {body};").format(
                              defr=defn[col_ref.get("DEFINER", 5)],
                              type=obj_type,
                              scma=obj_db,
                              name=obj_name,
                              par_lst=defn[col_ref.get("PARAM_LIST", 6)],
                              func_ret=func_str,
                              body=defn[col_ref.get("BODY", 8)])
            create_strings.append(create_str)
        elif obj_type == "EVENT":
            # Quote required identifiers with backticks
            obj_db = quote_with_backticks(db) \
                        if not is_quoted_with_backticks(db) else db

            if not is_quoted_with_backticks(defn[col_ref.get("NAME", 0)]):
                obj_name = quote_with_backticks(defn[col_ref.get("NAME", 0)])
            else:
                obj_name = defn[col_ref.get("NAME", 0)]

            # Create EVENT statement
            create_str = ("CREATE EVENT {scma}.{name} "
                          "ON SCHEDULE EVERY {int_v} {int_f} "
                          "STARTS '{starts}' ").format(
                              scma=obj_db,
                              name=obj_name,
                              int_v=defn[col_ref.get("INTERVAL_VALUE", 5)],
                              int_f=defn[col_ref.get("INTERVAL_FIELD", 6)],
                              starts=defn[col_ref.get("STARTS", 8)])

            ends_index = col_ref.get("ENDS", 9)
            if len(defn[ends_index]) > 0 and \
               defn[ends_index].upper() != "NONE":
                create_str = "%s ENDS '%s' " % (create_str, defn[ends_index])
            if defn[col_ref.get("ON_COMPLETION", 11)] == "DROP":
                create_str = "%s ON COMPLETION NOT PRESERVE " % create_str
            if defn[col_ref.get("STATUS", 10)] == "DISABLED":
                create_str = "%s DISABLE " % create_str
            create_str = "%s DO %s;" % (create_str, defn[col_ref.get(
                "BODY", 2)])
            create_strings.append(create_str)
        elif obj_type == "GRANT":
            try:
                user, priv, db, tbl = defn[0:4]
            except:
                raise UtilError("Object data invalid: %s : %s" % \
                                     (obj_type, defn))
            if not tbl:
                tbl = "*"
            elif tbl.upper() == "NONE":
                tbl = "*"

            # Quote required identifiers with backticks
            obj_db = quote_with_backticks(db) \
                        if not is_quoted_with_backticks(db) else db
            obj_tbl = quote_with_backticks(tbl) \
                        if (tbl != '*'
                            and not is_quoted_with_backticks(tbl)) else tbl

            # Create GRANT statement
            create_str = "GRANT %s ON %s.%s TO %s" % (priv, obj_db, obj_tbl,
                                                      user)
            create_strings.append(create_str)
        elif obj_type in ["RPL_COMMAND", "GTID_COMMAND"]:
            create_strings.append([defn])
        else:
            raise UtilError("Unknown object type discovered: %s" % obj_type)
    return create_strings
Example #22
        # add 1 for the separator ':'
        matched_size = matched_size + 1
        matched_size = matched_size + len(parts[1])
    # Verify if the size of the databases matched by the REGEX is equal to the
    # initial specified string. In general, this identifies the missing use
    # of backticks.
    if matched_size != len(db):
        parser.error(PARSE_ERR_DB_PAIR_EXT.format(db_pair=db,
                                                  db1_label='db1',
                                                  db2_label='db2',
                                                  db1_value=parts[0],
                                                  db2_value=parts[1]))

    # Remove backtick quotes (handled later)
    db1 = remove_backtick_quoting(parts[0]) \
                if is_quoted_with_backticks(parts[0]) else parts[0]
    db2 = remove_backtick_quoting(parts[1]) \
                if is_quoted_with_backticks(parts[1]) else parts[1]

    try:
        res = database_compare(server1_values, server2_values,
                               db1, db2, options)
        print
    except UtilError:
        _, e, _ = sys.exc_info()
        print("ERROR: %s" % e.errmsg)
        check_failed = True
        if not opt.run_all_tests:
            break
    if not res:
        check_failed = True
Example #23
def check_index(src_val, table_args, options):
    """Check for duplicate or redundant indexes for one or more tables

    This method will examine the indexes for one or more tables and identify
    any indexes that are potential duplicates or redundant. It prints the
    equivalent DROP statements if selected.

    src_val[in]        a dictionary containing connection information for the
                       source including:
                       (user, password, host, port, socket)
    table_args[in]     list of tables in the form 'db.table' or 'db'
    options[in]        dictionary of options to include:
                         show-drops   : show drop statements for dupe indexes
                         skip         : skip non-existent tables
                         verbosity    : print extra information
                         show-indexes : show all indexes for each table
                         index-format : index format = sql, table, tab, csv
                         worst        : show worst performing indexes
                         best         : show best performing indexes
                         report-indexes : reports tables without PK or UK

    Returns bool True = success, raises UtilError if error
    """

    # Get options
    show_drops = options.get("show-drops", False)
    skip = options.get("skip", False)
    verbosity = options.get("verbosity", False)
    show_indexes = options.get("show-indexes", False)
    index_format = options.get("index-format", False)
    stats = options.get("stats", False)
    first_indexes = options.get("best", None)
    last_indexes = options.get("worst", None)
    report_indexes = options.get("report-indexes", False)

    # Try to connect to the MySQL database server.
    conn_options = {
        'quiet': verbosity == 1,
        'version': "5.0.0",
    }
    servers = connect_servers(src_val, None, conn_options)

    source = servers[0]

    db_list = []     # list of databases
    table_list = []  # list of all tables to process

    # Build a list of objects to process
    # 1. start with db_list if no objects present on command line
    # 2. process command line options.
    # 3. loop through database list and add all tables
    # 4. check indexes

    obj_name_regexp = re.compile(REGEXP_QUALIFIED_OBJ_NAME)

    # Perform the options check here. Loop through objects presented.
    for obj in table_args:
        m_obj = obj_name_regexp.match(obj)
        # Check if a valid database/table name is specified.
        if not m_obj:
            raise UtilError(PARSE_ERR_OBJ_NAME_FORMAT.format(
                obj_name=obj, option="the database/table arguments"))
        else:
            db_name, obj_name = m_obj.groups()
            if obj_name:
                # Table specified
                table_list.append(obj)
            # Else we are operating on a specific database.
            else:
                # Remove backtick quotes.
                db_name = remove_backtick_quoting(db_name) \
                    if is_quoted_with_backticks(db_name) else db_name
                db_list.append(db_name)

    # Loop through database list adding tables
    for db in db_list:
        db_source = Database(source, db)
        db_source.init()
        tables = db_source.get_db_objects("TABLE")
        if not tables and verbosity >= 1:
            print "# Warning: database %s does not exist. Skipping." % (db)
        for table in tables:
            table_list.append("{0}.{1}".format(quote_with_backticks(db),
                                               quote_with_backticks(table[0])))

    # Fail if no tables to check
    if not table_list:
        raise UtilError("No tables to check.")

    if verbosity > 1:
        print "# Checking indexes..."
    # Check indexes for each table in the list
    for table_name in table_list:
        tbl_options = {
            'verbose': verbosity >= 1,
            'get_cols': False,
            'quiet': verbosity is None or verbosity < 1
        }
        tbl = Table(source, table_name, tbl_options)
        exists = tbl.exists()
        if not exists and not skip:
            raise UtilError("Table %s does not exist. Use --skip "
                            "to skip missing tables." % table_name)
        if exists:
            if not tbl.get_indexes():
                if verbosity > 1 or report_indexes:
                    print "# Table %s is not indexed." % (table_name)
            else:
                if show_indexes:
                    tbl.print_indexes(index_format, verbosity)
                    # Show if table has primary key
                if verbosity > 1 or report_indexes:
                    if not tbl.has_primary_key():
                        if not tbl.has_unique_key():
                            print("# Table {0} does not contain neither a "
                                  "PRIMARY nor UNIQUE key.".format(table_name))
                        else:
                            print("# Table {0} does not contain a PRIMARY key."
                                  "".format(table_name))
                tbl.check_indexes(show_drops)

            # Show best and/or worst indexes
            if stats:
                if first_indexes is not None:
                    tbl.show_special_indexes(index_format, first_indexes, True)
                if last_indexes is not None:
                    tbl.show_special_indexes(index_format, last_indexes)

        if verbosity > 1:
            print "#"

    if verbosity > 1:
        print "# ...done."
Example #24
    # check unique keys
    ukey_regexp = re.compile(r'(?:(?:;){{0,1}}{0}\.{0})'
                             ''.format(REGEXP_OBJ_NAME))

    db_idxes_l = None

    # Split the table names considering backtick quotes
    if opt.use_indexes:
        grp = ukey_regexp.findall(opt.use_indexes)
        if not grp:
            parser.error("Can't parse the specified --use-indexes argument {0}"
                         "".format(opt.use_indexes))
        db_idxes_l = []
        for table, index in grp:
            table_uc = (table if is_quoted_with_backticks(table)
                        else quote_with_backticks(table))
            index_uc = (index if is_quoted_with_backticks(index)
                        else quote_with_backticks(index))
            db_idxes_l.append((table_uc, index_uc))

    # Set options for database operations.
    options = {
        "quiet": opt.quiet,
        "verbosity": opt.verbosity,
        "difftype": opt.difftype,
        "run_all_tests": opt.run_all_tests,
        "width": opt.width,
        "no_checksum_table": opt.no_checksum_table,
        "no_object_check": opt.no_object_check,
        "no_diff": opt.no_diff,
Example #25
    def __init__(self, server1, name, options=None):
        """Constructor

        server1[in]        A Server object
        name[in]           Name of table in the form (db.table)
        options[in]        options for class: verbose, quiet, get_cols,
            quiet     If True, do not print information messages
            verbose   print extra data during operations (optional)
                      (default is False)
            get_cols  If True, get the column metadata on construction
                      (default is False)
        """
        if options is None:
            options = {}
        self.verbose = options.get('verbose', False)
        self.quiet = options.get('quiet', False)
        self.server = server1

        # Keep table identifier considering backtick quotes
        if is_quoted_with_backticks(name):
            self.q_table = name
            self.q_db_name, self.q_tbl_name = Database.parse_object_name(name)
            self.db_name = remove_backtick_quoting(self.q_db_name)
            self.tbl_name = remove_backtick_quoting(self.q_tbl_name)
            self.table = ".".join([self.db_name, self.tbl_name])
        else:
            self.table = name
            self.db_name, self.tbl_name = Database.parse_object_name(name)
            self.q_db_name = quote_with_backticks(self.db_name)
            self.q_tbl_name = quote_with_backticks(self.tbl_name)
            self.q_table = ".".join([self.q_db_name, self.q_tbl_name])
        self.obj_type = "TABLE"
        self.pri_idx = None

        # We store each type of index in a separate list to make it easier
        # to manipulate
        self.btree_indexes = []
        self.hash_indexes = []
        self.rtree_indexes = []
        self.fulltext_indexes = []
        self.text_columns = []
        self.blob_columns = []
        self.column_format = None
        self.column_names = []
        self.q_column_names = []
        if options.get('get_cols', False):
            self.get_column_metadata()
        self.dest_vals = None
        self.storage_engine = None

        # Get max allowed packet
        res = self.server.exec_query("SELECT @@session.max_allowed_packet")
        if res:
            self.max_packet_size = res[0][0]
        else:
            self.max_packet_size = _MAXPACKET_SIZE
        # Watch for invalid values
        if self.max_packet_size > _MAXPACKET_SIZE:
            self.max_packet_size = _MAXPACKET_SIZE

        self._insert = "INSERT INTO %s.%s VALUES "
        self.query_options = {  # Used for skipping fetch of rows
            'fetch': False
        }
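A short sketch of the identifier bookkeeping done by this constructor; the connected server object and table name are placeholders:

tbl = Table(server, 'util_test.t1', {'verbose': True})
# tbl.db_name  == 'util_test'    tbl.q_db_name  == '`util_test`'
# tbl.tbl_name == 't1'           tbl.q_tbl_name == '`t1`'
# tbl.q_table  == '`util_test`.`t1`'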
Example #26
    def copy_objects(self, new_db, options, new_server=None, connections=1, check_exists=True):
        """Copy the database objects.

        This method will copy a database and all of its objects and data
        to another, new database. Options set at instantiation will determine
        if there are objects that are excluded from the copy. Likewise,
        the method will also skip data if that option was set and process
        an input file with INSERT statements if that option was set.

        The method can also be used to copy a database to another server
        by providing the new server object (new_server). To copy to the same
        name, set new_db = old_db; otherwise the copy is made to the new
        database name.
        
        new_db[in]         Name of the new database
        options[in]        Options for copy e.g. force, etc.
        new_server[in]     Connection to another server for copying the db
                           Default is None (copy to same server - clone)
        connections[in]    Number of threads(connections) to use for insert
        check_exists[in]   If True, check for database existence before copy
                           Default is True
        """

        from mysql.utilities.common.table import Table

        # Must call init() first!
        assert self.init_called, \
            "You must call db.init() before db.copy_objects()."

        grant_msg_displayed = False

        if new_db:
            # Assign new database identifier considering backtick quotes.
            if is_quoted_with_backticks(new_db):
                self.q_new_db = new_db
                self.new_db = remove_backtick_quoting(new_db)
            else:
                self.new_db = new_db
                self.q_new_db = quote_with_backticks(new_db)
        else:
            # If new_db is not defined use the same as source database.
            self.new_db = self.db_name
            self.q_new_db = self.q_db_name

        self.destination = new_server

        # We know we're cloning if there is no new connection.
        self.cloning = new_server == self.source

        if self.cloning:
            self.destination = self.source

        # Check to see if database exists
        if check_exists:
            exists = False
            drop_server = None
            if self.cloning:
                exists = self.exists(self.source, new_db)
                drop_server = self.source
            else:
                exists = self.exists(self.destination, new_db)
                drop_server = self.destination
            if exists:
                if options.get("force", False):
                    self.drop(drop_server, True, new_db)
                elif not self.skip_create:
                    raise UtilDBError(
                        "destination database exists. Use " "--force to overwrite existing " "database.", -1, new_db
                    )

        # Create new database first
        if not self.skip_create:
            if self.cloning:
                self.create(self.source, new_db)
            else:
                self.create(self.destination, new_db)

        # Create the objects in the new database
        for obj in self.objects:

            # Drop object if --force specified and database not dropped
            # Grants do not need to be dropped for overwriting
            if options.get("force", False) and obj[0] != _GRANT:
                self.__drop_object(obj[0], obj[1][0])

            # Create the object
            self.__create_object(
                obj[0],
                obj[1],
                not grant_msg_displayed,
                options.get("quiet", False),
                options.get("new_engine", None),
                options.get("def_engine", None),
            )

            if obj[0] == _GRANT and not grant_msg_displayed:
                grant_msg_displayed = True
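
A hedged usage sketch (not part of the original source) for the method above: cloning a database on the same server. It assumes a connected Server instance `server`, a Database constructor of the form Database(server, db_name, options), and that init() has been called to read the object list, as the assertion in copy_objects() requires.

# Hypothetical usage; `server` is assumed to be a connected Server instance.
from mysql.utilities.common.database import Database

db = Database(server, "employees", {})
db.init()  # required: copy_objects() asserts init() was called first
db.copy_objects("employees_copy", options={"force": True},
                new_server=server)  # same server as the source => clone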
Beispiel #27
0
def import_file(dest_val, file_name, options):
    """Import a file

    This method reads a file and, if needed, transforms the file into
    discrete SQL statements for execution on the destination server.

    It accepts any of the formal structured files produced by the
    mysqldbexport utility, including the formats SQL, CSV, TAB, GRID, and
    VERTICAL.

    It will read these files and skip or include the definitions or data
    as specified in the options. An error is raised for any conversion
    errors or errors while executing the statements.

    Users are highly encouraged to use the --dryrun option which will
    print the SQL statements without executing them.

    dest_val[in]       a dictionary containing connection information for the
                       destination including:
                       (user, password, host, port, socket)
    file_name[in]      name (and path) of the file to import
    options[in]        a dictionary containing the options for the import:
                       (skip_tables, skip_views, skip_triggers, skip_procs,
                       skip_funcs, skip_events, skip_grants, skip_create,
                       skip_data, no_header, display, format, and debug)

    Returns bool True = success, False = error
    """

    from mysql.utilities.common.database import Database
    from mysql.utilities.common.options import check_engine_options
    from mysql.utilities.common.table import Table
    from mysql.utilities.common.server import connect_servers

    # Helper method to dig through the definitions for create statements
    def _process_definitions(statements, table_col_list, db_name):
        # First, get the SQL strings
        sql_strs = _build_create_objects(obj_type, db_name, definitions)
        statements.extend(sql_strs)
        # Now, save the column list
        col_list = _build_col_metadata(obj_type, definitions)
        if len(col_list) > 0:
            table_col_list.extend(col_list)

    def _process_data(tbl_name, statements, columns, table_col_list,
                      table_rows, skip_blobs):
        # if there is data here, build bulk inserts
        # First, create table reference, then call insert_rows()
        tbl = Table(destination, tbl_name)
        # Need to check to see if table exists!
        if tbl.exists():
            tbl.get_column_metadata()
            col_meta = True
        elif len(table_col_list) > 0:
            col_meta = _get_column_metadata(tbl, table_col_list)
        else:
            fix_cols = []
            fix_cols.append((tbl.tbl_name, columns))
            col_meta = _get_column_metadata(tbl, fix_cols)
        if not col_meta:
            raise UtilError("Cannot build bulk insert statements without "
                            "the table definition.")
        ins_strs = tbl.make_bulk_insert(table_rows, tbl.q_db_name)
        if len(ins_strs[0]) > 0:
            statements.extend(ins_strs[0])
        if len(ins_strs[1]) > 0 and not skip_blobs:
            for update in ins_strs[1]:
                statements.append(update)

    # Gather options
    format = options.get("format", "sql")
    no_headers = options.get("no_headers", False)
    quiet = options.get("quiet", False)
    import_type = options.get("import_type", "definitions")
    single = options.get("single", True)
    dryrun = options.get("dryrun", False)
    do_drop = options.get("do_drop", False)
    skip_blobs = options.get("skip_blobs", False)
    skip_gtid = options.get("skip_gtid", False)

    # Attempt to connect to the destination server
    conn_options = {
        'quiet': quiet,
        'version': "5.1.30",
    }
    servers = connect_servers(dest_val, None, conn_options)

    destination = servers[0]

    # Check storage engines
    check_engine_options(destination, options.get("new_engine", None),
                         options.get("def_engine", None), False,
                         options.get("quiet", False))

    if not quiet:
        if import_type == "both":
            str = "definitions and data"
        else:
            str = import_type
        print "# Importing %s from %s." % (str, file_name)

    # Setup variables we will need
    skip_header = not no_headers
    if format == "sql":
        skip_header = False
    get_db = True
    check_privileges = False
    db_name = None
    file = open(file_name)
    columns = []
    read_columns = False
    table_rows = []
    obj_type = ""
    definitions = []
    statements = []
    table_col_list = []
    tbl_name = ""
    skip_rpl = options.get("skip_rpl", False)
    gtid_command_found = False
    supports_gtid = servers[0].supports_gtid() == 'ON'
    skip_gtid_warning_printed = False
    gtid_version_checked = False

    # Read the file one object/definition group at a time
    for row in read_next(file, format):
        # Check for replication command
        if row[0] == "RPL_COMMAND":
            if not skip_rpl:
                statements.append(row[1])
            continue
        if row[0] == "GTID_COMMAND":
            gtid_command_found = True
            if not supports_gtid:
                # only display warning once
                if not skip_gtid_warning_printed:
                    print _GTID_SKIP_WARNING
                    skip_gtid_warning_printed = True
            elif not skip_gtid:
                if not gtid_version_checked:
                    gtid_version_checked = True
                    # Check GTID version for complete feature support
                    servers[0].check_gtid_version()
                    # Check the gtid_purged value too
                    servers[0].check_gtid_executed("import")
                statements.append(row[1])
            continue
        # If this is the first pass, get the database name from the file
        if get_db:
            if skip_header:
                skip_header = False
            else:
                db_name = _get_db(row)
                # quote db_name with backticks if needed
                if db_name and not is_quoted_with_backticks(db_name):
                    db_name = quote_with_backticks(db_name)
                get_db = False
                if do_drop and import_type != "data":
                    statements.append("DROP DATABASE IF EXISTS %s;" % db_name)
                if import_type != "data":
                    if not _skip_object("CREATE_DB", options) and \
                       not format == 'sql':
                        statements.append("CREATE DATABASE %s;" % db_name)

        # Once the database name is known, verify that the connected user
        # has the required write access on the destination database.
        if db_name is not None:
            dest_db = Database(destination, db_name)

            # Make a dictionary of the options
            access_options = options.copy()

            dest_db.check_write_access(dest_val['user'], dest_val['host'],
                                       access_options)

        # Now check to see if we want definitions, data, or both:
        if row[0] == "sql" or row[0] in _DEFINITION_LIST:
            if format != "sql" and len(row[1]) == 1:
                raise UtilError("Cannot read an import file generated with "
                                "--display=NAMES")

            if import_type in ("definitions", "both"):
                if format == "sql":
                    statements.append(row[1])
                else:
                    if obj_type == "":
                        obj_type = row[0]
                    if obj_type != row[0]:
                        if len(definitions) > 0:
                            _process_definitions(statements, table_col_list,
                                                 db_name)
                        obj_type = row[0]
                        definitions = []
                    if not _skip_object(row[0], options):
                        definitions.append(row[1])
        else:
            # see if there are any definitions to process
            if len(definitions) > 0:
                _process_definitions(statements, table_col_list, db_name)
                definitions = []

            if import_type in ("data", "both"):
                if _skip_object("DATA", options):
                    continue  # skip data
                elif format == "sql":
                    statements.append(row[1])
                else:
                    if row[0] == "BEGIN_DATA":
                        # Start of table so first row is columns.
                        if len(table_rows) > 0:
                            _process_data(tbl_name, statements, columns,
                                          table_col_list, table_rows,
                                          skip_blobs)
                            table_rows = []
                        read_columns = True
                        tbl_name = row[1]
                        if not is_quoted_with_backticks(tbl_name):
                            db, sep, tbl = tbl_name.partition('.')
                            q_db = quote_with_backticks(db)
                            q_tbl = quote_with_backticks(tbl)
                            tbl_name = ".".join([q_db, q_tbl])
                    else:
                        if read_columns:
                            columns = row[1]
                            read_columns = False
                        else:
                            if not single:
                                table_rows.append(row[1])
                            else:
                                str = _build_insert_data(
                                    columns, tbl_name, row[1])
                                statements.append(str)

    # Process remaining definitions
    if len(definitions) > 0:
        _process_definitions(statements, table_col_list, db_name)
        definitions = []

    # Process remaining data rows
    if len(table_rows) > 0:
        _process_data(tbl_name, statements, columns, table_col_list,
                      table_rows, skip_blobs)
        table_rows = []

    # Now process the statements
    _exec_statements(statements, destination, format, options, dryrun)

    file.close()

    # Check gtid process
    if supports_gtid and not gtid_command_found:
        print _GTID_MISSING_WARNING

    if not quiet:
        print "#...done."
    return True
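
A hedged usage sketch (not part of the original source) for the function above. The dictionary keys follow the docstring; the file name and option values are purely illustrative, and dryrun is enabled so the generated statements are only printed, not executed.

# Hypothetical usage of import_file(); values are illustrative only.
dest_val = {"user": "root", "password": "secret",
            "host": "localhost", "port": 3306, "socket": None}
options = {"format": "sql", "import_type": "both", "dryrun": True,
           "do_drop": False, "skip_blobs": False, "skip_rpl": False}
import_file(dest_val, "employees_export.sql", options)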
Beispiel #28
0
def _build_create_objects(obj_type, db, definitions):
    """Build the CREATE and GRANT SQL statments for object definitions.

    This method takes the object information read from the file using the
    _read_next() method and constructs SQL definition statements for each
    object. It receives a block of objects and creates a statement for
    each object.

    obj_type[in]      The object type
    db[in]            The database
    definitions[in]   The list of object definition data from the file

    Returns (string[]) - a list of SQL statements for the objects
    """
    create_strings = []
    skip_header = True
    obj_db = ""
    obj_name = ""
    col_list = []
    stop = len(definitions)
    col_ref = {}
    engine = None
    # Now the tricky part.
    for i in range(0, stop):
        if skip_header:
            skip_header = False
            col_ref = _build_column_ref(definitions[i])
            continue
        defn = definitions[i]
        # Read engine from first row and save old value.
        old_engine = engine
        engine = defn[col_ref.get("ENGINE",2)]
        create_str = ""
        if obj_type == "TABLE":
            if (obj_db == "" and obj_name == ""):
                obj_db = defn[col_ref.get("TABLE_SCHEMA",0)]
                obj_name = defn[col_ref.get("TABLE_NAME",1)]
            if (obj_db == defn[col_ref.get("TABLE_SCHEMA",0)] and \
                obj_name == defn[col_ref.get("TABLE_NAME",1)]):
                col_list.append(defn)
            else:
                create_str = _build_create_table(obj_db, obj_name,
                                                 old_engine,
                                                 col_list, col_ref)
                create_strings.append(create_str)
                obj_db = defn[col_ref.get("TABLE_SCHEMA",0)]
                obj_name = defn[col_ref.get("TABLE_NAME",1)]
                col_list = []
                col_list.append(defn)
            # check for end.
            if i+1 == stop:
                create_str = _build_create_table(obj_db, obj_name,
                                                 engine,
                                                 col_list, col_ref)
                create_strings.append(create_str)
        elif obj_type == "VIEW":
            # Quote table schema and name with backticks if needed
            if not is_quoted_with_backticks(defn[col_ref.get("TABLE_SCHEMA",
                                                             0)]):
                obj_db = quote_with_backticks(defn[col_ref.get("TABLE_SCHEMA",
                                                               0)])
            else:
                obj_db = defn[col_ref.get("TABLE_SCHEMA", 0)]
            if not is_quoted_with_backticks(defn[col_ref.get("TABLE_NAME",
                                                             1)]):
                obj_name = quote_with_backticks(defn[col_ref.get("TABLE_NAME",
                                                                 1)])
            else:
                obj_name = defn[col_ref.get("TABLE_NAME", 1)]
            # Create VIEW statement
            create_str = ("CREATE ALGORITHM=UNDEFINED DEFINER={defr} "
                          "SQL SECURITY {sec} VIEW {scma}.{tbl} AS {defv}; "
                          ).format(defr=defn[col_ref.get("DEFINER", 2)],
                                   sec=defn[col_ref.get("SECURITY_TYPE", 3)],
                                   scma=obj_db, tbl=obj_name,
                                   defv=defn[col_ref.get("VIEW_DEFINITION",
                                                         4)])
            create_strings.append(create_str)
        elif obj_type == "TRIGGER":
            # Quote required identifiers with backticks
            obj_db = quote_with_backticks(db) \
                        if not is_quoted_with_backticks(db) else db

            if not is_quoted_with_backticks(defn[col_ref.get("TRIGGER_NAME",
                                                             0)]):
                obj_name = quote_with_backticks(defn[col_ref.get("TRIGGER_NAME",
                                                                 0)])
            else:
                obj_name = defn[col_ref.get("TRIGGER_NAME", 0)]

            if not is_quoted_with_backticks(
                        defn[col_ref.get("EVENT_OBJECT_SCHEMA", 3)]):
                evt_scma = quote_with_backticks(
                                defn[col_ref.get("EVENT_OBJECT_SCHEMA", 3)])
            else:
                evt_scma = defn[col_ref.get("EVENT_OBJECT_SCHEMA", 3)]

            if not is_quoted_with_backticks(
                        defn[col_ref.get("EVENT_OBJECT_TABLE", 4)]):
                evt_tbl = quote_with_backticks(
                                defn[col_ref.get("EVENT_OBJECT_TABLE", 4)])
            else:
                evt_tbl = defn[col_ref.get("EVENT_OBJECT_TABLE", 4)]

            # Create TRIGGER statement
            # Important Note: There is a bug in the server when backticks are
            # used in the trigger statement, i.e. the ACTION_STATEMENT value in
            # INFORMATION_SCHEMA.TRIGGERS is incorrect (see BUG#16291011).
            create_str = ("CREATE DEFINER={defr} "
                          "TRIGGER {scma}.{trg} {act_t} {evt_m} "
                          "ON {evt_s}.{evt_t} FOR EACH {act_o} {act_s};"
                          ).format(defr=defn[col_ref.get("DEFINER", 1)],
                                   scma=obj_db, trg=obj_name,
                                   act_t=defn[col_ref.get("ACTION_TIMING", 6)],
                                   evt_m=defn[col_ref.get("EVENT_MANIPULATION",
                                                          2)],
                                   evt_s=evt_scma, evt_t=evt_tbl,
                                   act_o=defn[col_ref.get("ACTION_ORIENTATION",
                                                          5)],
                                   act_s=defn[col_ref.get("ACTION_STATEMENT",
                                                          7)])
            create_strings.append(create_str)
        elif obj_type in ("PROCEDURE", "FUNCTION"):
            # Quote required identifiers with backticks
            obj_db = quote_with_backticks(db) \
                        if not is_quoted_with_backticks(db) else db

            if not is_quoted_with_backticks(defn[col_ref.get("NAME", 0)]):
                obj_name = quote_with_backticks(defn[col_ref.get("NAME", 0)])
            else:
                obj_name = defn[col_ref.get("NAME", 0)]

            # Create PROCEDURE or FUNCTION statement
            if obj_type == "FUNCTION":
                func_str = " RETURNS %s" % defn[col_ref.get("RETURNS", 7)]
                if defn[col_ref.get("IS_DETERMINISTI", 3)] == 'YES':
                    func_str = "%s DETERMINISTIC" % func_str
            else:
                func_str = ""
            create_str = ("CREATE DEFINER={defr}"
                          " {type} {scma}.{name}({par_lst})"
                          "{func_ret} {body};"
                          ).format(defr=defn[col_ref.get("DEFINER", 5)],
                                   type=obj_type, scma=obj_db, name=obj_name,
                                   par_lst=defn[col_ref.get("PARAM_LIST", 6)],
                                   func_ret=func_str,
                                   body=defn[col_ref.get("BODY", 8)])
            create_strings.append(create_str)
        elif obj_type == "EVENT":
            # Quote required identifiers with backticks
            obj_db = quote_with_backticks(db) \
                        if not is_quoted_with_backticks(db) else db

            if not is_quoted_with_backticks(defn[col_ref.get("NAME", 0)]):
                obj_name = quote_with_backticks(defn[col_ref.get("NAME", 0)])
            else:
                obj_name = defn[col_ref.get("NAME", 0)]

            # Create EVENT statement
            create_str = ("CREATE EVENT {scma}.{name} "
                          "ON SCHEDULE EVERY {int_v} {int_f} "
                          "STARTS '{starts}' "
                          ).format(scma=obj_db, name=obj_name,
                                   int_v=defn[col_ref.get("INTERVAL_VALUE",
                                                          5)],
                                   int_f=defn[col_ref.get("INTERVAL_FIELD",
                                                          6)],
                                   starts=defn[col_ref.get("STARTS", 8)]
                                   )

            ends_index = col_ref.get("ENDS", 9)
            if len(defn[ends_index]) > 0 and \
               defn[ends_index].upper() != "NONE":
                create_str = "%s ENDS '%s' " % (create_str, defn[ends_index])
            if defn[col_ref.get("ON_COMPLETION", 11)] == "DROP":
                create_str = "%s ON COMPLETION NOT PRESERVE " % create_str
            if defn[col_ref.get("STATUS", 10)] == "DISABLED":
                create_str = "%s DISABLE " % create_str
            create_str = "%s DO %s;" % (create_str,
                                        defn[col_ref.get("BODY", 2)])
            create_strings.append(create_str)
        elif obj_type == "GRANT":
            try:
                user, priv, db, tbl = defn[0:4]
            except:
                raise UtilError("Object data invalid: %s : %s" % \
                                     (obj_type, defn))
            if not tbl:
                tbl = "*"
            elif tbl.upper() == "NONE":
                tbl = "*"

            # Quote required identifiers with backticks
            obj_db = quote_with_backticks(db) \
                        if not is_quoted_with_backticks(db) else db
            obj_tbl = quote_with_backticks(tbl) \
                        if (tbl != '*'
                            and not is_quoted_with_backticks(tbl)) else tbl

            # Create GRANT statement
            create_str = "GRANT %s ON %s.%s TO %s" % (priv, obj_db, obj_tbl,
                                                      user)
            create_strings.append(create_str)
        elif obj_type in ["RPL_COMMAND", "GTID_COMMAND"]:
            create_strings.append([defn])
        else:
            raise UtilError("Unknown object type discovered: %s" % obj_type)
    return create_strings
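
An illustrative sketch (not from the original source) of how GRANT definition rows might be fed to the function above. The first row is assumed to be the header consumed by _build_column_ref(), and the remaining rows follow the (user, privilege, schema, table) layout that the GRANT branch unpacks.

# Hypothetical input; the header column names are illustrative only.
definitions = [
    ["USER", "PRIVILEGE", "SCHEMA", "TABLE"],
    ["'joe'@'localhost'", "SELECT, INSERT", "util_test", "t1"],
    ["'joe'@'localhost'", "ALL PRIVILEGES", "util_test", "NONE"],
]
for stmt in _build_create_objects("GRANT", "util_test", definitions):
    print(stmt)
# Expected output (roughly):
#   GRANT SELECT, INSERT ON `util_test`.`t1` TO 'joe'@'localhost'
#   GRANT ALL PRIVILEGES ON `util_test`.* TO 'joe'@'localhost'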
Beispiel #30
0
    # of backticks.
    matched_size = len(orig_db)
    if new_db:
        # add 1 for the separator ':'
        matched_size = matched_size + 1
        matched_size = matched_size + len(new_db)
    if matched_size != len(db):
        parser.error(PARSE_ERR_DB_PAIR_EXT.format(db_pair=db,
                                                  db1_label='orig_db',
                                                  db2_label='new_db',
                                                  db1_value=orig_db,
                                                  db2_value=new_db))

    # Remove backtick quotes (handled later)
    orig_db = remove_backtick_quoting(orig_db) \
                if is_quoted_with_backticks(orig_db) else orig_db
    new_db = remove_backtick_quoting(new_db) \
                if new_db and is_quoted_with_backticks(new_db) else new_db
    db_entry = (orig_db, new_db)
    db_list.append(db_entry)

try:
    # record start time
    if opt.verbosity >= 3:
        start_test = time.time()
    dbcopy.copy_db(source_values, dest_values, db_list, options)
    if opt.verbosity >= 3:
        print_elapsed_time(start_test)
except UtilError:
    _, e, _ = sys.exc_info()
    print("ERROR: %s" % e.errmsg)