Example #1
    def check_objects(self, server, db, events=True):
        """Check number of objects.
        
        Creates a string containing the number of objects for a given database.
        
        server[in]         Server object to query
        db[in]             name of database to check
        events[in]         if True, include events in the count (default True)
        
        Returns string
        """

        from mysql.utilities.common.database import Database

        db_source = Database(server, db)
        db_source.init()
        res = db_source.get_db_objects("TABLE")
        str = "OBJECT COUNTS: tables = %s, " % (len(res))
        res = db_source.get_db_objects("VIEW")
        str += "views = %s, " % (len(res))
        res = db_source.get_db_objects("TRIGGER")
        str += "triggers = %s, " % (len(res))
        res = db_source.get_db_objects("PROCEDURE")
        str += "procedures = %s, " % (len(res))
        res = db_source.get_db_objects("FUNCTION")
        str += "functions = %s, " % (len(res))
        if events:
            res = db_source.get_db_objects("EVENT")
            str += "events = %s \n" % (len(res))
        return str
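check_objects is written as a helper method on a MySQL Utilities test case, so it expects a connected Server instance and a database name. The sketch below is a hypothetical illustration of that call: the mutlib.System_test base class, self.server1, and the util_test database are assumptions for illustration, and check_objects is assumed to be defined on (or inherited by) the class, as in the example above.

import mutlib  # MySQL Utilities test framework (assumed to be importable)


class demo_check_objects(mutlib.System_test):  # hypothetical test case
    """Illustration only: record object counts for one database."""

    def run(self):
        # self.server1 is assumed to be a Server instance connected during
        # the framework's setup phase.
        summary = self.check_objects(self.server1, "util_test")
        self.results.append(summary)  # test cases collect output here
        return True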
Example #2
def check_index(src_val, table_args, options):
    """Check for duplicate or redundant indexes for one or more tables
    
    This method will examine the indexes for one or more tables and identify
    any indexes that are potential duplicates or redundant. It prints the
    equivalent DROP statements if selected.
    
    src_val[in]        a dictionary containing connection information for the
                       source including:
                       (user, password, host, port, socket)
    table_args[in]     list of tables in the form 'db.table' or 'db'
    options[in]        dictionary of options to include:
                         show-drops   : show drop statements for dupe indexes
                         skip         : skip non-existent tables
                         verbosity    : print extra information
                         show-indexes : show all indexes for each table
                         index-format : index format = sql, table, tab, csv
                         worst        : show worst performing indexes
                         best         : show best performing indexes
    
    Returns bool True = success, raises UtilError if error
    """
    
    # Get options
    show_drops = options.get("show-drops", False)
    skip = options.get("skip", False)
    verbosity = options.get("verbosity", False)
    show_indexes = options.get("show-indexes", False)
    index_format = options.get("index-format", False)
    stats = options.get("stats", False)
    first_indexes = options.get("best", None)        
    last_indexes = options.get("worst", None)

    from mysql.utilities.common.server import connect_servers
    from mysql.utilities.common.database import Database
    from mysql.utilities.common.table import Table

    # Try to connect to the MySQL database server.
    conn_options = {
        'quiet'     : verbosity == 1,
        'version'   : "5.0.0",
    }
    servers = connect_servers(src_val, None, conn_options)

    source = servers[0]

    db_list = []     # list of databases
    table_list = []  # list of all tables to process
    
    # Build a list of objects to process
    # 1. start with db_list if no objects present on command line
    # 2. process command line options.
    # 3. loop through database list and add all tables
    # 4. check indexes
        
    # Perform the options check here. Loop through objects presented.
    for obj in table_args:
        # If a . appears, we are operating on a specific table
        idx = obj.count(".")
        if (idx == 1):
            table_list.append(obj)
        # Else we are operating on a specific database.
        else:
            db_list.append(obj)
    
    # Loop through database list adding tables
    for db in db_list:
        db_source = Database(source, db)
        db_source.init()
        tables = db_source.get_db_objects("TABLE")
        if not tables and verbosity >= 1:
            print "# Warning: database %s does not exist. Skipping." % (db)
        for table in tables:
            table_list.append(db + "." + table[0])

    # Fail if no tables to check
    if not table_list:
        raise UtilError("No tables to check.")

    if verbosity > 1:
        print "# Checking indexes..."
    # Check indexes for each table in the list
    for table_name in table_list:
        tbl_options = {
            'verbose'  : verbosity >= 1,
            'get_cols' : False,
            'quiet'    : verbosity is None or verbosity < 1
        }
        tbl = Table(source, table_name, tbl_options)
        exists = tbl.exists()
        if not exists and not skip:
            raise UtilError("Table %s does not exist. Use --skip "
                                 "to skip missing tables." % table_name)
        if exists:
            if not tbl.get_indexes():
                if verbosity > 1:
                    print "# Table %s is not indexed." % (table_name)
            else:
                if show_indexes:
                    tbl.print_indexes(index_format)
                    # Show if table has primary key
                if not tbl.has_primary_key():
                    if verbosity > 1:
                        print "#   Table %s does not contain a PRIMARY key."
                tbl.check_indexes(show_drops)
                
            # Show best and/or worst indexes
            if stats:
                if first_indexes is not None:
                    tbl.show_special_indexes(index_format, first_indexes, True)
                if last_indexes is not None:
                    tbl.show_special_indexes(index_format, last_indexes)
                
        if verbosity > 1:
            print "#"

    if verbosity > 1:    
        print "# ...done."
Example #3
def check_index(src_val, table_args, options):
    """Check for duplicate or redundant indexes for one or more tables

    This method will examine the indexes for one or more tables and identify
    any indexes that are potential duplicates or redundant. It prints the
    equivalent DROP statements if selected.

    src_val[in]        a dictionary containing connection information for the
                       source including:
                       (user, password, host, port, socket)
    table_args[in]     list of tables in the form 'db.table' or 'db'
    options[in]        dictionary of options to include:
                         show-drops   : show drop statements for dupe indexes
                         skip         : skip non-existent tables
                         verbosity    : print extra information
                         show-indexes : show all indexes for each table
                         index-format : index format = sql, table, tab, csv
                         worst        : show worst performing indexes
                         best         : show best performing indexes
                         report-indexes : reports tables without PK or UK

    Returns bool True = success, raises UtilError if error
    """

    # Get options
    show_drops = options.get("show-drops", False)
    skip = options.get("skip", False)
    verbosity = options.get("verbosity", False)
    show_indexes = options.get("show-indexes", False)
    index_format = options.get("index-format", False)
    stats = options.get("stats", False)
    first_indexes = options.get("best", None)
    last_indexes = options.get("worst", None)
    report_indexes = options.get("report-indexes", False)

    # Try to connect to the MySQL database server.
    conn_options = {
        'quiet': verbosity == 1,
        'version': "5.0.0",
    }
    servers = connect_servers(src_val, None, conn_options)

    source = servers[0]

    db_list = []  # list of databases
    table_list = []  # list of all tables to process

    # Build a list of objects to process
    # 1. start with db_list if no objects present on command line
    # 2. process command line options.
    # 3. loop through database list and add all tables
    # 4. check indexes

    # Get sql_mode value set on servers
    sql_mode = source.select_variable("SQL_MODE")

    # Perform the options check here. Loop through objects presented.
    for obj in table_args:
        m_obj = parse_object_name(obj, sql_mode)
        # Check if a valid database/table name is specified.
        if m_obj[0] is None:
            raise UtilError(
                PARSE_ERR_OBJ_NAME_FORMAT.format(
                    obj_name=obj, option="the database/table arguments"))
        else:
            db_name, obj_name = m_obj
            if obj_name:
                # Table specified
                table_list.append(obj)
            # Else we are operating on a specific database.
            else:
                # Remove backtick quotes.
                db_name = remove_backtick_quoting(db_name, sql_mode) \
                    if is_quoted_with_backticks(db_name, sql_mode) else db_name
                db_list.append(db_name)

    # Loop through database list adding tables
    for db in db_list:
        db_source = Database(source, db)
        db_source.init()
        tables = db_source.get_db_objects("TABLE")
        if not tables and verbosity >= 1:
            print("# Warning: database %s does not exist. Skipping." % (db))
        for table in tables:
            table_list.append("{0}.{1}".format(
                quote_with_backticks(db, sql_mode),
                quote_with_backticks(table[0], sql_mode)))

    # Fail if no tables to check
    if not table_list:
        raise UtilError("No tables to check.")

    if verbosity > 1:
        print("# Checking indexes...")
    # Check indexes for each table in the list
    # pylint: disable=R0101
    for table_name in table_list:
        tbl_options = {
            'verbose': verbosity >= 1,
            'get_cols': False,
            'quiet': verbosity is None or verbosity < 1
        }
        tbl = Table(source, table_name, tbl_options)
        exists = tbl.exists()
        if not exists and not skip:
            raise UtilError("Table %s does not exist. Use --skip "
                            "to skip missing tables." % table_name)
        if exists:
            if not tbl.get_indexes():
                if verbosity > 1 or report_indexes:
                    print("# Table %s is not indexed." % (table_name))
            else:
                if show_indexes:
                    tbl.print_indexes(index_format, verbosity)
                    # Show if table has primary key
                if verbosity > 1 or report_indexes:
                    if not tbl.has_primary_key():
                        if not tbl.has_unique_key():
                            print("# Table {0} does not contain neither a "
                                  "PRIMARY nor UNIQUE key.".format(table_name))
                        else:
                            print("# Table {0} does not contain a PRIMARY key."
                                  "".format(table_name))
                tbl.check_indexes(show_drops)

            # Show best and/or worst indexes
            if stats:
                if first_indexes is not None:
                    tbl.show_special_indexes(index_format, first_indexes, True)
                if last_indexes is not None:
                    tbl.show_special_indexes(index_format, last_indexes)

        if verbosity > 1:
            print("#")

    if verbosity > 1:
        print("# ...done.")
Example #4
def get_copy_lock(server, db_list, options, include_mysql=False,
                  cloning=False):
    """Get an instance of the Lock class with a standard copy (read) lock

    This method creates an instance of the Lock class using the lock type
    specified in the options. It is used to initiate the locks for the copy
    and related operations.

    server[in]             Server instance for locking calls
    db_list[in]            list of database names
    options[in]            option dictionary
                           Must include the skip_* options for copy and export
    include_mysql[in]      if True, include the mysql tables for copy operation
    cloning[in]            if True, create lock tables with WRITE on dest db
                           Default = False

    Returns Lock - Lock class instance
    """
    rpl_mode = options.get("rpl_mode", None)
    locking = options.get('locking', 'snapshot')

    # Determine if we need to use FTWRL. There are two conditions:
    #  - running on master (rpl_mode = 'master')
    #  - using locking = 'lock-all' and rpl_mode present
    if (rpl_mode in ["master", "both"]) or \
            (rpl_mode and locking == 'lock-all'):
        new_opts = options.copy()
        new_opts['locking'] = 'flush'
        lock = Lock(server, [], new_opts)

    # if this is a lock-all type and not replication operation,
    # find all tables and lock them
    elif locking == 'lock-all':
        table_lock_list = []

        # Build table lock list
        for db_name in db_list:
            db = db_name[0] if type(db_name) == tuple else db_name
            source_db = Database(server, db)
            tables = source_db.get_db_objects("TABLE")
            for table in tables:
                table_lock_list.append(("{0}.{1}".format(db, table[0]),
                                        'READ'))
                # Cloning requires issuing WRITE locks because we use same
                # conn.
                # Non-cloning will issue WRITE lock on a new destination conn.
                if cloning:
                    if db_name[1] is None:
                        db_clone = db_name[0]
                    else:
                        db_clone = db_name[1]
                    # For cloning, we use the same connection so we need to
                    # lock the destination tables with WRITE.
                    table_lock_list.append(("{0}.{1}".format(db_clone,
                                                             table[0]),
                                            'WRITE'))
            # We must include views for server version 5.5.3 and higher
            if server.check_version_compat(5, 5, 3):
                tables = source_db.get_db_objects("VIEW")
                for table in tables:
                    table_lock_list.append(("{0}.{1}".format(db, table[0]),
                                            'READ'))
                    # Cloning requires issuing WRITE locks because we use same
                    # conn.
                    # Non-cloning will issue WRITE lock on a new destination
                    # conn.
                    if cloning:
                        if db_name[1] is None:
                            db_clone = db_name[0]
                        else:
                            db_clone = db_name[1]
                        # For cloning, we use the same connection so we need to
                        # lock the destination tables with WRITE.
                        table_lock_list.append(("{0}.{1}".format(db_clone,
                                                                 table[0]),
                                                'WRITE'))

        # Now add mysql tables
        if include_mysql:
            # Don't lock proc tables if no procs or funcs are being read
            if not options.get('skip_procs', False) and \
               not options.get('skip_funcs', False):
                table_lock_list.append(("mysql.proc", 'READ'))
                table_lock_list.append(("mysql.procs_priv", 'READ'))
            # Don't lock event table if events are skipped
            if not options.get('skip_events', False):
                table_lock_list.append(("mysql.event", 'READ'))
        lock = Lock(server, table_lock_list, options)

    # Use default or no locking option
    else:
        lock = Lock(server, [], options)

    return lock
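A sketch of bracketing a copy operation with the lock returned above, assuming the mysql.utilities package is installed. The connection values and database names are placeholders, and the unlock() call is assumed to be the Lock class's release method.

from mysql.utilities.common.server import connect_servers

# Placeholder connection values (illustration only).
src_val = {"user": "root", "passwd": "secret", "host": "localhost", "port": 3306}
server = connect_servers(src_val, None, {"quiet": True, "version": "5.1.30"})[0]

options = {
    "locking": "lock-all",   # lock every table explicitly
    "rpl_mode": None,
    "skip_procs": False,
    "skip_funcs": False,
    "skip_events": False,
}

lock = get_copy_lock(server, ["db1", "db2"], options, include_mysql=True)
try:
    pass  # perform the copy/export work here while the tables are locked
finally:
    lock.unlock()  # assumed release method on the Lock instance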
Example #5
def _export_data(source, server_values, db_list, output_file, options):
    """Export data from the specified list of databases.

    This private method retrieves the data for each specified database in SQL
    format (e.g., INSERT statements) or in a tabular form (GRID, TAB, CSV,
    VERTICAL) to the specified file.

    This private method does not check permissions.

    source[in]         Server instance.
    server_values[in]  Server connection values.
    db_list[in]        List of databases to export.
    output_file[in]    Output file to store the export data.
    options[in]        Dictionary containing the options for the export:
                       (skip_tables, skip_views, skip_triggers, skip_procs,
                       skip_funcs, skip_events, skip_grants, skip_create,
                       skip_data, no_header, display, format, file_per_tbl,
                       and debug).
    """
    frmt = options.get("format", "sql")
    quiet = options.get("quiet", False)
    file_per_table = options.get("file_per_tbl", False)

    # Get tables list.
    table_list = []
    for db_name in db_list:
        source_db = Database(source, db_name)
        # Build table list.
        tables = source_db.get_db_objects("TABLE")
        for table in tables:
            table_list.append((db_name, table[0]))

    previous_db = ""
    export_tbl_tasks = []
    for table in table_list:

        # Determine start for processing table from a different database.
        db_name = table[0]
        if previous_db != db_name:
            previous_db = db_name
            if not quiet:
                if frmt == "sql":
                    q_db_name = quote_with_backticks(db_name)
                    output_file.write("USE {0};\n".format(q_db_name))
                output_file.write(
                    "# Exporting data from {0}\n".format(db_name)
                )
                if file_per_table:
                    output_file.write("# Writing table data to files.\n")

            # Print sample SOURCE command warning even in quiet mode.
            if file_per_table and frmt == 'sql':
                output_file.write("# The following are sample SOURCE commands."
                                  " If needed correct the path to match files "
                                  "location.\n")

        # Check multiprocess table export (only on POSIX systems).
        if options['multiprocess'] > 1 and os.name == 'posix':
            # Create export task.
            # Note: Server connection values are passed in the task dictionary
            # instead of a server instance, otherwise a multiprocessing error
            # is issued when assigning the task to a worker.
            export_task = {
                'srv_con': server_values,
                'table': table,
                'options': options,
            }
            export_tbl_tasks.append(export_task)
        else:
            # Export data from a table (no multiprocessing).
            _export_table_data(source, table, output_file, options)

        # Print SOURCE command if --file-per-table is used and format is SQL.
        if file_per_table and frmt == 'sql':
            tbl_name = ".".join(table)
            output_file.write(
                "# SOURCE {0}\n".format(_generate_tbl_filename(tbl_name, frmt))
            )

    # Export tables concurrently.
    if export_tbl_tasks:
        # Create process pool.
        workers_pool = multiprocessing.Pool(
            processes=options['multiprocess']
        )
        # Concurrently export tables.
        res = workers_pool.map_async(multiprocess_tbl_export_task,
                                     export_tbl_tasks)
        workers_pool.close()
        # Get list of temporary files with the exported data.
        tmp_files_list = res.get()
        workers_pool.join()

        # Merge resulting temp files (if generated).
        for tmp_filename in tmp_files_list:
            if tmp_filename:
                tmp_file = open(tmp_filename, 'r')
                shutil.copyfileobj(tmp_file, output_file)
                tmp_file.close()
                os.remove(tmp_filename)

    if not quiet:
        output_file.write("#...done.\n")
Example #6
def _export_metadata(source, db_list, output_file, options):
    """Export metadata from the specified list of databases.

    This private method retrieves the objects' metadata for each database listed
    in the form of CREATE (SQL) statements or in a tabular form (GRID, TAB,
    CSV, VERTICAL) to the specified file.

    This private method does not check permissions.

    source[in]         Server instance.
    db_list[in]        List of databases to export.
    output_file[in]    Output file to store the metadata information.
    options[in]        Dictionary containing the options for the export:
                       (skip_tables, skip_views, skip_triggers, skip_procs,
                       skip_funcs, skip_events, skip_grants, skip_create,
                       skip_data, no_header, display, format,
                       debug, exclude_names, exclude_patterns)
    """
    frmt = options.get("format", "sql")
    no_headers = options.get("no_headers", False)
    column_type = options.get("display", "brief")
    quiet = options.get("quiet", False)
    skip_create = options.get("skip_create", False)
    skip_tables = options.get("skip_tables", False)
    skip_views = options.get("skip_views", False)
    skip_triggers = options.get("skip_triggers", False)
    skip_procs = options.get("skip_procs", False)
    skip_funcs = options.get("skip_funcs", False)
    skip_events = options.get("skip_events", False)
    skip_grants = options.get("skip_grants", False)

    for db_name in db_list:

        # Get a Database class instance
        db = Database(source, db_name, options)

        # Export database metadata
        if not quiet:
            output_file.write(
                "# Exporting metadata from {0}\n".format(db.db_name)
            )

        # Perform the extraction
        if frmt == "sql":
            db.init()
            if not skip_create:
                output_file.write(
                    "DROP DATABASE IF EXISTS {0};\n".format(db.q_db_name)
                )
                output_file.write(
                    "CREATE DATABASE {0};\n".format(db.q_db_name)
                )
            output_file.write("USE {0};\n".format(db.q_db_name))
            for dbobj in db.get_next_object():
                if dbobj[0] == "GRANT" and not skip_grants:
                    if not quiet:
                        output_file.write("# Grant:\n")
                    if dbobj[1][3]:
                        create_str = "GRANT {0} ON {1}.{2} TO {3};\n".format(
                            dbobj[1][1], db.q_db_name,
                            quote_with_backticks(dbobj[1][3]), dbobj[1][0]
                        )
                    else:
                        create_str = "GRANT {0} ON {1}.* TO {2};\n".format(
                            dbobj[1][1], db.q_db_name, dbobj[1][0]
                        )
                    output_file.write(create_str)
                else:
                    if not quiet:
                        output_file.write(
                            "# {0}: {1}.{2}\n".format(dbobj[0], db.db_name,
                                                      dbobj[1][0])
                        )
                    if (dbobj[0] == "PROCEDURE" and not skip_procs) or \
                       (dbobj[0] == "FUNCTION" and not skip_funcs) or \
                       (dbobj[0] == "EVENT" and not skip_events) or \
                       (dbobj[0] == "TRIGGER" and not skip_triggers):
                        output_file.write("DELIMITER ||\n")
                    output_file.write("{0};\n".format(
                        db.get_create_statement(db.db_name, dbobj[1][0],
                                                dbobj[0])
                    ))
                    if (dbobj[0] == "PROCEDURE" and not skip_procs) or \
                       (dbobj[0] == "FUNCTION" and not skip_funcs) or \
                       (dbobj[0] == "EVENT" and not skip_events) or \
                       (dbobj[0] == "TRIGGER" and not skip_triggers):
                        output_file.write("||\n")
                        output_file.write("DELIMITER ;\n")
        else:
            objects = []
            if not skip_tables:
                objects.append("TABLE")
            if not skip_funcs:
                objects.append("FUNCTION")
            if not skip_procs:
                objects.append("PROCEDURE")
            if not skip_views:
                objects.append("VIEW")
            if not skip_triggers:
                objects.append("TRIGGER")
            if not skip_events:
                objects.append("EVENT")
            if not skip_grants:
                objects.append("GRANT")
            for obj_type in objects:
                output_file.write(
                    "# {0}S in {1}:".format(obj_type, db.db_name)
                )
                if frmt in ('grid', 'vertical'):
                    rows = db.get_db_objects(obj_type, column_type, True)
                else:
                    rows = db.get_db_objects(obj_type, column_type, True, True)
                if len(rows[1]) < 1:
                    output_file.write(" (none found)\n")
                else:
                    output_file.write("\n")
                    # Cannot use print_list here because we must manipulate
                    # the behavior of format_tabular_list.
                    list_options = {}
                    if frmt == "vertical":
                        format_vertical_list(output_file, rows[0], rows[1])
                    elif frmt == "tab":
                        list_options['print_header'] = not no_headers
                        list_options['separator'] = '\t'
                        format_tabular_list(output_file, rows[0], rows[1],
                                            list_options)
                    elif frmt == "csv":
                        list_options['print_header'] = not no_headers
                        list_options['separator'] = ','
                        format_tabular_list(output_file, rows[0], rows[1],
                                            list_options)
                    else:  # default to table format
                        format_tabular_list(output_file, rows[0], rows[1])

    if not quiet:
        output_file.write("#...done.\n")
Example #7
def check_index(src_val, table_args, options):
    """Check for duplicate or redundant indexes for one or more tables

    This method will examine the indexes for one or more tables and identify
    any indexes that are potential duplicates or redundant. It prints the
    equivalent DROP statements if selected.

    src_val[in]        a dictionary containing connection information for the
                       source including:
                       (user, password, host, port, socket)
    table_args[in]     list of tables in the form 'db.table' or 'db'
    options[in]        dictionary of options to include:
                         show-drops   : show drop statements for dupe indexes
                         skip         : skip non-existent tables
                         verbosity    : print extra information
                         show-indexes : show all indexes for each table
                         index-format : index format = sql, table, tab, csv
                         worst        : show worst performing indexes
                         best         : show best performing indexes
                         report-indexes : reports tables without PK or UK

    Returns bool True = success, raises UtilError if error
    """

    # Get options
    show_drops = options.get("show-drops", False)
    skip = options.get("skip", False)
    verbosity = options.get("verbosity", False)
    show_indexes = options.get("show-indexes", False)
    index_format = options.get("index-format", False)
    stats = options.get("stats", False)
    first_indexes = options.get("best", None)
    last_indexes = options.get("worst", None)
    report_indexes = options.get("report-indexes", False)

    # Try to connect to the MySQL database server.
    conn_options = {
        'quiet': verbosity == 1,
        'version': "5.0.0",
    }
    servers = connect_servers(src_val, None, conn_options)

    source = servers[0]

    db_list = []     # list of databases
    table_list = []  # list of all tables to process

    # Build a list of objects to process
    # 1. start with db_list if no objects present on command line
    # 2. process command line options.
    # 3. loop through database list and add all tables
    # 4. check indexes

    obj_name_regexp = re.compile(REGEXP_QUALIFIED_OBJ_NAME)

    # Perform the options check here. Loop through objects presented.
    for obj in table_args:
        m_obj = obj_name_regexp.match(obj)
        # Check if a valid database/table name is specified.
        if not m_obj:
            raise UtilError(PARSE_ERR_OBJ_NAME_FORMAT.format(
                obj_name=obj, option="the database/table arguments"))
        else:
            db_name, obj_name = m_obj.groups()
            if obj_name:
                # Table specified
                table_list.append(obj)
            # Else we are operating on a specific database.
            else:
                # Remove backtick quotes.
                db_name = remove_backtick_quoting(db_name) \
                    if is_quoted_with_backticks(db_name) else db_name
                db_list.append(db_name)

    # Loop through database list adding tables
    for db in db_list:
        db_source = Database(source, db)
        db_source.init()
        tables = db_source.get_db_objects("TABLE")
        if not tables and verbosity >= 1:
            print "# Warning: database %s does not exist. Skipping." % (db)
        for table in tables:
            table_list.append("{0}.{1}".format(quote_with_backticks(db),
                                               quote_with_backticks(table[0])))

    # Fail if no tables to check
    if not table_list:
        raise UtilError("No tables to check.")

    if verbosity > 1:
        print "# Checking indexes..."
    # Check indexes for each table in the list
    for table_name in table_list:
        tbl_options = {
            'verbose': verbosity >= 1,
            'get_cols': False,
            'quiet': verbosity is None or verbosity < 1
        }
        tbl = Table(source, table_name, tbl_options)
        exists = tbl.exists()
        if not exists and not skip:
            raise UtilError("Table %s does not exist. Use --skip "
                            "to skip missing tables." % table_name)
        if exists:
            if not tbl.get_indexes():
                if verbosity > 1 or report_indexes:
                    print "# Table %s is not indexed." % (table_name)
            else:
                if show_indexes:
                    tbl.print_indexes(index_format, verbosity)
                    # Show if table has primary key
                if verbosity > 1 or report_indexes:
                    if not tbl.has_primary_key():
                        if not tbl.has_unique_key():
                            print("# Table {0} does not contain neither a "
                                  "PRIMARY nor UNIQUE key.".format(table_name))
                        else:
                            print("# Table {0} does not contain a PRIMARY key."
                                  "".format(table_name))
                tbl.check_indexes(show_drops)

            # Show best and/or worst indexes
            if stats:
                if first_indexes is not None:
                    tbl.show_special_indexes(index_format, first_indexes, True)
                if last_indexes is not None:
                    tbl.show_special_indexes(index_format, last_indexes)

        if verbosity > 1:
            print "#"

    if verbosity > 1:
        print "# ...done."
Example #8
def get_copy_lock(server,
                  db_list,
                  options,
                  include_mysql=False,
                  cloning=False):
    """Get an instance of the Lock class with a standard copy (read) lock

    This method creates an instance of the Lock class using the lock type
    specified in the options. It is used to initiate the locks for the copy
    and related operations.

    server[in]             Server instance for locking calls
    db_list[in]            list of database names
    options[in]            option dictionary
                           Must include the skip_* options for copy and export
    include_mysql[in]      if True, include the mysql tables for copy operation
    cloning[in]            if True, create lock tables with WRITE on dest db
                           Default = False

    Returns Lock - Lock class instance
    """
    rpl_mode = options.get("rpl_mode", None)
    locking = options.get('locking', 'snapshot')

    # Determine if we need to use FTWRL. There are two conditions:
    #  - running on master (rpl_mode = 'master')
    #  - using locking = 'lock-all' and rpl_mode present
    if (rpl_mode in ["master", "both"]) or \
            (rpl_mode and locking == 'lock-all'):
        new_opts = options.copy()
        new_opts['locking'] = 'flush'
        lock = Lock(server, [], new_opts)

    # if this is a lock-all type and not replication operation,
    # find all tables and lock them
    # pylint: disable=R0101
    elif locking == 'lock-all':
        table_lock_list = []

        # Build table lock list
        for db_name in db_list:
            db = db_name[0] if isinstance(db_name, tuple) else db_name
            source_db = Database(server, db)
            tables = source_db.get_db_objects("TABLE")
            for table in tables:
                table_lock_list.append(("{0}.{1}".format(db,
                                                         table[0]), 'READ'))
                # Cloning requires issuing WRITE locks because we use same
                # conn.
                # Non-cloning will issue WRITE lock on a new destination conn.
                if cloning:
                    if db_name[1] is None:
                        db_clone = db_name[0]
                    else:
                        db_clone = db_name[1]
                    # For cloning, we use the same connection so we need to
                    # lock the destination tables with WRITE.
                    table_lock_list.append(
                        ("{0}.{1}".format(db_clone, table[0]), 'WRITE'))
            # We must include views for server version 5.5.3 and higher
            if server.check_version_compat(5, 5, 3):
                tables = source_db.get_db_objects("VIEW")
                for table in tables:
                    table_lock_list.append(
                        ("{0}.{1}".format(db, table[0]), 'READ'))
                    # Cloning requires issuing WRITE locks because we use same
                    # conn.
                    # Non-cloning will issue WRITE lock on a new destination
                    # conn.
                    if cloning:
                        if db_name[1] is None:
                            db_clone = db_name[0]
                        else:
                            db_clone = db_name[1]
                        # For cloning, we use the same connection so we need to
                        # lock the destination tables with WRITE.
                        table_lock_list.append(
                            ("{0}.{1}".format(db_clone, table[0]), 'WRITE'))

        # Now add mysql tables
        if include_mysql:
            # Don't lock proc tables if no procs or funcs are being read
            if not options.get('skip_procs', False) and \
               not options.get('skip_funcs', False):
                table_lock_list.append(("mysql.proc", 'READ'))
                table_lock_list.append(("mysql.procs_priv", 'READ'))
            # Don't lock event table if events are skipped
            if not options.get('skip_events', False):
                table_lock_list.append(("mysql.event", 'READ'))
        lock = Lock(server, table_lock_list, options)

    # Use default or no locking option
    else:
        lock = Lock(server, [], options)

    return lock
Example #9
def export_metadata(source, src_val, db_list, options):
    """Produce rows to be used to recreate objects in a database.

    This method retrieves the objects for each database listed in the form
    of CREATE (SQL) statements or in a tabular form to the file specified.
    The valid values for the format parameter are SQL, CSV, TSV, VERTICAL,
    or GRID.

    source[in]         Server instance
    src_val[in]        a dictionary containing connection information for the
                       source including:
                       (user, password, host, port, socket)
    db_list[in]        list of databases to export
    options[in]        a dictionary containing the options for the copy:
                       (skip_tables, skip_views, skip_triggers, skip_procs,
                       skip_funcs, skip_events, skip_grants, skip_create,
                       skip_data, no_header, display, format,
                       debug, exclude_names, exclude_patterns)

    Returns bool True = success, False = error
    """

    from mysql.utilities.common.database import Database
    from mysql.utilities.common.format import format_tabular_list
    from mysql.utilities.common.format import format_vertical_list

    format = options.get("format", "sql")
    no_headers = options.get("no_headers", False)
    column_type = options.get("display", "brief")
    skip_create = options.get("skip_create", False)
    quiet = options.get("quiet", False)
    skip_tables = options.get("skip_tables", False)
    skip_views = options.get("skip_views", False)
    skip_triggers = options.get("skip_triggers", False)
    skip_procs = options.get("skip_procs", False)
    skip_funcs = options.get("skip_funcs", False)
    skip_events = options.get("skip_events", False)
    skip_grants = options.get("skip_grants", False)

    if options.get("all", False):
        rows = source.get_all_databases()
        for row in rows:
            db_list.append(row[0])

    # Check user permissions on source for all databases
    for db_name in db_list:
        source_db = Database(source, db_name)
        # Make a dictionary of the options
        access_options = {
            'skip_views': skip_views,
            'skip_procs': skip_procs,
            'skip_funcs': skip_funcs,
            'skip_grants': skip_grants,
            'skip_events': skip_events,
        }

        source_db.check_read_access(src_val["user"], src_val["host"],
                                    access_options)

    for db_name in db_list:

        # Get a Database class instance
        db = Database(source, db_name, options)

        # Error if the source database does not exist
        if not db.exists():
            raise UtilDBError("Source database does not exist - %s" % db_name,
                              -1, db_name)

        if not quiet:
            print "# Exporting metadata from %s" % db_name

        # Perform the extraction
        if format == "sql":
            db.init()
            # quote database name with backticks
            q_db_name = quote_with_backticks(db_name)
            if not skip_create:
                print "DROP DATABASE IF EXISTS %s;" % q_db_name
                print "CREATE DATABASE %s;" % q_db_name
            print "USE %s;" % q_db_name
            for dbobj in db.get_next_object():
                if dbobj[0] == "GRANT" and not skip_grants:
                    if not quiet:
                        print "# Grant:"
                    if dbobj[1][3]:
                        create_str = "GRANT %s ON %s.%s TO %s;" % \
                                     (dbobj[1][1], q_db_name,
                                      quote_with_backticks(dbobj[1][3]),
                                      dbobj[1][0])
                    else:
                        create_str = "GRANT %s ON %s.* TO %s;" % \
                                     (dbobj[1][1], q_db_name, dbobj[1][0])
                    if "%" in create_str:
                        create_str = re.sub("%", "%%", create_str)
                    print create_str
                else:
                    if not quiet:
                        print "# %s: %s.%s" % (dbobj[0], db_name, dbobj[1][0])
                    if (dbobj[0] == "PROCEDURE" and not skip_procs) or \
                       (dbobj[0] == "FUNCTION" and not skip_funcs) or \
                       (dbobj[0] == "EVENT" and not skip_events) or \
                       (dbobj[0] == "TRIGGER" and not skip_triggers):
                        print "DELIMITER ||"
                    print "%s;" % db.get_create_statement(
                        db_name, dbobj[1][0], dbobj[0])
                    if (dbobj[0] == "PROCEDURE" and not skip_procs) or \
                       (dbobj[0] == "FUNCTION" and not skip_funcs) or \
                       (dbobj[0] == "EVENT" and not skip_events) or \
                       (dbobj[0] == "TRIGGER" and not skip_triggers):
                        print "||"
                        print "DELIMITER ;"
        else:
            objects = []
            if not skip_tables:
                objects.append("TABLE")
            if not skip_views:
                objects.append("VIEW")
            if not skip_triggers:
                objects.append("TRIGGER")
            if not skip_procs:
                objects.append("PROCEDURE")
            if not skip_funcs:
                objects.append("FUNCTION")
            if not skip_events:
                objects.append("EVENT")
            if not skip_grants:
                objects.append("GRANT")
            for obj_type in objects:
                sys.stdout.write("# %sS in %s:" % (obj_type, db_name))
                if format in ('grid', 'vertical'):
                    rows = db.get_db_objects(obj_type, column_type, True)
                else:
                    rows = db.get_db_objects(obj_type, column_type, True, True)
                if len(rows[1]) < 1:
                    print " (none found)"
                else:
                    print
                    # Cannot use print_list here because we must manipulate
                    # the behavior of format_tabular_list
                    list_options = {}
                    if format == "vertical":
                        format_vertical_list(sys.stdout, rows[0], rows[1])
                    elif format == "tab":
                        list_options['print_header'] = not no_headers
                        list_options['separator'] = '\t'
                        format_tabular_list(sys.stdout, rows[0], rows[1],
                                            list_options)
                    elif format == "csv":
                        list_options['print_header'] = not no_headers
                        list_options['separator'] = ','
                        format_tabular_list(sys.stdout, rows[0], rows[1],
                                            list_options)
                    else:  # default to table format
                        format_tabular_list(sys.stdout, rows[0], rows[1])

    if not quiet:
        print "#...done."

    return True
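The sketch below shows one way export_metadata might be invoked, assuming the mysql.utilities package and a reachable server. The connection values and database names are placeholders for illustration.

from mysql.utilities.common.server import connect_servers

# Placeholder connection values (illustration only).
src_val = {"user": "root", "passwd": "secret", "host": "localhost", "port": 3306}
source = connect_servers(src_val, None, {"quiet": True, "version": "5.1.30"})[0]

options = {
    "format": "sql",     # emit CREATE statements instead of a listing
    "skip_data": True,
    "skip_grants": False,
    "quiet": False,
}

# Prints DROP/CREATE/USE statements and object definitions to stdout.
export_metadata(source, src_val, ["db1", "db2"], options)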
Example #10
def export_data(source, src_val, db_list, options):
    """Produce data for the tables in a database.

    This method retrieves the data for each table in the databases listed in
    the form of BULK INSERT (SQL) statements or in a tabular form to the file
    specified. The valid values for the format parameter are SQL, CSV, TSV,
    VERTICAL, or GRID.

    source[in]         Server instance
    src_val[in]        a dictionary containing connection information for the
                       source including:
                       (user, password, host, port, socket)
    db_list[in]        list of databases to export
    options[in]        a dictionary containing the options for the copy:
                       (skip_tables, skip_views, skip_triggers, skip_procs,
                       skip_funcs, skip_events, skip_grants, skip_create,
                       skip_data, no_header, display, format, file_per_tbl,
                       and debug)

    Returns bool True = success, False = error
    """

    from mysql.utilities.common.database import Database
    from mysql.utilities.common.table import Table

    format = options.get("format", "sql")
    no_headers = options.get("no_headers", True)
    column_type = options.get("display", "brief")
    single = options.get("single", False)
    skip_blobs = options.get("skip_blobs", False)
    quiet = options.get("quiet", False)
    file_per_table = options.get("file_per_tbl", False)
    skip_views = options.get("skip_views", False)
    skip_procs = options.get("skip_procs", False)
    skip_funcs = options.get("skip_funcs", False)
    skip_events = options.get("skip_events", False)
    skip_grants = options.get("skip_grants", False)

    if options.get("all", False):
        rows = source.get_all_databases()
        for row in rows:
            if row[0] not in db_list:
                db_list.append(row[0])

    # Check if database exists and user permissions on source for all databases
    table_lock_list = []
    table_list = []
    for db_name in db_list:
        source_db = Database(source, db_name)

        # Make a dictionary of the options
        access_options = {
            'skip_views': skip_views,
            'skip_procs': skip_procs,
            'skip_funcs': skip_funcs,
            'skip_grants': skip_grants,
            'skip_events': skip_events,
        }

        # Error if the source database does not exist
        if not source_db.exists():
            raise UtilDBError("Source database does not exist - %s" % db_name,
                              -1, db_name)

        source_db.check_read_access(src_val["user"], src_val["host"],
                                    access_options)

        # Build table list
        tables = source_db.get_db_objects("TABLE")
        for table in tables:
            table_list.append((db_name, table[0]))

    old_db = ""
    for table in table_list:
        db_name = table[0]
        tbl_name = "%s.%s" % (db_name, table[1])
        # quote database and table name with backticks
        q_db_name = quote_with_backticks(db_name)
        q_tbl_name = "%s.%s" % (q_db_name, quote_with_backticks(table[1]))
        if not quiet and old_db != db_name:
            old_db = db_name
            if format == "sql":
                print "USE %s;" % q_db_name
            print "# Exporting data from %s" % db_name
            if file_per_table:
                print "# Writing table data to files."

        tbl_options = {'verbose': False, 'get_cols': True, 'quiet': quiet}
        cur_table = Table(source, q_tbl_name, tbl_options)
        if single and format not in ("sql", "grid", "vertical"):
            retrieval_mode = -1
            first = True
        else:
            retrieval_mode = 1
            first = False

        message = "# Data for table %s: " % q_tbl_name

        # switch for writing to files
        if file_per_table:
            if format == 'sql':
                file_name = tbl_name + ".sql"
            else:
                file_name = tbl_name + ".%s" % format.lower()
            outfile = open(file_name, "w")
            outfile.write(message + "\n")
        else:
            outfile = None
            print message

        for data_rows in cur_table.retrieve_rows(retrieval_mode):
            _export_row(data_rows, cur_table, format, single, skip_blobs,
                        first, no_headers, outfile)
            if first:
                first = False

        if file_per_table:
            outfile.close()

    if not quiet:
        print "#...done."

    return True
Example #11
def check_index(src_val, table_args, options):
    """Check for duplicate or redundant indexes for one or more tables
    
    This method will examine the indexes for one or more tables and identify
    any indexes that are potential duplicates or redundant. It prints the
    equivalent DROP statements if selected.
    
    src_val[in]        a dictionary containing connection information for the
                       source including:
                       (user, password, host, port, socket)
    table_args[in]     list of tables in the form 'db.table' or 'db'
    options[in]        dictionary of options to include:
                         show-drops   : show drop statements for dupe indexes
                         skip         : skip non-existent tables
                         verbosity    : print extra information
                         show-indexes : show all indexes for each table
                         index-format : index format = sql, table, tab, csv
                         worst        : show worst performing indexes
                         best         : show best performing indexes
    
    Returns bool True = success, raises UtilError if error
    """

    # Get options
    show_drops = options.get("show-drops", False)
    skip = options.get("skip", False)
    verbosity = options.get("verbosity", False)
    show_indexes = options.get("show-indexes", False)
    index_format = options.get("index-format", False)
    stats = options.get("stats", False)
    first_indexes = options.get("best", None)
    last_indexes = options.get("worst", None)

    from mysql.utilities.common.server import connect_servers
    from mysql.utilities.common.database import Database
    from mysql.utilities.common.table import Table

    # Try to connect to the MySQL database server.
    conn_options = {
        'quiet': verbosity == 1,
        'version': "5.0.0",
    }
    servers = connect_servers(src_val, None, conn_options)

    source = servers[0]

    db_list = []  # list of databases
    table_list = []  # list of all tables to process

    # Build a list of objects to process
    # 1. start with db_list if no objects present on command line
    # 2. process command line options.
    # 3. loop through database list and add all tables
    # 4. check indexes

    # Perform the options check here. Loop through objects presented.
    for obj in table_args:
        # If a . appears, we are operating on a specific table
        idx = obj.count(".")
        if (idx == 1):
            table_list.append(obj)
        # Else we are operating on a specific database.
        else:
            db_list.append(obj)

    # Loop through database list adding tables
    for db in db_list:
        db_source = Database(source, db)
        db_source.init()
        tables = db_source.get_db_objects("TABLE")
        if not tables and verbosity >= 1:
            print "# Warning: database %s does not exist. Skipping." % (db)
        for table in tables:
            table_list.append(db + "." + table[0])

    # Fail if no tables to check
    if not table_list:
        raise UtilError("No tables to check.")

    if verbosity > 1:
        print "# Checking indexes..."
    # Check indexes for each table in the list
    for table_name in table_list:
        tbl_options = {
            'verbose': verbosity >= 1,
            'get_cols': False,
            'quiet': verbosity is None or verbosity < 1
        }
        tbl = Table(source, table_name, tbl_options)
        exists = tbl.exists()
        if not exists and not skip:
            raise UtilError("Table %s does not exist. Use --skip "
                            "to skip missing tables." % table_name)
        if exists:
            if not tbl.get_indexes():
                if verbosity > 1:
                    print "# Table %s is not indexed." % (table_name)
            else:
                if show_indexes:
                    tbl.print_indexes(index_format)
                    # Show if table has primary key
                if not tbl.has_primary_key():
                    if verbosity > 1:
                        print "#   Table %s does not contain a PRIMARY key."
                tbl.check_indexes(show_drops)

            # Show best and/or worst indexes
            if stats:
                if first_indexes is not None:
                    tbl.show_special_indexes(index_format, first_indexes, True)
                if last_indexes is not None:
                    tbl.show_special_indexes(index_format, last_indexes)

        if verbosity > 1:
            print "#"

    if verbosity > 1:
        print "# ...done."
Example #12
def copy_db(src_val, dest_val, db_list, options):
    """Copy a database

    This method will copy a database and all of its objects and data from
    one server (source) to another (destination). Options are available to
    selectively ignore each type of object. The do_drop parameter is
    used to permit the copy to overwrite an existing destination database
    (default is to not overwrite).

    src_val[in]        a dictionary containing connection information for the
                       source including:
                       (user, password, host, port, socket)
    dest_val[in]       a dictionary containing connection information for the
                       destination including:
                       (user, password, host, port, socket)
    db_list[in]        list of databases to copy as (src, dst) name tuples
    options[in]        a dictionary containing the options for the copy:
                       (skip_tables, skip_views, skip_triggers, skip_procs,
                       skip_funcs, skip_events, skip_grants, skip_create,
                       skip_data, verbose, do_drop, quiet,
                       connections, debug, exclude_names, exclude_patterns)

    Notes:
        do_drop  - if True, the database on the destination will be dropped
                   if it exists (default is False)
        quiet    - do not print any information during operation
                   (default is False)

    Returns bool True = success, False = error
    """
    verbose = options.get("verbose", False)
    quiet = options.get("quiet", False)
    do_drop = options.get("do_drop", False)
    skip_views = options.get("skip_views", False)
    skip_procs = options.get("skip_procs", False)
    skip_funcs = options.get("skip_funcs", False)
    skip_events = options.get("skip_events", False)
    skip_grants = options.get("skip_grants", False)
    skip_data = options.get("skip_data", False)
    skip_triggers = options.get("skip_triggers", False)
    skip_tables = options.get("skip_tables", False)
    skip_gtid = options.get("skip_gtid", False)
    locking = options.get("locking", "snapshot")

    conn_options = {"quiet": quiet, "version": "5.1.30"}
    servers = connect_servers(src_val, dest_val, conn_options)
    cloning = (src_val == dest_val) or dest_val is None

    source = servers[0]
    if cloning:
        destination = servers[0]
    else:
        destination = servers[1]
        # Test if SQL_MODE is 'NO_BACKSLASH_ESCAPES' in the destination server
        if destination.select_variable("SQL_MODE") == "NO_BACKSLASH_ESCAPES":
            print (
                "# WARNING: The SQL_MODE in the destination server is "
                "'NO_BACKSLASH_ESCAPES', it will be changed temporarily "
                "for data insertion."
            )

    src_gtid = source.supports_gtid() == "ON"
    dest_gtid = destination.supports_gtid() == "ON" if destination else False

    # Get list of all databases from source if --all is specified.
    # Ignore system databases.
    if options.get("all", False):
        # The --all option is valid only if not cloning.
        if not cloning:
            if not quiet:
                print "# Including all databases."
            rows = source.get_all_databases()
            for row in rows:
                db_list.append((row[0], None))  # Keep same name
        else:
            raise UtilError("Cannot copy all databases on the same server.")
    elif not skip_gtid and src_gtid:
        # Check to see if this is a full copy (complete backup)
        all_dbs = source.exec_query("SHOW DATABASES")
        dbs = [db[0] for db in db_list]
        for db in all_dbs:
            if db[0].upper() in ["MYSQL", "INFORMATION_SCHEMA", "PERFORMANCE_SCHEMA", "SYS"]:
                continue
            if not db[0] in dbs:
                print _GTID_BACKUP_WARNING
                break

    # Do error checking and preliminary work:
    #  - Check user permissions on source and destination for all databases
    #  - Check to see if executing on the same server with the same db name (error)
    #  - Build list of tables to lock for copying data (if no skipping data)
    #  - Check storage engine compatibility
    for db_name in db_list:
        source_db = Database(source, db_name[0])
        if destination is None:
            destination = source
        if db_name[1] is None:
            db = db_name[0]
        else:
            db = db_name[1]
        dest_db = Database(destination, db)

        # Make a dictionary of the options
        access_options = {
            "skip_views": skip_views,
            "skip_procs": skip_procs,
            "skip_funcs": skip_funcs,
            "skip_grants": skip_grants,
            "skip_events": skip_events,
            "skip_triggers": skip_triggers,
        }

        source_db.check_read_access(src_val["user"], src_val["host"], access_options)

        # Make a dictionary containing the list of objects from source db
        source_objects = {
            "views": source_db.get_db_objects("VIEW", columns="full"),
            "procs": source_db.get_db_objects("PROCEDURE", columns="full"),
            "funcs": source_db.get_db_objects("FUNCTION", columns="full"),
            "events": source_db.get_db_objects("EVENT", columns="full"),
            "triggers": source_db.get_db_objects("TRIGGER", columns="full"),
        }

        dest_db.check_write_access(dest_val["user"], dest_val["host"], access_options, source_objects, do_drop)

        # Error if source db and destination db are the same and we're cloning
        if destination == source and db_name[0] == db_name[1]:
            raise UtilError(
                "Destination database name is same as "
                "source - source = %s, destination = %s" % (db_name[0], db_name[1])
            )

        # Error if the source database does not exist
        if not source_db.exists():
            raise UtilError("Source database does not exist - %s" % db_name[0])

        # Check storage engines
        check_engine_options(
            destination,
            options.get("new_engine", None),
            options.get("def_engine", None),
            False,
            options.get("quiet", False),
        )

    # Get replication commands if rpl_mode specified.
    # if --rpl specified, dump replication initial commands
    rpl_info = None

    # Turn off foreign keys if they were on at the start
    destination.disable_foreign_key_checks(True)

    # Get GTID commands
    if not skip_gtid:
        gtid_info = get_gtid_commands(source)
        if src_gtid and not dest_gtid:
            print _NON_GTID_WARNING % ("destination", "source", "to")
        elif not src_gtid and dest_gtid:
            print _NON_GTID_WARNING % ("source", "destination", "from")
    else:
        gtid_info = None
        if src_gtid and not cloning:
            print _GTID_WARNING

    # If cloning, turn off gtid generation
    if gtid_info and cloning:
        gtid_info = None
    # if GTIDs enabled, write the GTID commands
    if gtid_info and dest_gtid:
        # Check GTID version for complete feature support
        destination.check_gtid_version()
        # Check the gtid_purged value too
        destination.check_gtid_executed()
        for cmd in gtid_info[0]:
            print "# GTID operation:", cmd
            destination.exec_query(cmd, {"fetch": False, "commit": False})

    if options.get("rpl_mode", None):
        new_opts = options.copy()
        new_opts["multiline"] = False
        new_opts["strict"] = True
        rpl_info = get_change_master_command(src_val, new_opts)
        destination.exec_query("STOP SLAVE", {"fetch": False, "commit": False})

    # Copy (create) objects.
    # We need to delay triggers and events until after the data is loaded
    new_opts = options.copy()
    new_opts["skip_triggers"] = True
    new_opts["skip_events"] = True

    # Get the table locks unless we are cloning with lock-all
    if not (cloning and locking == "lock-all"):
        my_lock = get_copy_lock(source, db_list, options, True)

    _copy_objects(source, destination, db_list, new_opts)

    # If we are cloning, take the write locks prior to copying data
    if cloning and locking == "lock-all":
        my_lock = get_copy_lock(source, db_list, options, True, cloning)

    # Copy tables data
    if not skip_data and not skip_tables:

        # Copy tables
        for db_name in db_list:

            # Get a Database class instance
            db = Database(source, db_name[0], options)

            # Perform the copy
            # Note: No longer use threads, use multiprocessing instead.
            db.init()
            db.copy_data(db_name[1], options, destination, connections=1, src_con_val=src_val, dest_con_val=dest_val)

    # if cloning with lock-all, unlock here to avoid system table lock conflicts
    if cloning and locking == "lock-all":
        my_lock.unlock()

    # Create triggers for all databases
    if not skip_triggers:
        new_opts = options.copy()
        new_opts["skip_tables"] = True
        new_opts["skip_views"] = True
        new_opts["skip_procs"] = True
        new_opts["skip_funcs"] = True
        new_opts["skip_events"] = True
        new_opts["skip_grants"] = True
        new_opts["skip_create"] = True
        _copy_objects(source, destination, db_list, new_opts, False, False)

    # Create events for all databases
    if not skip_events:
        new_opts = options.copy()
        new_opts["skip_tables"] = True
        new_opts["skip_views"] = True
        new_opts["skip_procs"] = True
        new_opts["skip_funcs"] = True
        new_opts["skip_triggers"] = True
        new_opts["skip_grants"] = True
        new_opts["skip_create"] = True
        _copy_objects(source, destination, db_list, new_opts, False, False)

    if not (cloning and locking == "lock-all"):
        my_lock.unlock()

    # if GTIDs enabled, write the GTID-related commands
    if gtid_info and dest_gtid:
        print "# GTID operation:", gtid_info[1]
        destination.exec_query(gtid_info[1])

    if options.get("rpl_mode", None):
        for cmd in rpl_info[_RPL_COMMANDS]:
            if cmd[0] == "#" and not quiet:
                print cmd
            else:
                if verbose:
                    print cmd
                destination.exec_query(cmd)
        destination.exec_query("START SLAVE;")

    # Turn on foreign keys if they were on at the start
    destination.disable_foreign_key_checks(False)

    if not quiet:
        print "#...done."
    return True
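
Below is a minimal usage sketch for copy_db(), based only on the parameters documented above. The import path, connection values, database names, and the exact option keys shown are illustrative assumptions, not taken from this listing.

# Hedged usage sketch for copy_db(); the module path in the import is assumed.
from mysql.utilities.command.dbcopy import copy_db

src_val = {"user": "root", "password": "secret",
           "host": "localhost", "port": 3306, "socket": None}
dest_val = {"user": "root", "password": "secret",
            "host": "backup-host", "port": 3306, "socket": None}

# Each db_list entry is (source_db, destination_db); None keeps the same name.
db_list = [("sales", None), ("hr", "hr_copy")]

# do_drop=True allows overwriting existing destination databases.
options = {"do_drop": True, "quiet": False, "skip_data": False,
           "locking": "snapshot"}

copy_db(src_val, dest_val, db_list, options)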
Exemplo n.º 14
0
def validate_obj_type_dict(server, obj_type_dict):
    """Validates the dictionary of objects against the specified server

    This function builds a dict with the types of the objects in
    obj_type_dict, filtering out non-existing databases and objects.

    Returns a dictionary with only the existing objects, using object types
    as keys and lists of tuples (<DB NAME>, <OBJ_NAME>) as values.
    """
    valid_obj_dict = defaultdict(list)
    server_dbs = set(
        row[0] for row in server.get_all_databases(ignore_internal_dbs=False))
    argument_dbs = set(obj_type_dict.keys())

    # Get non-existing databases and dbs to check
    non_existing_dbs = argument_dbs.difference(server_dbs)
    dbs_to_check = server_dbs.intersection(argument_dbs)

    if non_existing_dbs:
        if len(non_existing_dbs) > 1:
            plurals = ('s', '', 'them')
        else:
            plurals = ('', 'es', 'it')
        print('# WARNING: specified database{0} do{1} not '
              'exist on the base server and will be skipped along '
              'with any tables and routines belonging to {2}: '
              '{3}.'.format(plurals[0], plurals[1], plurals[2],
                            ", ".join(non_existing_dbs)))

    # Get sql_mode value set on servers
    sql_mode = server.select_variable("SQL_MODE")

    # Now for each db that actually exists, get the type of the specified
    # objects
    for db_name in dbs_to_check:
        db = Database(server, db_name)
        # quote database name if necessary
        quoted_db_name = db_name
        if not is_quoted_with_backticks(db_name, sql_mode):
            quoted_db_name = quote_with_backticks(db_name, sql_mode)
        # if wildcard (db.*) is used, add all supported objects of the database
        if '*' in obj_type_dict[db_name]:
            obj_type_dict[db_name] = obj_type_dict[db_name] - set('*')
            tables = (table[0] for table in db.get_db_objects('TABLE'))
            obj_type_dict[db_name] = obj_type_dict[db_name] | set(tables)
            procedures = (proc[0] for proc in db.get_db_objects('PROCEDURE'))
            obj_type_dict[db_name] = obj_type_dict[db_name] | set(procedures)
            functions = (proc[0] for proc in db.get_db_objects('FUNCTION'))
            obj_type_dict[db_name] = obj_type_dict[db_name] | set(functions)
        for obj_name in obj_type_dict[db_name]:
            if obj_name is None:
                # We must consider the database itself
                valid_obj_dict[DATABASE_TYPE].append(
                    (quoted_db_name, quoted_db_name))
            else:
                # get quoted name for obj_name
                quoted_obj_name = obj_name
                if not is_quoted_with_backticks(obj_name, sql_mode):
                    quoted_obj_name = quote_with_backticks(obj_name, sql_mode)

                # Test if the object exists and if it does, test if it
                # is one of the supported object types, else
                # print a warning and skip the object
                obj_type = db.get_object_type(obj_name)
                if obj_type is None:
                    print("# WARNING: specified object does not exist. "
                          "{0}.{1} will be skipped."
                          "".format(quoted_db_name, quoted_obj_name))
                elif 'PROCEDURE' in obj_type or 'FUNCTION' in obj_type:
                    valid_obj_dict[ROUTINE_TYPE].append(
                        (quoted_db_name, quoted_obj_name))
                elif 'TABLE' in obj_type:
                    valid_obj_dict[TABLE_TYPE].append(
                        (quoted_db_name, quoted_obj_name))
                else:
                    print('# WARNING: specified object is not supported '
                          '(not a DATABASE, FUNCTION, PROCEDURE or TABLE),'
                          ' as such it will be skipped: {0}.{1}.'
                          ''.format(quoted_db_name, quoted_obj_name))
    return valid_obj_dict
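
The sketch below illustrates the expected shape of obj_type_dict and a call to the validate_obj_type_dict() defined above. The connect_servers import path, connection values, and the database/object names are assumptions for illustration.

# Hedged usage sketch; the connect_servers import path and values are assumed.
from mysql.utilities.common.server import connect_servers

conn = {"user": "root", "password": "secret",
        "host": "localhost", "port": 3306, "socket": None}
server = connect_servers(conn, None, {"quiet": True, "version": "5.1.30"})[0]

# Keys are database names; values are sets of object names.
# None stands for the database itself, '*' expands to all supported objects.
obj_type_dict = {
    "sales": set(["orders", "monthly_report", None]),
    "archive": set(["*"]),
}
valid = validate_obj_type_dict(server, obj_type_dict)
# valid maps DATABASE_TYPE, TABLE_TYPE and ROUTINE_TYPE to lists of
# (quoted_db_name, quoted_obj_name) tuples for the objects that exist.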
Exemplo n.º 15
0
def validate_obj_type_dict(server, obj_type_dict):
    """Validates the dictionary of objects against the specified server

    This function builds a dict with the types of the objects in
    obj_type_dict, filtering out non-existing databases and objects.

    Returns a dictionary with only the existing objects, using object types
    as keys and lists of tuples (<DB NAME>, <OBJ_NAME>) as values.
    """
    valid_obj_dict = defaultdict(list)
    server_dbs = set(row[0] for row in
                     server.get_all_databases(
                         ignore_internal_dbs=False))
    argument_dbs = set(obj_type_dict.keys())

    # Get non-existing databases and dbs to check
    non_existing_dbs = argument_dbs.difference(server_dbs)
    dbs_to_check = server_dbs.intersection(argument_dbs)

    if non_existing_dbs:
        if len(non_existing_dbs) > 1:
            plurals = ('s', '', 'them')
        else:
            plurals = ('', 'es', 'it')
        print('# WARNING: specified database{0} do{1} not '
              'exist on the base server and will be skipped along '
              'with any tables and routines belonging to {2}: '
              '{3}.'.format(plurals[0], plurals[1], plurals[2],
                            ", ".join(non_existing_dbs)))

    # Now for each db that actually exists, get the type of the specified
    # objects
    for db_name in dbs_to_check:
        db = Database(server, db_name)
        # quote database name if necessary
        quoted_db_name = db_name
        if not is_quoted_with_backticks(db_name):
            quoted_db_name = quote_with_backticks(db_name)
        # if wildcard (db.*) is used, add all supported objects of the database
        if '*' in obj_type_dict[db_name]:
            obj_type_dict[db_name] = obj_type_dict[db_name] - set('*')
            tables = (table[0] for table in db.get_db_objects('TABLE'))
            obj_type_dict[db_name] = obj_type_dict[db_name] | set(tables)
            procedures = (proc[0] for proc in db.get_db_objects('PROCEDURE'))
            obj_type_dict[db_name] = obj_type_dict[db_name] | set(procedures)
            functions = (proc[0] for proc in db.get_db_objects('FUNCTION'))
            obj_type_dict[db_name] = obj_type_dict[db_name] | set(functions)
        for obj_name in obj_type_dict[db_name]:
            if obj_name is None:
                # We must consider the database itself
                valid_obj_dict[DATABASE_TYPE].append((quoted_db_name,
                                                      quoted_db_name))
            else:
                # get quoted name for obj_name
                quoted_obj_name = obj_name
                if not is_quoted_with_backticks(obj_name):
                    quoted_obj_name = quote_with_backticks(obj_name)

                # Test if the object exists and if it does, test if it
                # is one of the supported object types, else
                # print a warning and skip the object
                obj_type = db.get_object_type(obj_name)
                if obj_type is None:
                    print("# WARNING: specified object does not exist. "
                          "{0}.{1} will be skipped."
                          "".format(quoted_db_name, quoted_obj_name))
                elif 'PROCEDURE' in obj_type or 'FUNCTION' in obj_type:
                    valid_obj_dict[ROUTINE_TYPE].append((quoted_db_name,
                                                         quoted_obj_name))
                elif 'TABLE' in obj_type:
                    valid_obj_dict[TABLE_TYPE].append((quoted_db_name,
                                                       quoted_obj_name))
                else:
                    print('# WARNING: specified object is not supported '
                          '(not a DATABASE, FUNCTION, PROCEDURE or TABLE),'
                          ' as such it will be skipped: {0}.{1}.'
                          ''.format(quoted_db_name, quoted_obj_name))
    return valid_obj_dict
Exemplo n.º 16
0
def _export_metadata(source, db_list, output_file, options):
    """Export metadata from the specified list of databases.

    This private method retrieves the object metadata for each database listed
    in the form of CREATE (SQL) statements or in a tabular form (GRID, TAB,
    CSV, VERTICAL) to the specified file.

    This private method does not check permissions.

    source[in]         Server instance.
    db_list[in]        List of databases to export.
    output_file[in]    Output file to store the metadata information.
    options[in]        Dictionary containing the options for the export:
                       (skip_tables, skip_views, skip_triggers, skip_procs,
                       skip_funcs, skip_events, skip_grants, skip_create,
                       skip_data, no_header, display, format,
                       debug, exclude_names, exclude_patterns)
    """
    frmt = options.get("format", "sql")
    no_headers = options.get("no_headers", False)
    column_type = options.get("display", "brief")
    quiet = options.get("quiet", False)
    skip_create = options.get("skip_create", False)
    skip_tables = options.get("skip_tables", False)
    skip_views = options.get("skip_views", False)
    skip_triggers = options.get("skip_triggers", False)
    skip_procs = options.get("skip_procs", False)
    skip_funcs = options.get("skip_funcs", False)
    skip_events = options.get("skip_events", False)
    skip_grants = options.get("skip_grants", False)
    sql_mode = source.select_variable("SQL_MODE")

    for db_name in db_list:

        # Get a Database class instance
        db = Database(source, db_name, options)

        # Export database metadata
        if not quiet:
            output_file.write("# Exporting metadata from {0}\n".format(
                db.q_db_name))

        # Perform the extraction
        if frmt == "sql":
            db.init()
            if not skip_create:
                output_file.write("DROP DATABASE IF EXISTS {0};\n".format(
                    db.q_db_name))
                output_file.write("CREATE DATABASE {0};\n".format(
                    db.q_db_name))
            output_file.write("USE {0};\n".format(db.q_db_name))
            for dbobj in db.get_next_object():
                if dbobj[0] == "GRANT" and not skip_grants:
                    if not quiet:
                        output_file.write("# Grant:\n")
                    if dbobj[1][3]:
                        create_str = "GRANT {0} ON {1}.{2} TO {3};\n".format(
                            dbobj[1][1], db.q_db_name,
                            quote_with_backticks(dbobj[1][3], sql_mode),
                            dbobj[1][0])
                    else:
                        create_str = "GRANT {0} ON {1}.* TO {2};\n".format(
                            dbobj[1][1], db.q_db_name, dbobj[1][0])
                    output_file.write(create_str)
                else:
                    if not quiet:
                        output_file.write("# {0}: {1}.{2}\n".format(
                            dbobj[0], db.q_db_name,
                            quote_with_backticks(dbobj[1][0], sql_mode)))
                    if (dbobj[0] == "PROCEDURE" and not skip_procs) or \
                       (dbobj[0] == "FUNCTION" and not skip_funcs) or \
                       (dbobj[0] == "EVENT" and not skip_events) or \
                       (dbobj[0] == "TRIGGER" and not skip_triggers):
                        output_file.write("DELIMITER ||\n")
                    output_file.write("{0};\n".format(
                        db.get_create_statement(db.db_name, dbobj[1][0],
                                                dbobj[0])))
                    if (dbobj[0] == "PROCEDURE" and not skip_procs) or \
                       (dbobj[0] == "FUNCTION" and not skip_funcs) or \
                       (dbobj[0] == "EVENT" and not skip_events) or \
                       (dbobj[0] == "TRIGGER" and not skip_triggers):
                        output_file.write("||\n")
                        output_file.write("DELIMITER ;\n")
        else:
            objects = []
            if not skip_tables:
                objects.append("TABLE")
            if not skip_funcs:
                objects.append("FUNCTION")
            if not skip_procs:
                objects.append("PROCEDURE")
            if not skip_views:
                objects.append("VIEW")
            if not skip_triggers:
                objects.append("TRIGGER")
            if not skip_events:
                objects.append("EVENT")
            if not skip_grants:
                objects.append("GRANT")
            for obj_type in objects:
                output_file.write("# {0}S in {1}:".format(
                    obj_type, db.q_db_name))
                if frmt in ('grid', 'vertical'):
                    rows = db.get_db_objects(obj_type, column_type, True)
                else:
                    rows = db.get_db_objects(obj_type, column_type, True, True)
                if len(rows[1]) < 1:
                    output_file.write(" (none found)\n")
                else:
                    output_file.write("\n")
                    # Cannot use print_list here because we must manipulate
                    # the behavior of format_tabular_list.
                    list_options = {}
                    if frmt == "vertical":
                        format_vertical_list(output_file, rows[0], rows[1])
                    elif frmt == "tab":
                        list_options['print_header'] = not no_headers
                        list_options['separator'] = '\t'
                        format_tabular_list(output_file, rows[0], rows[1],
                                            list_options)
                    elif frmt == "csv":
                        list_options['print_header'] = not no_headers
                        list_options['separator'] = ','
                        format_tabular_list(output_file, rows[0], rows[1],
                                            list_options)
                    else:  # default to table format
                        format_tabular_list(output_file, rows[0], rows[1])

    if not quiet:
        output_file.write("#...done.\n")
Exemplo n.º 17
0
def _export_data(source, server_values, db_list, output_file, options):
    """Export data from the specified list of databases.

    This private method retrieves the data for each specified database in SQL
    format (e.g., INSERT statements) or in a tabular form (GRID, TAB, CSV,
    VERTICAL) to the specified file.

    This private method does not check permissions.

    source[in]         Server instance.
    server_values[in]  Server connection values.
    db_list[in]        List of databases to export.
    output_file[in]    Output file to store the export data.
    options[in]        Dictionary containing the options for the export:
                       (skip_tables, skip_views, skip_triggers, skip_procs,
                       skip_funcs, skip_events, skip_grants, skip_create,
                       skip_data, no_header, display, format, file_per_tbl,
                       and debug).
    """
    frmt = options.get("format", "sql")
    quiet = options.get("quiet", False)
    file_per_table = options.get("file_per_tbl", False)
    sql_mode = source.select_variable("SQL_MODE")

    # Get tables list.
    table_list = []
    for db_name in db_list:
        source_db = Database(source, db_name, options)
        # Build table list.
        tables = source_db.get_db_objects("TABLE")
        for table in tables:
            table_list.append((db_name, table[0]))

    previous_db = ""
    export_tbl_tasks = []
    for table in table_list:

        # Determine start for processing table from a different database.
        db_name = table[0]
        if previous_db != db_name:
            previous_db = db_name
            if not quiet:
                q_db_name = quote_with_backticks(db_name, sql_mode)
                if frmt == "sql":
                    output_file.write("USE {0};\n".format(q_db_name))
                output_file.write(
                    "# Exporting data from {0}\n".format(q_db_name))
                if file_per_table:
                    output_file.write("# Writing table data to files.\n")

            # Print sample SOURCE command warning even in quiet mode.
            if file_per_table and frmt == 'sql':
                output_file.write("# The following are sample SOURCE commands."
                                  " If needed correct the path to match files "
                                  "location.\n")

        # Check multiprocess table export (only on POSIX systems).
        if options['multiprocess'] > 1 and os.name == 'posix':
            # Create export task.
            # Note: Server connection values are passed in the task dictionary
            # instead of a server instance, otherwise a multiprocessing error
            # is issued when assigning the task to a worker.
            export_task = {
                'srv_con': server_values,
                'table': table,
                'options': options,
            }
            export_tbl_tasks.append(export_task)
        else:
            # Export data from a table (no multiprocessing).
            _export_table_data(source, table, output_file, options)

        # Print SOURCE command if --file-per-table is used and format is SQL.
        if file_per_table and frmt == 'sql':
            tbl_name = ".".join(table)
            output_file.write("# SOURCE {0}\n".format(
                _generate_tbl_filename(tbl_name, frmt)))

    # Export tables concurrently.
    if export_tbl_tasks:
        # Create process pool.
        workers_pool = multiprocessing.Pool(processes=options['multiprocess'])
        # Concurrently export tables.
        res = workers_pool.map_async(multiprocess_tbl_export_task,
                                     export_tbl_tasks)
        workers_pool.close()
        # Get list of temporary files with the exported data.
        tmp_files_list = res.get()
        workers_pool.join()

        # Merge resulting temp files (if generated).
        for tmp_filename in tmp_files_list:
            if tmp_filename:
                tmp_file = open(tmp_filename, 'r')
                shutil.copyfileobj(tmp_file, output_file)
                tmp_file.close()
                os.remove(tmp_filename)

    if not quiet:
        output_file.write("#...done.\n")
Exemplo n.º 18
0
def export_data(source, src_val, db_list, options):
    """Produce data for the tables in a database.

    This method retrieves the data for each table in the databases listed in
    the form of BULK INSERT (SQL) statements or in a tabular form to the file
    specified. The valid values for the format parameter are SQL, CSV, TSV,
    VERITCAL, or GRID.

    source[in]         Server instance
    src_val[in]        a dictionary containing connection information for the
                       source including:
                       (user, password, host, port, socket)
    db_list[in]        list of databases to export
    options[in]        a dictionary containing the options for the export:
                       (skip_tables, skip_views, skip_triggers, skip_procs,
                       skip_funcs, skip_events, skip_grants, skip_create,
                       skip_data, no_header, display, format, file_per_tbl,
                       and debug)

    Returns bool True = success, False = error
    """

    from mysql.utilities.common.database import Database
    from mysql.utilities.common.table import Table

    format = options.get("format", "sql")
    no_headers = options.get("no_headers", True)
    column_type = options.get("display", "brief")
    single = options.get("single", False)
    skip_blobs = options.get("skip_blobs", False)
    quiet = options.get("quiet", False)
    file_per_table = options.get("file_per_tbl", False)
    skip_views = options.get("skip_views", False)
    skip_procs = options.get("skip_procs", False)
    skip_funcs = options.get("skip_funcs", False)
    skip_events = options.get("skip_events", False)
    skip_grants = options.get("skip_grants", False)

    if options.get("all", False):
        rows = source.get_all_databases()
        for row in rows:
            if row[0] not in db_list:
                db_list.append(row[0])
                
    # Check if database exists and user permissions on source for all databases
    table_lock_list = []
    table_list = []
    for db_name in db_list:
        source_db = Database(source, db_name)

        # Make a dictionary of the options
        access_options = {
            'skip_views'  : skip_views,
            'skip_procs'  : skip_procs,
            'skip_funcs'  : skip_funcs,
            'skip_grants' : skip_grants,
            'skip_events' : skip_events,
        }

        # Error if the source database does not exist
        if not source_db.exists():
            raise UtilDBError("Source database does not exist - %s" % db_name,
                              -1, db_name)
            
        source_db.check_read_access(src_val["user"], src_val["host"],
                                    access_options)

        # Build table list
        tables = source_db.get_db_objects("TABLE")
        for table in tables:
            table_list.append((db_name, table[0]))
        
    old_db = ""
    for table in table_list:
        db_name = table[0]
        tbl_name = "%s.%s" % (db_name, table[1])
        # quote database and table name with backticks
        q_db_name = quote_with_backticks(db_name)
        q_tbl_name = "%s.%s" % (q_db_name, quote_with_backticks(table[1]))
        if not quiet and old_db != db_name:
            old_db = db_name
            if format == "sql":
               print "USE %s;" % q_db_name
            print "# Exporting data from %s" % db_name
            if file_per_table:
                print "# Writing table data to files."

        tbl_options = {
            'verbose'  : False,
            'get_cols' : True,
            'quiet'    : quiet
        }
        cur_table = Table(source, q_tbl_name, tbl_options)
        if single and format not in ("sql", "grid", "vertical"):
            retrieval_mode = -1
            first = True
        else:
            retrieval_mode = 1
            first = False

        message = "# Data for table %s: " % q_tbl_name

        # switch for writing to files
        if file_per_table:
            if format == 'sql':
                file_name = tbl_name + ".sql"
            else:
                file_name = tbl_name + ".%s" % format.lower()
            outfile = open(file_name, "w")
            outfile.write(message + "\n")
        else:
            outfile = None
            print message

        for data_rows in cur_table.retrieve_rows(retrieval_mode):
            _export_row(data_rows, cur_table, format, single,
                        skip_blobs, first, no_headers, outfile)
            if first:
                first = False
 
        if file_per_table:
            outfile.close()
  
    if not quiet:
        print "#...done."

    return True
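
Below is a minimal usage sketch for export_data(); the import path, connection values, database names, and option keys are assumptions for illustration.

# Hedged usage sketch for export_data(); the import path is assumed.
from mysql.utilities.command.dbexport import export_data
from mysql.utilities.common.server import connect_servers

src_val = {"user": "root", "password": "secret",
           "host": "localhost", "port": 3306, "socket": None}
source = connect_servers(src_val, None, {"quiet": True, "version": "5.1.30"})[0]

# Write each table's rows to its own .csv file instead of stdout.
options = {"format": "csv", "file_per_tbl": True, "quiet": False}
export_data(source, src_val, ["sales", "hr"], options)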
Exemplo n.º 19
0
def copy_db(src_val, dest_val, db_list, options):
    """Copy a database

    This method will copy a database and all of its objects and data from
    one server (source) to another (destination). Options are available to
    selectively ignore each type of object. The do_drop parameter is
    used to permit the copy to overwrite an existing destination database
    (default is to not overwrite).

    src_val[in]        a dictionary containing connection information for the
                       source including:
                       (user, password, host, port, socket)
    dest_val[in]       a dictionary containing connection information for the
                       destination including:
                       (user, password, host, port, socket)
    db_list[in]        list of databases to copy in the form (src_name, dest_name),
                       where a dest_name of None keeps the source name
    options[in]        a dictionary containing the options for the copy:
                       (skip_tables, skip_views, skip_triggers, skip_procs,
                       skip_funcs, skip_events, skip_grants, skip_create,
                       skip_data, verbose, do_drop, quiet,
                       connections, debug, exclude_names, exclude_patterns)

    Notes:
        do_drop  - if True, the database on the destination will be dropped
                   if it exists (default is False)
        quiet    - do not print any information during operation
                   (default is False)

    Returns bool True = success, False = error
    """
    verbose = options.get("verbose", False)
    quiet = options.get("quiet", False)
    do_drop = options.get("do_drop", False)
    skip_views = options.get("skip_views", False)
    skip_procs = options.get("skip_procs", False)
    skip_funcs = options.get("skip_funcs", False)
    skip_events = options.get("skip_events", False)
    skip_grants = options.get("skip_grants", False)
    skip_data = options.get("skip_data", False)
    skip_triggers = options.get("skip_triggers", False)
    skip_tables = options.get("skip_tables", False)
    skip_gtid = options.get("skip_gtid", False)
    locking = options.get("locking", "snapshot")

    conn_options = {
        'quiet': quiet,
        'version': "5.1.30",
    }
    servers = connect_servers(src_val, dest_val, conn_options)
    cloning = (src_val == dest_val) or dest_val is None

    source = servers[0]
    if cloning:
        destination = servers[0]
    else:
        destination = servers[1]
        # Test if SQL_MODE is 'NO_BACKSLASH_ESCAPES' in the destination server
        if destination.select_variable("SQL_MODE") == "NO_BACKSLASH_ESCAPES":
            print(
                "# WARNING: The SQL_MODE in the destination server is "
                "'NO_BACKSLASH_ESCAPES', it will be changed temporarily "
                "for data insertion.")

    src_gtid = source.supports_gtid() == 'ON'
    dest_gtid = destination.supports_gtid() == 'ON' if destination else False

    # Get list of all databases from source if --all is specified.
    # Ignore system databases.
    if options.get("all", False):
        # The --all option is valid only if not cloning.
        if not cloning:
            if not quiet:
                print "# Including all databases."
            rows = source.get_all_databases()
            for row in rows:
                db_list.append((row[0], None))  # Keep same name
        else:
            raise UtilError("Cannot copy all databases on the same server.")
    elif not skip_gtid and src_gtid:
        # Check to see if this is a full copy (complete backup)
        all_dbs = source.exec_query("SHOW DATABASES")
        dbs = [db[0] for db in db_list]
        for db in all_dbs:
            if db[0].upper() in [
                    "MYSQL", "INFORMATION_SCHEMA", "PERFORMANCE_SCHEMA"
            ]:
                continue
            if not db[0] in dbs:
                print _GTID_BACKUP_WARNING
                break

    # Do error checking and preliminary work:
    #  - Check user permissions on source and destination for all databases
    #  - Check to see if executing on the same server with the same db name (error)
    #  - Build list of tables to lock for copying data (if no skipping data)
    #  - Check storage engine compatibility
    for db_name in db_list:
        source_db = Database(source, db_name[0])
        if destination is None:
            destination = source
        if db_name[1] is None:
            db = db_name[0]
        else:
            db = db_name[1]
        dest_db = Database(destination, db)

        # Make a dictionary of the options
        access_options = {
            'skip_views': skip_views,
            'skip_procs': skip_procs,
            'skip_funcs': skip_funcs,
            'skip_grants': skip_grants,
            'skip_events': skip_events,
            'skip_triggers': skip_triggers,
        }

        source_db.check_read_access(src_val["user"], src_val["host"],
                                    access_options)

        # Make a dictionary containing the list of objects from source db
        source_objects = {
            "views": source_db.get_db_objects("VIEW", columns="full"),
            "procs": source_db.get_db_objects("PROCEDURE", columns="full"),
            "funcs": source_db.get_db_objects("FUNCTION", columns="full"),
            "events": source_db.get_db_objects("EVENT", columns="full"),
            "triggers": source_db.get_db_objects("TRIGGER", columns="full"),
        }

        dest_db.check_write_access(dest_val['user'], dest_val['host'],
                                   access_options, source_objects, do_drop)

        # Error if source db and destination db are the same and we're cloning
        if destination == source and db_name[0] == db_name[1]:
            raise UtilError("Destination database name is same as "
                            "source - source = %s, destination = %s" %
                            (db_name[0], db_name[1]))

        # Error if the source database does not exist
        if not source_db.exists():
            raise UtilError("Source database does not exist - %s" % db_name[0])

        # Check storage engines
        check_engine_options(destination, options.get("new_engine", None),
                             options.get("def_engine", None), False,
                             options.get("quiet", False))

    # Get replication commands if rpl_mode specified.
    # if --rpl specified, dump replication initial commands
    rpl_info = None

    # Turn off foreign keys if they were on at the start
    destination.disable_foreign_key_checks(True)

    # Get GTID commands
    if not skip_gtid:
        gtid_info = get_gtid_commands(source)
        if src_gtid and not dest_gtid:
            print _NON_GTID_WARNING % ("destination", "source", "to")
        elif not src_gtid and dest_gtid:
            print _NON_GTID_WARNING % ("source", "destination", "from")
    else:
        gtid_info = None
        if src_gtid and not cloning:
            print _GTID_WARNING

    # If cloning, turn off gtid generation
    if gtid_info and cloning:
        gtid_info = None
    # if GTIDs enabled, write the GTID commands
    if gtid_info and dest_gtid:
        # Check GTID version for complete feature support
        destination.check_gtid_version()
        # Check the gtid_purged value too
        destination.check_gtid_executed()
        for cmd in gtid_info[0]:
            print "# GTID operation:", cmd
            destination.exec_query(cmd, {'fetch': False, 'commit': False})

    if options.get("rpl_mode", None):
        new_opts = options.copy()
        new_opts['multiline'] = False
        new_opts['strict'] = True
        rpl_info = get_change_master_command(src_val, new_opts)
        destination.exec_query("STOP SLAVE", {'fetch': False, 'commit': False})

    # Copy (create) objects.
    # We need to delay triggers and events until after the data is loaded
    new_opts = options.copy()
    new_opts['skip_triggers'] = True
    new_opts['skip_events'] = True

    # Get the table locks unless we are cloning with lock-all
    if not (cloning and locking == 'lock-all'):
        my_lock = get_copy_lock(source, db_list, options, True)

    _copy_objects(source, destination, db_list, new_opts)

    # If we are cloning, take the write locks prior to copying data
    if cloning and locking == 'lock-all':
        my_lock = get_copy_lock(source, db_list, options, True, cloning)

    # Copy tables data
    if not skip_data and not skip_tables:

        # Copy tables
        for db_name in db_list:

            # Get a Database class instance
            db = Database(source, db_name[0], options)

            # Perform the copy
            # Note: No longer use threads, use multiprocessing instead.
            db.init()
            db.copy_data(db_name[1],
                         options,
                         destination,
                         connections=1,
                         src_con_val=src_val,
                         dest_con_val=dest_val)

    # if cloning with lock-all, unlock here to avoid system table lock conflicts
    if cloning and locking == 'lock-all':
        my_lock.unlock()

    # Create triggers for all databases
    if not skip_triggers:
        new_opts = options.copy()
        new_opts['skip_tables'] = True
        new_opts['skip_views'] = True
        new_opts['skip_procs'] = True
        new_opts['skip_funcs'] = True
        new_opts['skip_events'] = True
        new_opts['skip_grants'] = True
        new_opts['skip_create'] = True
        _copy_objects(source, destination, db_list, new_opts, False, False)

    # Create events for all databases
    if not skip_events:
        new_opts = options.copy()
        new_opts['skip_tables'] = True
        new_opts['skip_views'] = True
        new_opts['skip_procs'] = True
        new_opts['skip_funcs'] = True
        new_opts['skip_triggers'] = True
        new_opts['skip_grants'] = True
        new_opts['skip_create'] = True
        _copy_objects(source, destination, db_list, new_opts, False, False)

    if not (cloning and locking == 'lock-all'):
        my_lock.unlock()

    # if GTIDs enabled, write the GTID-related commands
    if gtid_info and dest_gtid:
        print "# GTID operation:", gtid_info[1]
        destination.exec_query(gtid_info[1])

    if options.get("rpl_mode", None):
        for cmd in rpl_info[_RPL_COMMANDS]:
            if cmd[0] == '#' and not quiet:
                print cmd
            else:
                if verbose:
                    print cmd
                destination.exec_query(cmd)
        destination.exec_query("START SLAVE;")

    # Turn on foreign keys if they were on at the start
    destination.disable_foreign_key_checks(False)

    if not quiet:
        print "#...done."
    return True
Exemplo n.º 20
0
def export_metadata(source, src_val, db_list, options):
    """Produce rows to be used to recreate objects in a database.

    This method retrieves the objects for each database listed in the form
    of CREATE (SQL) statements or in a tabular form to the file specified.
    The valid values for the format parameter are SQL, CSV, TSV, VERTICAL,
    or GRID.

    source[in]         Server instance
    src_val[in]        a dictionary containing connection information for the
                       source including:
                       (user, password, host, port, socket)
    db_list[in]        list of databases to export
    options[in]        a dictionary containing the options for the export:
                       (skip_tables, skip_views, skip_triggers, skip_procs,
                       skip_funcs, skip_events, skip_grants, skip_create,
                       skip_data, no_header, display, format,
                       debug, exclude_names, exclude_patterns)

    Returns bool True = success, False = error
    """

    from mysql.utilities.common.database import Database
    from mysql.utilities.common.format import format_tabular_list
    from mysql.utilities.common.format import format_vertical_list

    format = options.get("format", "sql")
    no_headers = options.get("no_headers", False)
    column_type = options.get("display", "brief")
    skip_create = options.get("skip_create", False)
    quiet = options.get("quiet", False)
    skip_tables = options.get("skip_tables", False)
    skip_views = options.get("skip_views", False)
    skip_triggers = options.get("skip_triggers", False)
    skip_procs = options.get("skip_procs", False)
    skip_funcs = options.get("skip_funcs", False)
    skip_events = options.get("skip_events", False)
    skip_grants = options.get("skip_grants", False)

    if options.get("all", False):
        rows = source.get_all_databases()
        for row in rows:
            db_list.append(row[0])

    # Check user permissions on source for all databases
    for db_name in db_list:
        source_db = Database(source, db_name)
        # Make a dictionary of the options
        access_options = {
            'skip_views'  : skip_views,
            'skip_procs'  : skip_procs,
            'skip_funcs'  : skip_funcs,
            'skip_grants' : skip_grants,
            'skip_events' : skip_events,
        }

        source_db.check_read_access(src_val["user"], src_val["host"],
                                    access_options)
    
    for db_name in db_list:

        # Get a Database class instance
        db = Database(source, db_name, options)

        # Error if the source database does not exist
        if not db.exists():
            raise UtilDBError("Source database does not exist - %s" % db_name,
                              -1, db_name)

        if not quiet:
            print "# Exporting metadata from %s" % db_name

        # Perform the extraction
        if format == "sql":
            db.init()
            # quote database name with backticks
            q_db_name = quote_with_backticks(db_name)
            if not skip_create:
                print "DROP DATABASE IF EXISTS %s;" % q_db_name
                print "CREATE DATABASE %s;" % q_db_name
            print "USE %s;" % q_db_name
            for dbobj in db.get_next_object():
                if dbobj[0] == "GRANT" and not skip_grants:
                    if not quiet:
                        print "# Grant:"
                    if dbobj[1][3]:
                        create_str = "GRANT %s ON %s.%s TO %s;" % \
                                     (dbobj[1][1], q_db_name,
                                      quote_with_backticks(dbobj[1][3]), 
                                      dbobj[1][0])
                    else:
                        create_str = "GRANT %s ON %s.* TO %s;" % \
                                     (dbobj[1][1], q_db_name, dbobj[1][0])
                    if create_str.find("%"):
                        create_str = re.sub("%", "%%", create_str)
                    print create_str
                else:
                    if not quiet:
                        print "# %s: %s.%s" % (dbobj[0], db_name,
                                               dbobj[1][0])
                    if (dbobj[0] == "PROCEDURE" and not skip_procs) or \
                       (dbobj[0] == "FUNCTION" and not skip_funcs) or \
                       (dbobj[0] == "EVENT" and not skip_events) or \
                       (dbobj[0] == "TRIGGER" and not skip_triggers):
                        print "DELIMITER ||"
                    print "%s;" % db.get_create_statement(db_name,
                                                          dbobj[1][0],
                                                          dbobj[0])
                    if (dbobj[0] == "PROCEDURE" and not skip_procs) or \
                       (dbobj[0] == "FUNCTION" and not skip_funcs) or \
                       (dbobj[0] == "EVENT" and not skip_events) or \
                       (dbobj[0] == "TRIGGER" and not skip_triggers):
                        print "||"
                        print "DELIMITER ;"
        else:
            objects = []
            if not skip_tables:
                objects.append("TABLE")
            if not skip_views:
                objects.append("VIEW")
            if not skip_triggers:
                objects.append("TRIGGER")
            if not skip_procs:
                objects.append("PROCEDURE")
            if not skip_funcs:
                objects.append("FUNCTION")
            if not skip_events:
                objects.append("EVENT")
            if not skip_grants:
                objects.append("GRANT")
            for obj_type in objects:
                sys.stdout.write("# %sS in %s:" % (obj_type, db_name))
                if format in ('grid', 'vertical'):
                    rows = db.get_db_objects(obj_type, column_type, True)
                else:
                    rows = db.get_db_objects(obj_type, column_type, True, True)
                if len(rows[1]) < 1:
                    print " (none found)"
                else:
                    print
                    # Cannot use print_list here because we must manipulate
                    # the behavior of format_tabular_list
                    list_options = {}
                    if format == "vertical":
                        format_vertical_list(sys.stdout, rows[0], rows[1])
                    elif format == "tab":
                        list_options['print_header'] = not no_headers
                        list_options['separator'] = '\t'
                        format_tabular_list(sys.stdout, rows[0], rows[1],
                                            list_options)
                    elif format == "csv":
                        list_options['print_header'] = not no_headers
                        list_options['separator'] = ','
                        format_tabular_list(sys.stdout, rows[0], rows[1],
                                            list_options)
                    else:  # default to table format
                        format_tabular_list(sys.stdout, rows[0], rows[1])

    if not quiet:
        print "#...done."

    return True
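
Below is a minimal usage sketch for export_metadata(); the import path, connection values, database name, and option keys are assumptions for illustration.

# Hedged usage sketch for export_metadata(); the import path is assumed.
from mysql.utilities.command.dbexport import export_metadata
from mysql.utilities.common.server import connect_servers

src_val = {"user": "root", "password": "secret",
           "host": "localhost", "port": 3306, "socket": None}
source = connect_servers(src_val, None, {"quiet": True, "version": "5.1.30"})[0]

# Emit DROP/CREATE statements for all objects except grants.
options = {"format": "sql", "skip_grants": True, "quiet": False}
export_metadata(source, src_val, ["sales"], options)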