Example #1
0
def check_running_tasks(label, name):
    """
    Check for any currently running Sync or Export tasks
    Exits script if any Synchronize or Export tasks are found in a running state.
    """
    #pylint: disable-msg=R0912,R0914,R0915
    tasks = helpers.get_p_json(
        helpers.FOREMAN_API + "tasks/", \
                json.dumps(
                    {
                        "per_page": "100",
                    }
                ))

    # From the list of tasks, look for any running export or sync jobs.
    # If we have any we exit, as we can't export in this state.
    ok_to_export = True
    for task_result in tasks['results']:
        if task_result['state'] == 'running':
            if task_result['humanized']['action'] == 'Export':
                if task_result['input']['repository']['label'] == label:
                    msg = "Unable to export due to export task in progress"
                    if name == 'DoV':
                        helpers.log_msg(msg, 'ERROR')
                        sys.exit(-1)
                    else:
                        helpers.log_msg(msg, 'WARNING')
                        ok_to_export = False
            if task_result['humanized']['action'] == 'Synchronize':
                if task_result['input']['repository']['label'] == label:
                    msg = "Unable to export due to sync task in progress"
                    if name == 'DoV':
                        helpers.log_msg(msg, 'ERROR')
                        sys.exit(-1)
                    else:
                        helpers.log_msg(msg, 'WARNING')
                        ok_to_export = False
        if task_result['state'] == 'paused':
            if task_result['humanized']['action'] == 'Export':
                if task_result['input']['repository']['label'] == label:
                    msg = "Unable to export due to paused export task - Please resolve this issue."
                    if name == 'DoV':
                        helpers.log_msg(msg, 'ERROR')
                        sys.exit(-1)
                    else:
                        helpers.log_msg(msg, 'WARNING')
                        ok_to_export = False
            if task_result['humanized']['action'] == 'Synchronize':
                if task_result['input']['repository']['label'] == label:
                    msg = "Unable to export due to paused sync task."
                    if name == 'DoV':
                        helpers.log_msg(msg, 'ERROR')
                        sys.exit(-1)
                    else:
                        helpers.log_msg(msg, 'WARNING')
                        ok_to_export = False

    check_incomplete_sync()
    return ok_to_export
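
For reference, a minimal sketch of the task record shape this filter walks. The field names are taken from the code above; a real Foreman/Katello task payload carries many more fields, and the values here are purely illustrative.

# Illustrative only: a fabricated tasks-API result shaped like the fields
# check_running_tasks() reads. Real responses contain far more data.
sample_tasks = {
    "results": [
        {
            "state": "running",                      # also seen: 'paused'
            "humanized": {"action": "Synchronize"},  # or 'Export'
            "input": {"repository": {"label": "my_repo_label"}},
        },
    ]
}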
Example #2
0
def check_running_tasks(label, name):
    """
    Check for any currently running Sync or Export tasks
    Exits script if any Synchronize or Export tasks are found in a running state.
    """
    #pylint: disable-msg=R0912,R0914,R0915
    tasks = helpers.get_p_json(
        helpers.FOREMAN_API + "tasks/", \
                json.dumps(
                    {
                        "per_page": "100",
                    }
                ))

    # From the list of tasks, look for any running export or sync jobs.
    # If we have any we exit, as we can't export in this state.
    ok_to_export = True
    for task_result in tasks['results']:
        if task_result['state'] == 'running':
            if task_result['humanized']['action'] == 'Export':
                if task_result['input']['repository']['label'] == label:
                    msg = "Unable to export due to export task in progress"
                    if name == 'DoV':
                        helpers.log_msg(msg, 'ERROR')
                        sys.exit(-1)
                    else:
                        helpers.log_msg(msg, 'WARNING')
                        ok_to_export = False
            if task_result['humanized']['action'] == 'Synchronize':
                if task_result['input']['repository']['label'] == label:
                    msg = "Unable to export due to sync task in progress"
                    if name == 'DoV':
                        helpers.log_msg(msg, 'ERROR')
                        sys.exit(-1)
                    else:
                        helpers.log_msg(msg, 'WARNING')
                        ok_to_export = False
        if task_result['state'] == 'paused':
            if task_result['humanized']['action'] == 'Export':
                if task_result['input']['repository']['label'] == label:
                    msg = "Unable to export due to paused export task - Please resolve this issue."
                    if name == 'DoV':
                        helpers.log_msg(msg, 'ERROR')
                        sys.exit(-1)
                    else:
                        helpers.log_msg(msg, 'WARNING')
                        ok_to_export = False
            if task_result['humanized']['action'] == 'Synchronize':
                if task_result['input']['repository']['label'] == label:
                    msg = "Unable to export due to paused sync task."
                    if name == 'DoV':
                        helpers.log_msg(msg, 'ERROR')
                        sys.exit(-1)
                    else:
                        helpers.log_msg(msg, 'WARNING')
                        ok_to_export = False

    check_incomplete_sync()
    return ok_to_export
Example #3
0
def sync_content(org_id, imported_repos):
    """
    Synchronize the repositories
    Triggers a sync of all repositories belonging to the configured sync plan
    """
    repos_to_sync = []
    delete_override = False

    # Get a listing of repositories in this Satellite
    enabled_repos = helpers.get_p_json(
        helpers.KATELLO_API + "/repositories/", \
            json.dumps(
                {
                    "organization_id": org_id,
                }
            ))

    # Loop through each repo to be imported/synced
    for repo in imported_repos:
        do_import = False
        for repo_result in enabled_repos['results']:
            if repo in repo_result['label']:
                do_import = True
                repos_to_sync.append(repo_result['id'])
        if do_import:
            msg = "Repo " + repo + " found in Satellite"
            helpers.log_msg(msg, 'DEBUG')
        else:
            msg = "Repo " + repo + " is not enabled in Satellite"
            # If the repo is not enabled, don't delete the input files.
            # This gives the admin a chance to manually enable the repo and re-import
            delete_override = True
            helpers.log_msg(msg, 'WARNING')
            # TODO: We could go on here and try to enable the Red Hat repo .....

    msg = "Repo ids to sync: " + str(repos_to_sync)
    helpers.log_msg(msg, 'DEBUG')

    msg = "Syncing repositories"
    helpers.log_msg(msg, 'INFO')
    print msg
    task_id = helpers.post_json(
        helpers.KATELLO_API + "repositories/bulk/sync", \
            json.dumps(
                {
                    "ids": repos_to_sync,
                }
            ))["id"]
    msg = "Repo sync task id = " + task_id
    helpers.log_msg(msg, 'DEBUG')

    return task_id, delete_override
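
A hedged sketch of how a caller might consume the values returned above, reusing the helpers.wait_for_task and helpers.get_task_status calls that appear in the later examples; the surrounding org_id/imported_repos setup is assumed to exist in the caller.

# Sketch only: assumes sync_content() above and the same helpers module.
task_id, delete_override = sync_content(org_id, imported_repos)
helpers.wait_for_task(task_id, 'sync')
tinfo = helpers.get_task_status(task_id)
if tinfo['state'] != 'running' and tinfo['result'] == 'success':
    helpers.log_msg("Repository sync complete", 'INFO')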
Example #4
0
def get_cv(org_id, cleanup_list, keep):
    """Get the content views"""

    # Query API to get all content views for our org
    cvs = helpers.get_p_json(
        helpers.KATELLO_API + "organizations/" + str(org_id) + "/content_views/",
        json.dumps({"per_page":"10000"})
    )
    ver_list = collections.OrderedDict()
    ver_descr = collections.OrderedDict()
    ver_keep = collections.OrderedDict()

    # Sort the CVs so that composites are considered first
    cv_results = sorted(cvs['results'], key=lambda k: k[u'composite'], reverse=True)

    for cv_result in cv_results:
        # We will never clean the DOV
        if cv_result['name'] != "Default Organization View":
            # Handle specific includes
            if cleanup_list:
                # The list contains dictionaries as elements. Process each dictionary
                for cv in cleanup_list:
                    # If the CV name does not appear in our config list, skip
                    if cv['view'] != cv_result['name']:
                        msg = "Skipping " + cv_result['name']
                        helpers.log_msg(msg, 'DEBUG')
                        continue
                    else:
                        msg = "Processing content view '" + cv_result['name'] + "' " \
                            + str(cv_result['id'])
                        helpers.log_msg(msg, 'DEBUG')

                        # Add the next version of the view, and how many versions to keep
                        ver_list[cv_result['id']] = cv_result['id']
                        ver_descr[cv_result['id']] = cv_result['name']
                        ver_keep[cv_result['id']] = cv['keep']

            # Handle the 'all' option
            else:
                msg = "Processing content view '" + cv_result['name'] + "' " \
                    + str(cv_result['id'])
                helpers.log_msg(msg, 'DEBUG')

                # Add the next version of the view, and how many versions to keep
                ver_list[cv_result['id']] = cv_result['id']
                ver_descr[cv_result['id']] = cv_result['name']
                ver_keep[cv_result['id']] = keep


    return ver_list, ver_descr, ver_keep
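
A small sketch of walking the three OrderedDicts returned by get_cv(); cleanup_list and keep come from the caller's configuration and are assumed here.

# Sketch only: iterate the content views selected for cleanup.
ver_list, ver_descr, ver_keep = get_cv(org_id, cleanup_list, keep)
for cvid in ver_list:
    msg = "CV '" + str(ver_descr[cvid]) + "' (id " + str(cvid) + ")" \
        + ": keeping " + str(ver_keep[cvid]) + " versions"
    helpers.log_msg(msg, 'DEBUG')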
Example #5
0
def get_product(org_id, cp_id):
    """
    Find and return the label of the given product ID
    """
    prod_list = helpers.get_p_json(
        helpers.KATELLO_API + "/products/", \
                json.dumps(
                        {
                           "organization_id": org_id,
                           "per_page": '1000',
                        }
                ))

    for prod in prod_list['results']:
        if prod['cp_id'] == cp_id:
            prodlabel = prod['label']
            return prodlabel
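
A hedged usage sketch: the export routine further below resolves the product label this way to build the on-disk export path (repo_result and org_name are assumed to come from the caller's repository loop).

# Sketch only: mirrors the call made during the per-repo export below.
product = get_product(org_id, repo_result['product']['cp_id'])
basepath = helpers.EXPORTDIR + "/" + org_name + "-" + product \
    + "-" + repo_result['label']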
Example #6
0
def get_product(org_id, cp_id):
    """
    Find and return the label of the given product ID
    """
    prod_list = helpers.get_p_json(
        helpers.KATELLO_API + "/products/", \
                json.dumps(
                        {
                           "organization_id": org_id,
                           "per_page": '1000',
                        }
                ))

    for prod in prod_list['results']:
        if prod['cp_id'] == cp_id:
            prodlabel = prod['label']
            return prodlabel
Example #7
0
def check_running_tasks(clear):
    """
    Check for any currently running Sync tasks
    Checks for any Synchronize tasks in running/paused or Incomplete state.
    """
    #pylint: disable-msg=R0912,R0914,R0915
    # Clear the screen
    if clear:
        os.system('clear')

    print helpers.HEADER + "Checking for running/paused yum sync tasks..." + helpers.ENDC
    tasks = helpers.get_p_json(
        helpers.FOREMAN_API + "tasks/", \
            json.dumps(
                {
                    "per_page": "100",
                }
            ))

    # From the list of tasks, look for any running export or sync jobs.
    # If we have any we exit, as we can't export in this state.
    running_sync = 0
    for task_result in tasks['results']:
        if task_result['state'] == 'running' and task_result['label'] != 'Actions::BulkAction':
            if task_result['humanized']['action'] == 'Synchronize':
                running_sync = 1
                print helpers.BOLD + "Running: " + helpers.ENDC \
                    + task_result['input']['repository']['name']
        if task_result['state'] == 'paused' and task_result['label'] != 'Actions::BulkAction':
            if task_result['humanized']['action'] == 'Synchronize':
                running_sync = 1
                print helpers.ERROR + "Paused:  " + helpers.ENDC \
                    + task_result['input']['repository']['name']

    if not running_sync:
        print helpers.GREEN + "None detected" + helpers.ENDC


    # Check any repos marked as Sync Incomplete
    print helpers.HEADER + "\nChecking for incomplete (stopped) yum sync tasks..." + helpers.ENDC
    repo_list = helpers.get_json(
        helpers.KATELLO_API + "/content_view_versions")

    # Extract the list of repo ids, then check the state of each one.
    incomplete_sync = 0
    for repo in repo_list['results']:
        for repo_id in repo['repositories']:
            repo_status = helpers.get_json(
                helpers.KATELLO_API + "/repositories/" + str(repo_id['id']))

            if repo_status['content_type'] == 'yum':
                if repo_status['last_sync'] is None:
                    if repo_status['library_instance_id'] is None:
#                        incomplete_sync = 1
#                        print helpers.ERROR + "Broken Repo: " + helpers.ENDC + repo_status['name']
                        print helpers.WARNING + "Never Synchronized: " + helpers.ENDC + repo_status['name']
                elif repo_status['last_sync']['state'] == 'stopped':
                    if repo_status['last_sync']['result'] == 'warning':
                        incomplete_sync = 1
                        print helpers.WARNING + "Incomplete: " + helpers.ENDC + repo_status['name']
                    else:
                        msg = repo_status['name'] + " - last_sync: " + repo_status['last_sync']['ended_at']
                        helpers.log_msg(msg, 'DEBUG')

    # If we have detected incomplete sync tasks, ask the user if they want to export anyway.
    # This isn't fatal, but *MAY* lead to inconsistent repositories on the disconnected sat.
    if not incomplete_sync:
        print helpers.GREEN + "No incomplete syncs detected\n" + helpers.ENDC
    else:
        print "\n"

    # Exit the loop if both tests are clear
    if not running_sync and not incomplete_sync:
        sys.exit(0)
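
For reference, a sketch of the repository fields the incomplete-sync check reads; the shape follows the code above and the values are illustrative only.

# Illustrative only: fabricated repository record with the fields inspected above.
sample_repo_status = {
    "name": "my_repo",
    "content_type": "yum",
    "library_instance_id": None,   # None + no last_sync => 'Never Synchronized' above
    "last_sync": {                 # None if the repo has never been synced
        "state": "stopped",
        "result": "warning",       # 'warning' is treated as an incomplete sync
        "ended_at": "2000-01-01 12:00:00",
    },
}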
Example #8
0
def main(args):
    """
    Main Routine
    """
    #pylint: disable-msg=R0912,R0914,R0915

    if helpers.DISCONNECTED:
        msg = "Export cannot be run on the disconnected Satellite host"
        helpers.log_msg(msg, 'ERROR')
        sys.exit(-1)

    # Who is running this script?
    runuser = helpers.who_is_running()

    # Set the base dir of the script and where the var data is
    global dir
    global vardir
    dir = os.path.dirname(__file__)
    vardir = os.path.join(dir, 'var')

    # Log the fact we are starting
    msg = "------------- Content export started by " + runuser + " ----------------"
    helpers.log_msg(msg, 'INFO')

    # Check for sane input
    parser = argparse.ArgumentParser(
        description='Performs Export of Default Content View.')
    group = parser.add_mutually_exclusive_group()
    # pylint: disable=bad-continuation
    parser.add_argument('-o',
                        '--org',
                        help='Organization (Uses default if not specified)',
                        required=False)
    parser.add_argument('-e',
                        '--env',
                        help='Environment config file',
                        required=False)
    group.add_argument('-a',
                       '--all',
                       help='Export ALL content',
                       required=False,
                       action="store_true")
    group.add_argument('-i',
                       '--incr',
                       help='Incremental Export of content since last run',
                       required=False,
                       action="store_true")
    group.add_argument('-s',
                       '--since',
                       help='Export content since YYYY-MM-DD HH:MM:SS',
                       required=False,
                       type=helpers.valid_date)
    parser.add_argument('-l',
                        '--last',
                        help='Display time of last export',
                        required=False,
                        action="store_true")
    parser.add_argument('-n',
                        '--nogpg',
                        help='Skip GPG checking',
                        required=False,
                        action="store_true")
    parser.add_argument('-r',
                        '--repodata',
                        help='Include repodata for repos with no new packages',
                        required=False,
                        action="store_true")
    args = parser.parse_args()

    # Set our script variables from the input args
    if args.org:
        org_name = args.org
    else:
        org_name = helpers.ORG_NAME
    since = args.since

    # Record where we are running from
    script_dir = str(os.getcwd())

    # Get the org_id (Validates our connection to the API)
    org_id = helpers.get_org_id(org_name)
    exported_repos = []
    # If a specific environment is requested, find and read that config file
    if args.env:
        repocfg = os.path.join(dir, 'config/' + args.env + '.yml')
        if not os.path.exists(repocfg):
            print "ERROR: Config file " + repocfg + " not found."
            sys.exit(-1)
        cfg = yaml.safe_load(open(repocfg, 'r'))
        ename = args.env
        erepos = cfg["env"]["repos"]
        msg = "Specific environment export called for " + ename + ". Configured repos:"
        helpers.log_msg(msg, 'DEBUG')
        for repo in erepos:
            msg = "  - " + repo
            helpers.log_msg(msg, 'DEBUG')

    else:
        ename = 'DoV'
        label = 'DoV'
        msg = "DoV export called"
        helpers.log_msg(msg, 'DEBUG')

    # Get the current time - this will be the 'last export' time if the export is OK
    start_time = datetime.datetime.strftime(datetime.datetime.now(),
                                            '%Y-%m-%d %H:%M:%S')
    print "START: " + start_time + " (" + ename + " export)"

    # Read the last export date pickle for our selected repo group.
    export_times = read_pickle(ename)
    export_type = 'incr'

    if args.all:
        print "Performing full content export for " + ename
        export_type = 'full'
        since = False
    else:
        if not since:
            since = False
            if args.last:
                if export_times:
                    print "Last successful export for " + ename + ":"
                    for time in export_times:
                        repo = "{:<70}".format(time)
                        print repo[:70] + '\t' + str(export_times[time])
                else:
                    print "Export has never been performed for " + ename
                sys.exit(-1)
            if not export_times:
                print "No prior export recorded for " + ename + ", performing full content export"
                export_type = 'full'
        else:
            # Re-populate export_times dictionary so each repo has 'since' date
            since_export = str(since)

            # We have our timestamp so we can kick off an incremental export
            print "Incremental export of content for " + ename + " synchronised after " \
            + str(since)

    # Check the available space in /var/lib/pulp
    check_disk_space(export_type)

    # Remove any previous exported content left behind by prior unclean exit
    if os.path.exists(helpers.EXPORTDIR + '/export'):
        msg = "Removing existing export directory"
        helpers.log_msg(msg, 'DEBUG')
        shutil.rmtree(helpers.EXPORTDIR + '/export')

    # Collect a list of enabled repositories. This is needed for:
    # 1. Matching specific repo exports, and
    # 2. Running import sync per repo on the disconnected side
    repolist = helpers.get_p_json(
        helpers.KATELLO_API + "/repositories/", \
                json.dumps(
                        {
                           "organization_id": org_id,
                           "per_page": '1000',
                        }
                ))

    # If we are running a full DoV export we run a different set of API calls...
    if ename == 'DoV':
        cola = "Exporting DoV"
        if export_type == 'incr' and 'DoV' in export_times:
            last_export = export_times['DoV']
            if since:
                last_export = since_export
            colb = "(INCR since " + last_export + ")"
        else:
            export_type = 'full'
            last_export = '2000-01-01 12:00:00'  # This is a dummy value, never used.
            colb = "(FULL)"
        msg = cola + " " + colb
        helpers.log_msg(msg, 'INFO')
        output = "{:<70}".format(cola)
        print output[:70] + ' ' + colb

        # Check if there are any currently running tasks that will conflict with an export
        check_running_tasks(label, ename)

        # Get the version of the CV (Default Org View) to export
        dov_ver = get_cv(org_id)

        # Now we have a CV ID and a starting date, and no conflicting tasks, we can export
        export_id = export_cv(dov_ver, last_export, export_type)

        # Now we need to wait for the export to complete
        helpers.wait_for_task(export_id, 'export')

        # Check if the export completed OK. If not we exit the script.
        tinfo = helpers.get_task_status(export_id)
        if tinfo['state'] != 'running' and tinfo['result'] == 'success':
            msg = "Content View Export OK"
            helpers.log_msg(msg, 'INFO')
            print helpers.GREEN + msg + helpers.ENDC

            # Update the export timestamp for this repo
            export_times['DoV'] = start_time

            # Generate a list of repositories that were exported
            for repo_result in repolist['results']:
                if repo_result['content_type'] == 'yum':
                    # Add the repo to the successfully exported list
                    exported_repos.append(repo_result['label'])

        else:
            msg = "Content View Export FAILED"
            helpers.log_msg(msg, 'ERROR')
            sys.exit(-1)

    else:
        # Verify that defined repos exist in Satellite
        for repo in erepos:
            repo_in_sat = False
            for repo_x in repolist['results']:
                if re.findall("\\b" + repo + "\\b$", repo_x['label']):
                    repo_in_sat = True
                    break
            if not repo_in_sat:
                msg = "'" + repo + "' not found in Satellite"
                helpers.log_msg(msg, 'WARNING')

        # Process each repo
        for repo_result in repolist['results']:
            if repo_result['content_type'] == 'yum':
                # If we have a match, do the export
                if repo_result['label'] in erepos:
                    # Extract the last export time for this repo
                    orig_export_type = export_type
                    cola = "Export " + repo_result['label']
                    if export_type == 'incr' and repo_result[
                            'label'] in export_times:
                        last_export = export_times[repo_result['label']]
                        if since:
                            last_export = since_export
                        colb = "(INCR since " + last_export + ")"
                    else:
                        export_type = 'full'
                        last_export = '2000-01-01 12:00:00'  # This is a dummy value, never used.
                        colb = "(FULL)"
                    msg = cola + " " + colb
                    helpers.log_msg(msg, 'INFO')
                    output = "{:<70}".format(cola)
                    print output[:70] + ' ' + colb

                    # Check if there are any currently running tasks that will conflict
                    ok_to_export = check_running_tasks(repo_result['label'],
                                                       ename)

                    if ok_to_export:
                        # Trigger export on the repo
                        export_id = export_repo(repo_result['id'], last_export,
                                                export_type)

                        # Now we need to wait for the export to complete
                        helpers.wait_for_task(export_id, 'export')

                        # Check if the export completed OK. If not we exit the script.
                        tinfo = helpers.get_task_status(export_id)
                        if tinfo['state'] != 'running' and tinfo[
                                'result'] == 'success':
                            # Count the number of exported packages
                            # First resolve the product label - this forms part of the export path
                            product = get_product(
                                org_id, repo_result['product']['cp_id'])
                            # Now we can build the export path itself
                            basepath = helpers.EXPORTDIR + "/" + org_name + "-" + product + "-" + repo_result[
                                'label']
                            if export_type == 'incr':
                                basepath = basepath + "-incremental"
                            exportpath = basepath + "/" + repo_result[
                                'relative_path']
                            msg = "\nExport path = " + exportpath
                            helpers.log_msg(msg, 'DEBUG')

                            os.chdir(exportpath)
                            numrpms = len([
                                f for f in os.walk(".").next()[2]
                                if f[-4:] == ".rpm"
                            ])

                            msg = "Repository Export OK (" + str(
                                numrpms) + " new packages)"
                            helpers.log_msg(msg, 'INFO')
                            print helpers.GREEN + msg + helpers.ENDC

                            # Update the export timestamp for this repo
                            export_times[repo_result['label']] = start_time

                            # Add the repo to the successfully exported list
                            if numrpms != 0 or args.repodata:
                                msg = "Adding " + repo_result[
                                    'label'] + " to export list"
                                helpers.log_msg(msg, 'DEBUG')
                                exported_repos.append(repo_result['label'])
                            else:
                                msg = "Not including repodata for empty repo " + repo_result[
                                    'label']
                                helpers.log_msg(msg, 'DEBUG')

                        else:
                            msg = "Export FAILED"
                            helpers.log_msg(msg, 'ERROR')

                        # Reset the export type to the user specified, in case we overrode it.
                        export_type = orig_export_type

                else:
                    msg = "Skipping  " + repo_result['label']
                    helpers.log_msg(msg, 'DEBUG')

            # Handle FILE type exports (ISO repos)
            elif repo_result['content_type'] == 'file':
                # If we have a match, do the export
                if repo_result['label'] in erepos:
                    # Extract the last export time for this repo
                    orig_export_type = export_type
                    cola = "Export " + repo_result['label']
                    if export_type == 'incr' and repo_result[
                            'label'] in export_times:
                        last_export = export_times[repo_result['label']]
                        if since:
                            last_export = since_export
                        colb = "(INCR since " + last_export + ")"
                    else:
                        export_type = 'full'
                        last_export = '2000-01-01 12:00:00'  # This is a dummy value, never used.
                        colb = "(FULL)"
                    msg = cola + " " + colb
                    helpers.log_msg(msg, 'INFO')
                    output = "{:<70}".format(cola)
                    print output[:70] + ' ' + colb

                    # Check if there are any currently running tasks that will conflict
                    ok_to_export = check_running_tasks(repo_result['label'],
                                                       ename)

                    if ok_to_export:
                        # Trigger export on the repo
                        numfiles = export_iso(repo_result['id'],
                                              repo_result['label'],
                                              repo_result['relative_path'],
                                              last_export, export_type)

                        # Reset the export type to the user specified, in case we overrode it.
                        export_type = orig_export_type

                        # Update the export timestamp for this repo
                        export_times[repo_result['label']] = start_time

                        # Add the repo to the successfully exported list
                        if numfiles != 0 or args.repodata:
                            msg = "Adding " + repo_result[
                                'label'] + " to export list"
                            helpers.log_msg(msg, 'DEBUG')
                            exported_repos.append(repo_result['label'])
                        else:
                            msg = "Not including repodata for empty repo " + repo_result[
                                'label']
                            helpers.log_msg(msg, 'DEBUG')

                else:
                    msg = "Skipping  " + repo_result['label']
                    helpers.log_msg(msg, 'DEBUG')

    # Combine resulting directory structures into a single repo format (top level = /content)
    prep_export_tree(org_name)

    # Now we need to process the on-disk export data.
    # Define the location of our exported data.
    export_dir = helpers.EXPORTDIR + "/export"

    # Write out the list of exported repos. This will be transferred to the disconnected system
    # and used to perform the repo sync tasks during the import.
    pickle.dump(exported_repos, open(export_dir + '/exported_repos.pkl', 'wb'))

    # Run GPG Checks on the exported RPMs
    if not args.nogpg:
        do_gpg_check(export_dir)

    # Add our exported data to a tarfile
    create_tar(export_dir, ename)

    # We're done. Write the start timestamp to file for next time
    os.chdir(script_dir)
    pickle.dump(export_times, open(vardir + '/exports_' + ename + '.pkl',
                                   "wb"))

    # And we're done!
    print helpers.GREEN + "Export complete.\n" + helpers.ENDC
    print 'Please transfer the contents of ' + helpers.EXPORTDIR + \
        ' to your disconnected Satellite system content import location.\n' \
        'Once transferred, please run ' + helpers.BOLD + ' sat_import' \
        + helpers.ENDC + ' to extract it.'
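
read_pickle() is referenced above but not shown in this excerpt. Below is a minimal sketch of what it is assumed to do, matching the pickle.dump() call at the end of main(); the real helper may differ.

import os
import pickle

def read_pickle(ename):
    """Assumed behaviour: load the saved export timestamps for this environment."""
    pklfile = os.path.join(vardir, 'exports_' + ename + '.pkl')
    if not os.path.exists(pklfile):
        # No previous export recorded
        return {}
    return pickle.load(open(pklfile, 'rb'))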
Example #9
0
def sync_content(org_id, imported_repos):
    """
    Synchronize the repositories
    Triggers a sync of all repositories belonging to the configured sync plan
    """
    repos_to_sync = []
    delete_override = False

    # Get a listing of repositories in this Satellite
    enabled_repos = helpers.get_p_json(
        helpers.KATELLO_API + "/repositories/", \
            json.dumps(
                {
                    "organization_id": org_id,
                    "per_page": '1000',
                }
            ))

    # Loop through each repo to be imported/synced
    for repo in imported_repos:
        do_import = False
        for repo_result in enabled_repos['results']:
            if repo in repo_result['label']:
                # Ensure we have an exact match on the repo label
                if repo == repo_result['label']:
                    do_import = True
                    repos_to_sync.append(repo_result['id'])

                    # Ensure Mirror-on-sync flag is set to FALSE to make sure incremental
                    # import does not (cannot) delete existing packages.
                    msg = "Setting mirror-on-sync=false for repo id " + str(
                        repo_result['id'])
                    helpers.log_msg(msg, 'DEBUG')
                    helpers.put_json(
                        helpers.KATELLO_API + "/repositories/" + str(repo_result['id']), \
                            json.dumps(
                                {
                                    "mirror_on_sync": False
                                }
                            ))

        if do_import:
            msg = "Repo " + repo + " found in Satellite"
            helpers.log_msg(msg, 'DEBUG')
        else:
            msg = "Repo " + repo + " is not enabled in Satellite"
            # If the repo is not enabled, don't delete the input files.
            # This gives the admin a chance to manually enable the repo and re-import
            delete_override = True
            helpers.log_msg(msg, 'WARNING')
            # TODO: We could go on here and try to enable the Red Hat repo .....

    # If we get to here and nothing was added to repos_to_sync we will abort the import.
    # This will probably occur on the initial import - nothing will be enabled in Satellite.
    # Also if there are no updates during incremental sync.
    if not repos_to_sync:
        msg = "No updates in imported content - skipping sync"
        helpers.log_msg(msg, 'WARNING')
        return
    else:
        msg = "Repo ids to sync: " + str(repos_to_sync)
        helpers.log_msg(msg, 'DEBUG')

        msg = "Syncing repositories"
        helpers.log_msg(msg, 'INFO')
        print msg

        # Break repos_to_sync into groups of n
        repochunks = [
            repos_to_sync[i:i + helpers.SYNCBATCH]
            for i in range(0, len(repos_to_sync), helpers.SYNCBATCH)
        ]

        # Loop through the smaller batches of repos and sync them
        for chunk in repochunks:
            chunksize = len(chunk)
            msg = "Syncing repo batch " + str(chunk)
            helpers.log_msg(msg, 'DEBUG')
            task_id = helpers.post_json(
                helpers.KATELLO_API + "repositories/bulk/sync", \
                    json.dumps(
                        {
                            "ids": chunk,
                        }
                    ))["id"]
            msg = "Repo sync task id = " + task_id
            helpers.log_msg(msg, 'DEBUG')

            # Now we need to wait for the sync to complete
            helpers.wait_for_task(task_id, 'sync')

            tinfo = helpers.get_task_status(task_id)
            if tinfo['state'] != 'running' and tinfo['result'] == 'success':
                msg = "Batch of " + str(chunksize) + " repos complete"
                helpers.log_msg(msg, 'INFO')
                print helpers.GREEN + msg + helpers.ENDC
            else:
                msg = "Batch sync has errors"
                helpers.log_msg(msg, 'WARNING')

        return delete_override
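
A worked example of the batching idiom used above; SYNCBATCH is assumed to be 3 purely for illustration.

# Worked example only: chunking a repo id list into batches of SYNCBATCH.
repos_to_sync = [101, 102, 103, 104, 105, 106, 107]
SYNCBATCH = 3
repochunks = [repos_to_sync[i:i + SYNCBATCH]
              for i in range(0, len(repos_to_sync), SYNCBATCH)]
# repochunks == [[101, 102, 103], [104, 105, 106], [107]]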
Example #10
0
def check_counts(org_id, package_count, count):
    """
    Verify the number of pkgs/errata in each repo matches the sync host.
    Input is a dictionary loaded from a pickle that was created on the sync
    host in the format {Repo_Label: "pkgs:errata"}
    """

    # Get a listing of repositories in this Satellite
    enabled_repos = helpers.get_p_json(
        helpers.KATELLO_API + "/repositories/", \
            json.dumps(
                {
                    "organization_id": org_id,
                    "per_page": '1000',
                }
            ))

    # First loop through the repos in the import dict and find the local ID
    table_data = []
    display_data = False
    for repo, counts in package_count.iteritems():
        # Split the count data into packages and errata
        sync_pkgs = counts.split(':')[0]
        sync_erratum = counts.split(':')[1]

        # Loop through each repo and count the local pkgs in each repo
        for repo_result in enabled_repos['results']:
            if repo in repo_result['label']:
                # Ensure we have an exact match on the repo label
                if repo == repo_result['label']:
                    local_pkgs, local_erratum = count_packages(
                        repo_result['id'])

                    # Set the output colour of the table entry based on the pkg counts
                    if int(local_pkgs) == int(sync_pkgs):
                        colour = helpers.GREEN
                        display = False
                    elif int(local_pkgs) == 0 and int(sync_pkgs) != 0:
                        colour = helpers.RED
                        display = True
                        display_data = True
                    elif int(local_pkgs) < int(sync_pkgs):
                        colour = helpers.YELLOW
                        display = True
                        display_data = True
                    else:
                        # If local_pkg > sync_pkg - can happen due to 'mirror on sync' option
                        # - sync host deletes old pkgs. If this is the case we cannot verify
                        # an exact package status so we'll set BLUE
                        colour = helpers.BLUE
                        display = True
                        display_data = True

                    # Truncate the repo label to 70 chars and build the table row
                    reponame = "{:<70}".format(repo)
                    # Add all counts if it has been requested
                    if count:
                        display_data = True
                        table_data.append([
                            colour, repo[:70],
                            str(sync_pkgs),
                            str(local_pkgs), helpers.ENDC
                        ])
                    else:
                        # Otherwise only add counts that are non-green (display = True)
                        if display:
                            table_data.append([
                                colour, repo[:70],
                                str(sync_pkgs),
                                str(local_pkgs), helpers.ENDC
                            ])

    if display_data:
        msg = '\nRepository package count verification...'
        helpers.log_msg(msg, 'INFO')
        print msg

        # Print Table header
        header = ["", "Repository", "SyncHost", "ThisHost", ""]
        header1 = [
            "", "------------------------------------------------------------",
            "--------", "--------", ""
        ]
        row_format = "{:<1} {:<70} {:>9} {:>9} {:<1}"
        print row_format.format(*header)
        print row_format.format(*header1)

        # Print the table rows
        for row in table_data:
            print row_format.format(*row)
        print '\n'
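
A sketch of the package_count input described in the docstring: repo labels mapped to "packages:errata" count strings produced on the sync host. The labels and numbers here are made up.

# Illustrative only: the pickle payload format check_counts() expects.
package_count = {
    "my_os_repo_label": "4212:387",
    "my_extras_repo_label": "120:6",
}
# check_counts(org_id, package_count, count=False) would then compare these
# against the local Satellite and print a table of any mismatches.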
Example #11
0
def main(args):
    """
    Main Routine
    """
    #pylint: disable-msg=R0912,R0914,R0915

    # Who is running this script?
    runuser = helpers.who_is_running()

    # Set the base dir of the script and where the var data is
    global dir
    global vardir
    dir = os.path.dirname(__file__)
    vardir = os.path.join(dir, 'var')
    confdir = os.path.join(dir, 'config')

    # Check for sane input
    parser = argparse.ArgumentParser(
        description='Exports puppet modules in puppet-forge-server format.')
    # pylint: disable=bad-continuation
    parser.add_argument('-o',
                        '--org',
                        help='Organization (Uses default if not specified)',
                        required=False)
    parser.add_argument('-r',
                        '--repo',
                        help='Puppetforge repo label',
                        required=False)
    parser.add_argument(
        '-t',
        '--type',
        help='Puppetforge server type (puppet-forge-server|artifactory)',
        required=False)
    parser.add_argument('-s',
                        '--server',
                        help='puppet-forge-server hostname',
                        required=False)
    parser.add_argument('-m',
                        '--modulepath',
                        help='path to puppet-forge-server modules',
                        required=False)
    parser.add_argument(
        '-u',
        '--user',
        help=
        'Username to push modules to server as (default is user running script)',
        required=False)
    parser.add_argument(
        '-p',
        '--password',
        help='Password (token) for username to push modules to Artifactory',
        required=False)
    args = parser.parse_args()

    # Set our script variables from the input args
    if args.org:
        org_name = args.org
    else:
        org_name = helpers.ORG_NAME

    # Define the type of puppet-forge server
    if args.type:
        pftype = args.type
    else:
        if not helpers.PFMETHOD:
            print "Puppet forge server type not specified"
            sys.exit(1)
        else:
            pftype = helpers.PFMETHOD

    # Define the puppet-forge-server hostname
    if args.server:
        pfserver = args.server
    else:
        if not helpers.PFSERVER:
            print "Puppet forge server not defined"
            sys.exit(1)
        else:
            pfserver = helpers.PFSERVER

    # Set the remote (puppet-forge-server) modules directory
    if args.modulepath:
        modpath = args.modulepath
    else:
        if not helpers.PFMODPATH:
            print "Puppet forge module path not defined"
            sys.exit(1)
        else:
            modpath = helpers.PFMODPATH

    # Set the username to use to push modules
    if args.user:
        pfuser = args.user
    else:
        pfuser = helpers.PFUSER

    # Read in the token for Artifactory
    if args.password:
        pftoken = args.password
    else:
        pftoken = helpers.PFTOKEN

    # Record where we are running from
    script_dir = str(os.getcwd())

    # Get the org_id (Validates our connection to the API)
    org_id = helpers.get_org_id(org_name)

    # Read the repo label given by the user
    if args.repo:
        pfrepo = args.repo
    else:
        print "Puppetforge repo not defined"
        sys.exit(1)

    # Remove any previous exported content left behind by prior unclean exit
    if os.path.exists(helpers.EXPORTDIR + '/export'):
        msg = "Removing existing export directory"
        helpers.log_msg(msg, 'DEBUG')
        shutil.rmtree(helpers.EXPORTDIR + '/export')

    # Collect a list of enabled repositories. This is needed for:
    # 1. Matching specific repo exports, and
    # 2. Running import sync per repo on the disconnected side
    repolist = helpers.get_p_json(
        helpers.KATELLO_API + "/repositories/", \
                json.dumps(
                        {
                           "organization_id": org_id,
                           "per_page": '1000',
                        }
                ))

    # Process each repo
    for repo_result in repolist['results']:
        if repo_result['content_type'] == 'puppet':
            # If we have a match, do the export
            if repo_result['label'] == pfrepo:

                # Trigger export on the repo
                numfiles = export_puppet(repo_result['id'],
                                         repo_result['label'],
                                         repo_result['relative_path'], 'full')

            else:
                msg = "Skipping  " + repo_result['label']
                helpers.log_msg(msg, 'DEBUG')

    # Now we need to process the on-disk export data.
    # Define the location of our exported data.
    export_dir = helpers.EXPORTDIR + "/puppetforge"

    if (pftype == 'puppet-forge-server'):
        # Method for posting to puppet-forge-server
        os.chdir(script_dir)
        copy_to_pfserver(export_dir, pfserver, modpath, pfuser)

    elif (pftype == 'artifactory'):
        # Method for posting to Artifactory repository
        for module in os.listdir(export_dir):
            print("Posing: " + module)
            postModule(module, export_dir, pfserver, modpath, pfuser, pftoken)

    else:
        print("Unknown puppet-forge server type defined")
        sys.exit(1)

    # And we're done!
    print helpers.GREEN + "Puppet Forge export complete.\n" + helpers.ENDC
    sys.exit(0)
Example #12
0
def sync_content(org_id, imported_repos):
    """
    Synchronize the repositories
    Triggers a sync of all repositories belonging to the configured sync plan
    """
    repos_to_sync = []
    delete_override = False

    # Get a listing of repositories in this Satellite
    enabled_repos = helpers.get_p_json(
        helpers.KATELLO_API + "/repositories/", \
            json.dumps(
                {
                    "organization_id": org_id,
                    "per_page": '1000',
                }
            ))

    # Loop through each repo to be imported/synced
    for repo in imported_repos:
        do_import = False
        for repo_result in enabled_repos['results']:
            if repo in repo_result['label']:
                do_import = True
                repos_to_sync.append(repo_result['id'])

                # Ensure Mirror-on-sync flag is set to FALSE to make sure incremental
                # import does not (cannot) delete existing packages.
                msg = "Setting mirror-on-sync=false for repo id " + str(repo_result['id'])
                helpers.log_msg(msg, 'DEBUG')
                helpers.put_json(
                    helpers.KATELLO_API + "/repositories/" + str(repo_result['id']), \
                        json.dumps(
                            {
                                "mirror_on_sync": False
                            }
                        ))

        if do_import:
            msg = "Repo " + repo + " found in Satellite"
            helpers.log_msg(msg, 'DEBUG')
        else:
            msg = "Repo " + repo + " is not enabled in Satellite"
            # If the repo is not enabled, don't delete the input files.
            # This gives the admin a chance to manually enable the repo and re-import
            delete_override = True
            helpers.log_msg(msg, 'WARNING')
            # TODO: We could go on here and try to enable the Red Hat repo .....

    # If we get to here and nothing was added to repos_to_sync we will abort the import.
    # This will probably occur on the initial import - nothing will be enabled in Satellite.
    # Also if there are no updates during incremental sync.
    if not repos_to_sync:
        msg = "No updates in imported content - skipping sync"
        helpers.log_msg(msg, 'WARNING')
        return
    else:
        msg = "Repo ids to sync: " + str(repos_to_sync)
        helpers.log_msg(msg, 'DEBUG')

        msg = "Syncing repositories"
        helpers.log_msg(msg, 'INFO')
        print msg

        # Break repos_to_sync into groups of n 
        repochunks = [ repos_to_sync[i:i+helpers.SYNCBATCH] for i in range(0, len(repos_to_sync), helpers.SYNCBATCH) ]

        # Loop through the smaller batches of repos and sync them
        for chunk in repochunks:
            chunksize = len(chunk)
            msg = "Syncing repo batch " + str(chunk)
            helpers.log_msg(msg, 'DEBUG')
            task_id = helpers.post_json(
                helpers.KATELLO_API + "repositories/bulk/sync", \
                    json.dumps(
                        {
                            "ids": chunk,
                        }
                    ))["id"]
            msg = "Repo sync task id = " + task_id
            helpers.log_msg(msg, 'DEBUG')

            # Now we need to wait for the sync to complete
            helpers.wait_for_task(task_id, 'sync')

            tinfo = helpers.get_task_status(task_id)
            if tinfo['state'] != 'running' and tinfo['result'] == 'success':
                msg = "Batch of " + str(chunksize) + " repos complete"
                helpers.log_msg(msg, 'INFO')
                print helpers.GREEN + msg + helpers.ENDC
            else:
                msg = "Batch sync has errors"
                helpers.log_msg(msg, 'WARNING')

        return delete_override
Example #13
0
def main():
    """
    Main Routine
    """
    #pylint: disable-msg=R0912,R0914,R0915

    if helpers.DISCONNECTED:
        msg = "Export cannot be run on the disconnected Satellite host"
        helpers.log_msg(msg, 'ERROR')
        sys.exit(-1)

    # Who is running this script?
    runuser = helpers.who_is_running()

    # Log the fact we are starting
    msg = "------------- Content export started by " + runuser + " ----------------"
    helpers.log_msg(msg, 'INFO')

    # Check for sane input
    parser = argparse.ArgumentParser(description='Performs Export of Default Content View.')
    group = parser.add_mutually_exclusive_group()
    # pylint: disable=bad-continuation
    parser.add_argument('-o', '--org', help='Organization', required=True)
    parser.add_argument('-e', '--env', help='Environment config file', required=False)
    group.add_argument('-a', '--all', help='Export ALL content', required=False,
        action="store_true")
    group.add_argument('-i', '--incr', help='Incremental Export of content since last run',
        required=False, action="store_true")
    group.add_argument('-s', '--since', help='Export content since YYYY-MM-DD HH:MM:SS',
        required=False, type=helpers.valid_date)
    parser.add_argument('-l', '--last', help='Display time of last export', required=False,
        action="store_true")
    args = parser.parse_args()

    # Set our script variables from the input args
    org_name = args.org
    since = args.since

    # Record where we are running from
    script_dir = str(os.getcwd())

    # Get the org_id (Validates our connection to the API)
    org_id = helpers.get_org_id(org_name)
    exported_repos = []
    # If a specific environment is requested, find and read that config file
    if args.env:
        if not os.path.exists('config/' + args.env + '.yml'):
            print "ERROR: Config file 'config/" + args.env + ".yml' not found."
            sys.exit(-1)
        cfg = yaml.safe_load(open("config/" + args.env + ".yml", 'r'))
        ename = args.env
        erepos = cfg["env"]["repos"]
        msg = "Specific environment export called for " + ename + ". Configured repos:"
        helpers.log_msg(msg, 'DEBUG')
        for repo in erepos:
            msg = "  - " + repo
            helpers.log_msg(msg, 'DEBUG')

    else:
        ename = 'DoV'
        label = 'DoV'
        msg = "DoV export called"
        helpers.log_msg(msg, 'DEBUG')

    # Get the current time - this will be the 'last export' time if the export is OK
    start_time = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d %H:%M:%S')
    print "START: " + start_time + " (" + ename + " export)"

    # Read the last export date pickle for our selected repo group.
    export_times = read_pickle(ename)
    export_type = 'incr'

    if args.all:
        print "Performing full content export for " + ename
        export_type = 'full'
        since = False
    else:
        if not since:
            since = False
            if args.last:
                if export_times:
                    print "Last successful export for " + ename + ":"
                    for time in export_times:
                        print str(time) + '\t' + str(export_times[time])
                else:
                    print "Export has never been performed for " + ename
                sys.exit(-1)
            if not export_times:
                print "No prior export recorded for " + ename + ", performing full content export"
                export_type = 'full'
        else:
            # TODO: Re-populate export_times dictionary so each repo has 'since' date
            since_export = str(since)

            # We have our timestamp so we can kick off an incremental export
            print "Incremental export of content for " + ename + " synchronised after " \
            + str(since)

    # Check the available space in /var/lib/pulp
    check_disk_space(export_type)

    # TODO: Remove any previous exported content
#    os.chdir(helpers.EXPORTDIR)
#    shutil.rmtree()


    # Collect a list of enabled repositories. This is needed for:
    # 1. Matching specific repo exports, and
    # 2. Running import sync per repo on the disconnected side
    repolist = helpers.get_p_json(
        helpers.KATELLO_API + "/repositories/", \
                json.dumps(
                        {
                           "organization_id": org_id,
                           "per_page": '1000',
                        }
                ))

    # If we are running a full DoV export we run a different set of API calls...
    if ename == 'DoV':
        if export_type == 'incr' and 'DoV' in export_times:
            last_export = export_times['DoV']
            if since:
                last_export = since_export
            msg = "Exporting DoV (INCR since " + last_export + ")"
        else:
            export_type = 'full'
            last_export = '2000-01-01 12:00:00' # This is a dummy value, never used.
            msg = "Exporting DoV (FULL)"
        helpers.log_msg(msg, 'INFO')
        print msg

        # Check if there are any currently running tasks that will conflict with an export
        check_running_tasks(label, ename)

        # Get the version of the CV (Default Org View) to export
        dov_ver = get_cv(org_id)

        # Now we have a CV ID and a starting date, and no conflicting tasks, we can export
        export_id = export_cv(dov_ver, last_export, export_type)

        # Now we need to wait for the export to complete
        helpers.wait_for_task(export_id, 'export')

        # Check if the export completed OK. If not we exit the script.
        tinfo = helpers.get_task_status(export_id)
        if tinfo['state'] != 'running' and tinfo['result'] == 'success':
            msg = "Content View Export OK"
            helpers.log_msg(msg, 'INFO')
            print helpers.GREEN + msg + helpers.ENDC

            # Update the export timestamp for this repo
            export_times['DoV'] = start_time

            # Generate a list of repositories that were exported
            for repo_result in repolist['results']:
                if repo_result['content_type'] == 'yum':
                    # Add the repo to the successfully exported list
                    exported_repos.append(repo_result['label'])

        else:
            msg = "Content View Export FAILED"
            helpers.log_msg(msg, 'ERROR')
            sys.exit(-1)

    else:
        # Verify that defined repos exist in our DoV
        for repo_result in repolist['results']:
            if repo_result['content_type'] == 'yum':
                # If we have a match, do the export
                if repo_result['label'] in erepos:
                    # Extract the last export time for this repo
                    if export_type == 'incr' and repo_result['label'] in export_times:
                        last_export = export_times[repo_result['label']]
                        if since:
                            last_export = since_export
                        msg = "Exporting " + repo_result['label'] \
                            + " (INCR since " + last_export + ")"
                    else:
                        export_type = 'full'
                        last_export = '2000-01-01 12:00:00' # This is a dummy value, never used.
                        msg = "Exporting " + repo_result['label'] + "(FULL)"
                    helpers.log_msg(msg, 'INFO')
                    print msg

                    # Check if there are any currently running tasks that will conflict
                    ok_to_export = check_running_tasks(repo_result['label'], ename)

                    if ok_to_export:
                        # Trigger export on the repo
                        export_id = export_repo(repo_result['id'], last_export, export_type)

                        # Now we need to wait for the export to complete
                        helpers.wait_for_task(export_id, 'export')

                        # Check if the export completed OK. If not we exit the script.
                        tinfo = helpers.get_task_status(export_id)
                        if tinfo['state'] != 'running' and tinfo['result'] == 'success':
                            msg = "Repository Export OK"
                            helpers.log_msg(msg, 'INFO')
                            print helpers.GREEN + msg + helpers.ENDC

                            # Update the export timestamp for this repo
                            export_times[repo_result['label']] = start_time

                            # Add the repo to the successfully exported list
                            exported_repos.append(repo_result['label'])
                        else:
                            msg = "Export FAILED"
                            helpers.log_msg(msg, 'ERROR')


                else:
                    msg = "Skipping  " + repo_result['label']
                    helpers.log_msg(msg, 'DEBUG')


    # Combine resulting directory structures into a single repo format (top level = /content)
    prep_export_tree(org_name)

    # Now we need to process the on-disk export data.
    # Define the location of our exported data.
    export_dir = helpers.EXPORTDIR + "/export"

    # Write out the list of exported repos. This will be transferred to the disconnected system
    # and used to perform the repo sync tasks during the import.
    pickle.dump(exported_repos, open(export_dir + '/exported_repos.pkl', 'wb'))

    # Run GPG Checks on the exported RPMs
    do_gpg_check(export_dir)

    # Add our exported data to a tarfile
    create_tar(export_dir, ename)

    # We're done. Write the start timestamp to file for next time
    os.chdir(script_dir)
    pickle.dump(export_times, open('var/exports_' + ename + '.pkl', "wb"))

    # And we're done!
    print helpers.GREEN + "Export complete.\n" + helpers.ENDC
    print 'Please transfer the contents of ' + helpers.EXPORTDIR + \
        ' to your disconnected Satellite system content import location.\n' \
        'Once transferred, please run ' + helpers.BOLD + ' sat_import' \
        + helpers.ENDC + ' to extract it.'
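A brief aside on the import side: the exported_repos.pkl written above is what drives the
per-repo syncs during sat_import, which is not part of this listing. As a minimal sketch
(the function name, the path and the sync_repo() helper are assumptions), the disconnected
host could read the list back like this:

import os
import pickle

def load_exported_repos(import_dir):
    """Sketch only: read the repo list written by the export run above."""
    # exported_repos.pkl sits at the top level of the transferred export tree
    pkl_file = os.path.join(import_dir, 'exported_repos.pkl')
    with open(pkl_file, 'rb') as pkl:
        return pickle.load(pkl)

# Example usage on the import side (the path is an assumption):
#   for label in load_exported_repos('/var/sat-import/export'):
#       sync_repo(label)  # hypothetical per-repo sync helper
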
Example #14
0
def main(args):
    """
    Main Routine
    """
    #pylint: disable-msg=R0912,R0914,R0915

    if helpers.DISCONNECTED:
        msg = "Export cannot be run on the disconnected Satellite host"
        helpers.log_msg(msg, 'ERROR')
        sys.exit(-1)

    # Who is running this script?
    runuser = helpers.who_is_running()

    # Set the base dir of the script and where the var data is
    global dir 
    global vardir 
    dir = os.path.dirname(__file__)
    vardir = os.path.join(dir, 'var')

    # Log the fact we are starting
    msg = "------------- Content export started by " + runuser + " ----------------"
    helpers.log_msg(msg, 'INFO')

    # Check for sane input
    parser = argparse.ArgumentParser(description='Performs Export of Default Content View.')
    group = parser.add_mutually_exclusive_group()
    # pylint: disable=bad-continuation
    parser.add_argument('-o', '--org', help='Organization (Uses default if not specified)',
        required=False)
    parser.add_argument('-e', '--env', help='Environment config file', required=False)
    group.add_argument('-a', '--all', help='Export ALL content', required=False,
        action="store_true")
    group.add_argument('-i', '--incr', help='Incremental Export of content since last run',
        required=False, action="store_true")
    group.add_argument('-s', '--since', help='Export content since YYYY-MM-DD HH:MM:SS',
        required=False, type=helpers.valid_date)
    parser.add_argument('-l', '--last', help='Display time of last export', required=False,
        action="store_true")
    parser.add_argument('-n', '--nogpg', help='Skip GPG checking', required=False,
        action="store_true")
    parser.add_argument('-r', '--repodata', help='Include repodata for repos with no new packages', 
        required=False, action="store_true")
    args = parser.parse_args()

    # Set our script variables from the input args
    if args.org:
        org_name = args.org
    else:
        org_name = helpers.ORG_NAME
    since = args.since

    # Record where we are running from
    script_dir = str(os.getcwd())

    # Get the org_id (Validates our connection to the API)
    org_id = helpers.get_org_id(org_name)
    exported_repos = []
    # If a specific environment is requested, find and read that config file
    if args.env:
        repocfg = os.path.join(dir, 'config/' + args.env + '.yml')
        if not os.path.exists(repocfg):
            print "ERROR: Config file " + repocfg + " not found."
            sys.exit(-1)
        cfg = yaml.safe_load(open(repocfg, 'r'))
        ename = args.env
        erepos = cfg["env"]["repos"]
        msg = "Specific environment export called for " + ename + ". Configured repos:"
        helpers.log_msg(msg, 'DEBUG')
        for repo in erepos:
            msg = "  - " + repo
            helpers.log_msg(msg, 'DEBUG')

    else:
        ename = 'DoV'
        label = 'DoV'
        msg = "DoV export called"
        helpers.log_msg(msg, 'DEBUG')

    # Get the current time - this will be the 'last export' time if the export is OK
    start_time = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d %H:%M:%S')
    print "START: " + start_time + " (" + ename + " export)"

    # Read the last export date pickle for our selected repo group.
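    # export_times maps each repo label (or 'DoV') to the timestamp of its last successful export.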
    export_times = read_pickle(ename)
    export_type = 'incr'

    if args.all:
        print "Performing full content export for " + ename
        export_type = 'full'
        since = False
    else:
        if not since:
            since = False
            if args.last:
                if export_times:
                    print "Last successful export for " + ename + ":"
                    for time in export_times:
                        repo = "{:<70}".format(time)
                        print repo[:70] + '\t' + str(export_times[time])
                else:
                    print "Export has never been performed for " + ename
                sys.exit(-1)
            if not export_times:
                print "No prior export recorded for " + ename + ", performing full content export"
                export_type = 'full'
        else:
            # Use the supplied 'since' date as the last export time for every repo
            since_export = str(since)

            # We have our timestamp so we can kick off an incremental export
            print "Incremental export of content for " + ename + " synchronised after " \
                + str(since)

    # Check the available space in /var/lib/pulp
    check_disk_space(export_type)

    # Remove any previous exported content left behind by prior unclean exit
    if os.path.exists(helpers.EXPORTDIR + '/export'):
        msg = "Removing existing export directory"
        helpers.log_msg(msg, 'DEBUG')
        shutil.rmtree(helpers.EXPORTDIR + '/export')

    # Collect a list of enabled repositories. This is needed for:
    # 1. Matching specific repo exports, and
    # 2. Running import sync per repo on the disconnected side
    repolist = helpers.get_p_json(
        helpers.KATELLO_API + "/repositories/", \
                json.dumps(
                        {
                           "organization_id": org_id,
                           "per_page": '1000',
                        }
                ))

    # If we are running a full DoV export we run a different set of API calls...
    if ename == 'DoV':
        cola = "Exporting DoV"
        if export_type == 'incr' and 'DoV' in export_times:
            last_export = export_times['DoV']
            if since:
                last_export = since_export
            colb = "(INCR since " + last_export + ")"
        else:
            export_type = 'full'
            last_export = '2000-01-01 12:00:00' # This is a dummy value, never used.
            colb = "(FULL)"
        msg = cola + " " + colb
        helpers.log_msg(msg, 'INFO')
        output = "{:<70}".format(cola)
        print output[:70] + ' ' + colb

        # Check if there are any currently running tasks that will conflict with an export
        check_running_tasks(label, ename)

        # Get the version of the CV (Default Org View) to export
        dov_ver = get_cv(org_id)

        # Now we have a CV ID and a starting date, and no conflicting tasks, we can export
        export_id = export_cv(dov_ver, last_export, export_type)

        # Now we need to wait for the export to complete
        helpers.wait_for_task(export_id, 'export')

        # Check if the export completed OK. If not we exit the script.
        tinfo = helpers.get_task_status(export_id)
        if tinfo['state'] != 'running' and tinfo['result'] == 'success':
            msg = "Content View Export OK"
            helpers.log_msg(msg, 'INFO')
            print helpers.GREEN + msg + helpers.ENDC

            # Update the export timestamp for this repo
            export_times['DoV'] = start_time

            # Generate a list of repositories that were exported
            for repo_result in repolist['results']:
                if repo_result['content_type'] == 'yum':
                    # Add the repo to the successfully exported list
                    exported_repos.append(repo_result['label'])

        else:
            msg = "Content View Export FAILED"
            helpers.log_msg(msg, 'ERROR')
            sys.exit(-1)

    else:
        # Verify that defined repos exist in Satellite
        for repo in erepos:
            repo_in_sat = False
            for repo_x in repolist['results']:
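                # Match the configured repo name as a whole word anchored at the end of the label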
                if re.findall("\\b" + repo + "\\b$", repo_x['label']):
                    repo_in_sat = True
                    break
            if not repo_in_sat:
                msg = "'" + repo + "' not found in Satellite"
                helpers.log_msg(msg, 'WARNING')

        # Process each repo
        for repo_result in repolist['results']:
            if repo_result['content_type'] == 'yum':
                # If we have a match, do the export
                if repo_result['label'] in erepos:
                    # Extract the last export time for this repo
                    orig_export_type = export_type
                    cola = "Export " + repo_result['label']
                    if export_type == 'incr' and repo_result['label'] in export_times:
                        last_export = export_times[repo_result['label']]
                        if since:
                            last_export = since_export
                        colb = "(INCR since " + last_export + ")"
                    else:
                        export_type = 'full'
                        last_export = '2000-01-01 12:00:00' # This is a dummy value, never used.
                        colb = "(FULL)"
                    msg = cola + " " + colb
                    helpers.log_msg(msg, 'INFO')
                    output = "{:<70}".format(cola)
                    print output[:70] + ' ' + colb

                    # Check if there are any currently running tasks that will conflict
                    ok_to_export = check_running_tasks(repo_result['label'], ename)

                    if ok_to_export:
                        # Trigger export on the repo
                        export_id = export_repo(repo_result['id'], last_export, export_type)

                        # Now we need to wait for the export to complete
                        helpers.wait_for_task(export_id, 'export')

                        # Check if the export completed OK. If not we exit the script.
                        tinfo = helpers.get_task_status(export_id)
                        if tinfo['state'] != 'running' and tinfo['result'] == 'success':
                            # Count the number of exported packages
                            # First resolve the product label - this forms part of the export path
                            product = get_product(org_id, repo_result['product']['cp_id'])
                            # Now we can build the export path itself
                            basepath = helpers.EXPORTDIR + "/" + org_name + "-" + product + "-" + repo_result['label']
                            if export_type == 'incr':
                                basepath = basepath + "-incremental"
                            exportpath = basepath + "/" + repo_result['relative_path']
                            msg = "\nExport path = " + exportpath
                            helpers.log_msg(msg, 'DEBUG')

                            os.chdir(exportpath)
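                            # os.walk(".").next()[2] lists only the files directly under the
                            # export path; files ending in .rpm are counted as new packages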
                            numrpms = len([f for f in os.walk(".").next()[2] if f[-4:] == ".rpm"])

                            msg = "Repository Export OK (" + str(numrpms) + " new packages)"
                            helpers.log_msg(msg, 'INFO')
                            print helpers.GREEN + msg + helpers.ENDC

                            # Update the export timestamp for this repo
                            export_times[repo_result['label']] = start_time

                            # Add the repo to the successfully exported list
                            if numrpms != 0 or args.repodata:
                                msg = "Adding " + repo_result['label'] + " to export list"
                                helpers.log_msg(msg, 'DEBUG')
                                exported_repos.append(repo_result['label'])
                            else:
                                msg = "Not including repodata for empty repo " + repo_result['label']
                                helpers.log_msg(msg, 'DEBUG')

                        else:
                            msg = "Export FAILED"
                            helpers.log_msg(msg, 'ERROR')

                        # Reset the export type to the user specified, in case we overrode it.
                        export_type = orig_export_type

                else:
                    msg = "Skipping  " + repo_result['label']
                    helpers.log_msg(msg, 'DEBUG')

            # Handle FILE type exports (ISO repos)
            elif repo_result['content_type'] == 'file':
                # If we have a match, do the export
                if repo_result['label'] in erepos:
                    # Extract the last export time for this repo
                    orig_export_type = export_type
                    cola = "Export " + repo_result['label']
                    if export_type == 'incr' and repo_result['label'] in export_times:
                        last_export = export_times[repo_result['label']]
                        if since:
                            last_export = since_export
                        colb = "(INCR since " + last_export + ")"
                    else:
                        export_type = 'full'
                        last_export = '2000-01-01 12:00:00' # This is a dummy value, never used.
                        colb = "(FULL)"
                    msg = cola + " " + colb
                    helpers.log_msg(msg, 'INFO')
                    output = "{:<70}".format(cola)
                    print output[:70] + ' ' + colb

                    # Check if there are any currently running tasks that will conflict
                    ok_to_export = check_running_tasks(repo_result['label'], ename)

                    if ok_to_export:
                        # Trigger export on the repo
                        numfiles = export_iso(repo_result['id'], repo_result['label'], repo_result['relative_path'], last_export, export_type)

                        # Reset the export type to the user specified, in case we overrode it.
                        export_type = orig_export_type

                        # Update the export timestamp for this repo
                        export_times[repo_result['label']] = start_time
                        
                        # Add the repo to the successfully exported list
                        if numfiles != 0 or args.repodata:
                            msg = "Adding " + repo_result['label'] + " to export list"
                            helpers.log_msg(msg, 'DEBUG')
                            exported_repos.append(repo_result['label'])
                        else:
                            msg = "Not including repodata for empty repo " + repo_result['label']
                            helpers.log_msg(msg, 'DEBUG')

                else:
                    msg = "Skipping  " + repo_result['label']
                    helpers.log_msg(msg, 'DEBUG')



    # Combine resulting directory structures into a single repo format (top level = /content)
    prep_export_tree(org_name)

    # Now we need to process the on-disk export data.
    # Define the location of our exported data.
    export_dir = helpers.EXPORTDIR + "/export"

    # Write out the list of exported repos. This will be transferred to the disconnected system
    # and used to perform the repo sync tasks during the import.
    pickle.dump(exported_repos, open(export_dir + '/exported_repos.pkl', 'wb'))

    # Run GPG Checks on the exported RPMs
    if not args.nogpg:
        do_gpg_check(export_dir)

    # Add our exported data to a tarfile
    create_tar(export_dir, ename)

    # We're done. Write the start timestamp to file for next time
    os.chdir(script_dir)
    pickle.dump(export_times, open(vardir + '/exports_' + ename + '.pkl', "wb"))

    # And we're done!
    print helpers.GREEN + "Export complete.\n" + helpers.ENDC
    print 'Please transfer the contents of ' + helpers.EXPORTDIR + \
        ' to your disconnected Satellite system content import location.\n' \
        'Once transferred, please run ' + helpers.BOLD + ' sat_import' \
        + helpers.ENDC + ' to extract it.'
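read_pickle() is called near the top of main() but is not included in this listing. A minimal
sketch that mirrors the pickle.dump() calls above (the real implementation may differ):

import os
import pickle

def read_pickle(ename):
    """Sketch: load the last-export timestamps recorded for this export group."""
    # vardir is the module-level 'var' directory set in main()
    pkl_file = os.path.join(vardir, 'exports_' + ename + '.pkl')
    if not os.path.exists(pkl_file):
        # No previous export recorded; main() treats an empty dict as 'never exported'
        return {}
    with open(pkl_file, 'rb') as pkl:
        return pickle.load(pkl)
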
def check_counts(org_id, package_count, count):
    """
    Verify the number of pkgs/errutum in each repo match the sync host.
    Input is a dictionary loaded from a pickle that was created on the sync
    host in format  {Repo_Label, pkgs:erratum}
    """

    # Get a listing of repositories in this Satellite
    enabled_repos = helpers.get_p_json(
        helpers.KATELLO_API + "/repositories/", \
            json.dumps(
                {
                    "organization_id": org_id,
                    "per_page": '1000',
                }
            ))

    # First loop through the repos in the import dict and find the local ID
    table_data = []
    logtable_data = []
    display_data = False
    for repo, counts in package_count.iteritems():
        # Split the count data into packages and erratum
        sync_pkgs = counts.split(':')[0]
        sync_erratum = counts.split(':')[1]

        # Loop through each repo and count the local pkgs in each repo
        for repo_result in enabled_repos['results']:
            if repo in repo_result['label']:
                # Ensure we have an exact match on the repo label
                if repo == repo_result['label']:
                    local_pkgs, local_erratum = count_packages(repo_result['id'])

                    # Set the output colour of the table entry based on the pkg counts
                    if int(local_pkgs) == int(sync_pkgs):
                        colour = helpers.GREEN
                        display = False
                    elif int(local_pkgs) == 0 and int(sync_pkgs) != 0:
                        colour = helpers.RED
                        display = True
                        display_data = True
                    elif int(local_pkgs) < int(sync_pkgs):
                        colour = helpers.YELLOW
                        display = True
                        display_data = True
                    else:
                        # If local_pkg > sync_pkg - can happen due to 'mirror on sync' option
                        # - sync host deletes old pkgs. If this is the case we cannot verify
                        # an exact package status so we'll set BLUE
                        colour = helpers.BLUE
                        display = False
                        display_data = True

                    # Truncate the repo label to 70 chars and build the table row
                    reponame = "{:<70}".format(repo)
                    # Add all counts if it has been requested
                    if count:
                        display_data = True
                        table_data.append([colour, repo[:70], str(sync_pkgs), str(local_pkgs), helpers.ENDC])
                    else:
                        # Otherwise only add counts that are non-green (display = True)
                        if display:
                            table_data.append([colour, repo[:70], str(sync_pkgs), str(local_pkgs), helpers.ENDC])
                    # Always log all package data to the log regardless of 'count'
                    logtable_data.append([repo[:70], str(sync_pkgs), str(local_pkgs)])

    if display_data:
        msg = '\nRepository package mismatch count verification...'
        helpers.log_msg(msg, 'INFO')
        print msg

        # Print Table header
        header = ["", "Repository", "SyncHost", "ThisHost", ""]
        header1 = ["", "------------------------------------------------------------", "--------", "--------", ""]
        row_format = "{:<1} {:<70} {:>9} {:>9} {:<1}"
        logrow_format = "{:<70} {:>9} {:>9}"
        print row_format.format(*header)
        helpers.log_msg(row_format.format(*header), 'INFO')
        print row_format.format(*header1)
        helpers.log_msg(row_format.format(*header1), 'INFO')

        # Print the table rows
        for row in table_data:
            print row_format.format(*row)
        for row in logtable_data:
            helpers.log_msg(logrow_format.format(*row), 'INFO')
        print '\n'
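count_packages() is referenced above but not defined in this listing. One plausible sketch reads
the repository's content counts from the Katello API; helpers.get_json (a plain GET counterpart
to the get_p_json helper used above) and the 'content_counts' field names are assumptions:

def count_packages(repo_id):
    """Sketch: return (package_count, erratum_count) for a single repository."""
    # Field names below are assumptions about the Katello repository API response
    result = helpers.get_json(helpers.KATELLO_API + "/repositories/" + str(repo_id))
    numpkg = result['content_counts']['rpm']
    numerrata = result['content_counts']['erratum']
    return numpkg, numerrata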