Example No. 1
def export_cv(dov_ver, last_export, export_type):
    """
    Export Content View
    Takes the content view version and a start time (API 'since' value)
    """
    if export_type == 'full':
        msg = "Exporting complete DOV version " + str(dov_ver)
    else:
        msg = "Exporting DOV version " + str(dov_ver) + " from start date " + last_export
    helpers.log_msg(msg, 'INFO')

    try:
        if export_type == 'full':
            task_id = helpers.post_json(
                helpers.KATELLO_API + "content_view_versions/" + str(dov_ver) + "/export", \
                    json.dumps(
                        {
                        }
                    ))["id"]
        else:
            task_id = helpers.post_json(
                helpers.KATELLO_API + "content_view_versions/" + str(dov_ver) + "/export/", \
                    json.dumps(
                        {
                            "since": last_export,
                        }
                    ))["id"]
    except: # pylint: disable-msg=W0702
        msg = "Unable to start export - Conflicting Sync or Export already in progress"
        helpers.log_msg(msg, 'ERROR')
        if helpers.MAILOUT:
            helpers.tf.seek(0)
            output = "{}".format(helpers.tf.read())
            subject = "Satellite 6 export failure"
            helpers.mailout(subject, output)
        sys.exit(1)

    # Trap some other error conditions
    if "Required lock is already taken" in str(task_id):
        msg = "Unable to start export - Sync in progress"
        helpers.log_msg(msg, 'ERROR')
        if helpers.MAILOUT:
            helpers.tf.seek(0)
            output = "{}".format(helpers.tf.read())
            subject = "Satellite 6 export failure"
            helpers.mailout(subject, output)
        sys.exit(1)

    msg = "Export started, task_id = " + str(task_id)
    helpers.log_msg(msg, 'DEBUG')

    return str(task_id)
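
A hedged usage sketch (the version ID and 'since' timestamp are placeholders; any export_type other than 'full' selects an incremental export):

# Hypothetical calls - 42 and the date below are illustrative values only.
task = export_cv(42, '2018-01-01 12:00:00', 'incr')  # incremental export since the given date
task = export_cv(42, None, 'full')                   # full export; last_export is not used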
Example No. 2
def promote(target_env, ver_list, ver_descr, ver_version, env_list, prior_list, dry_run):
    """Promote Content View"""
    target_env_id = env_list[target_env]
    source_env_id = prior_list[target_env_id]

    # Extract the name of the source environment so we can inform the user
    for key, val in env_list.items():
        if val == source_env_id:
            prior_env = key

    # Set the task name to be displayed in the task monitoring stage
    task_name = "Promotion from " + prior_env + " to " + target_env

    # Now we have all the info needed, we can actually trigger the promotion.
    # Loop through each CV with promotable versions
    task_list = []
    ref_list = {}

    # Catch scenario that no CV versions are found matching promotion criteria
    if not ver_list:
        msg = "No content view versions found matching promotion criteria"
        helpers.log_msg(msg, 'WARNING')
        sys.exit(1)

    for cvid in ver_list.keys():

        # Check if there is a publish/promote already running on this content view
        locked = helpers.check_running_publish(cvid, ver_descr[cvid])

        if not locked:
            msg = "Promoting '" + str(ver_descr[cvid]) + "' Version " + str(ver_version[cvid]) +\
                " from " + prior_env + " to " + str(target_env)
            helpers.log_msg(msg, 'INFO')
            print helpers.HEADER + msg + helpers.ENDC

        if not dry_run and not locked:
            try:
                task_id = helpers.post_json(
                    helpers.KATELLO_API + "content_view_versions/" + str(ver_list[cvid]) +\
                    "/promote/", json.dumps(
                        {
                            "environment_id": target_env_id
                        }
                        ))["id"]
            except Warning:
                msg = "Failed to initiate promotion of " + str(ver_descr[cvid])
                helpers.log_msg(msg, 'WARNING')
            else:
                task_list.append(task_id)
                ref_list[task_id] = ver_descr[cvid]

    # Exit in the case of a dry-run
    if dry_run:
        msg = "Dry run - not actually performing promotion"
        helpers.log_msg(msg, 'WARNING')
        sys.exit(2)


    return task_list, ref_list, task_name
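
The dictionary shapes promote() expects can be read off the function body; a minimal sketch with invented IDs and names:

# All values below are illustrative, inferred from how promote() indexes its arguments.
env_list = {'Library': 1, 'Development': 2, 'Production': 3}  # env name -> env id
prior_list = {2: 1, 3: 2}            # env id -> id of the environment it promotes from
ver_list = {101: 456}                # content view id -> promotable version id
ver_descr = {101: 'RHEL7 Base'}      # content view id -> display name
ver_version = {101: '5'}             # content view id -> version number
tasks, refs, name = promote('Development', ver_list, ver_descr, ver_version,
                            env_list, prior_list, dry_run=False)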
Example No. 3
def export_repo(repo_id, last_export, export_type):
    """
    Export individual repository
    Takes the repository id and a start time (API 'since' value)
    """
    if export_type == 'full':
        msg = "Exporting repository id " + str(repo_id)
    else:
        msg = "Exporting repository id " + str(
            repo_id) + " from start date " + last_export
    helpers.log_msg(msg, 'INFO')

    try:
        if export_type == 'full':
            task_id = helpers.post_json(
                helpers.KATELLO_API + "repositories/" + str(repo_id) + "/export", \
                    json.dumps(
                        {
                        }
                    ))["id"]
        else:
            task_id = helpers.post_json(
                helpers.KATELLO_API + "repositories/" + str(repo_id) + "/export/", \
                    json.dumps(
                        {
                            "since": last_export,
                        }
                    ))["id"]
    except:  # pylint: disable-msg=W0702
        msg = "Unable to start export - Conflicting Sync or Export already in progress"
        helpers.log_msg(msg, 'ERROR')
        sys.exit(-1)

    # Trap some other error conditions
    if "Required lock is already taken" in str(task_id):
        msg = "Unable to start export - Sync in progress"
        helpers.log_msg(msg, 'ERROR')
        sys.exit(-1)

    msg = "Export started, task_id = " + str(task_id)
    helpers.log_msg(msg, 'DEBUG')

    return str(task_id)
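
export_repo() follows the same calling pattern per repository; a hypothetical invocation (the repository ID and date are placeholders):

# Placeholders only - any export_type other than 'full' performs an incremental export.
task = export_repo(1234, '2018-01-01 12:00:00', 'incr')
task = export_repo(1234, None, 'full')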
Example No. 4
def export_repo(repo_id, last_export, export_type):
    """
    Export individual repository
    Takes the repository id and a start time (API 'since' value)
    """
    if export_type == 'full':
        msg = "Exporting repository id " + str(repo_id)
    else:
        msg = "Exporting repository id " + str(repo_id) + " from start date " + last_export
    helpers.log_msg(msg, 'INFO')

    try:
        if export_type == 'full':
            task_id = helpers.post_json(
                helpers.KATELLO_API + "repositories/" + str(repo_id) + "/export", \
                    json.dumps(
                        {
                        }
                    ))["id"]
        else:
            task_id = helpers.post_json(
                helpers.KATELLO_API + "repositories/" + str(repo_id) + "/export/", \
                    json.dumps(
                        {
                            "since": last_export,
                        }
                    ))["id"]
    except: # pylint: disable-msg=W0702
        msg = "Unable to start export - Conflicting Sync or Export already in progress"
        helpers.log_msg(msg, 'ERROR')
        sys.exit(-1)

    # Trap some other error conditions
    if "Required lock is already taken" in str(task_id):
        msg = "Unable to start export - Sync in progress"
        helpers.log_msg(msg, 'ERROR')
        sys.exit(-1)

    msg = "Export started, task_id = " + str(task_id)
    helpers.log_msg(msg, 'DEBUG')

    return str(task_id)
Example No. 5
def export_cv(dov_ver, last_export, export_type):
    """
    Export Content View
    Takes the content view version and a start time (API 'since' value)
    """
    if export_type == 'full':
        msg = "Exporting complete DOV version " + str(dov_ver)
    else:
        msg = "Exporting DOV version " + str(dov_ver) + " from start date " + last_export
    helpers.log_msg(msg, 'INFO')

    try:
        if export_type == 'full':
            task_id = helpers.post_json(
                helpers.KATELLO_API + "content_view_versions/" + str(dov_ver) + "/export", \
                    json.dumps(
                        {
                        }
                    ))["id"]
        else:
            task_id = helpers.post_json(
                helpers.KATELLO_API + "content_view_versions/" + str(dov_ver) + "/export/", \
                    json.dumps(
                        {
                            "since": last_export,
                        }
                    ))["id"]
    except: # pylint: disable-msg=W0702
        msg = "Unable to start export - Conflicting Sync or Export already in progress"
        helpers.log_msg(msg, 'ERROR')
        sys.exit(-1)

    # Trap some other error conditions
    if "Required lock is already taken" in str(task_id):
        msg = "Unable to start export - Sync in progress"
        helpers.log_msg(msg, 'ERROR')
        sys.exit(-1)

    msg = "Export started, task_id = " + str(task_id)
    helpers.log_msg(msg, 'DEBUG')

    return str(task_id)
Example No. 6
def publish(ver_list, ver_descr, ver_version, dry_run, runuser):
    """Publish Content View"""

    # Set the task name to be displayed in the task monitoring stage
    task_name = "Publish content view to Library"

    # Now we have all the info needed, we can actually trigger the publish.
    task_list = []
    ref_list = {}

    # Catch scenario that no CV versions are found matching publish criteria
    if not ver_list:
        msg = "No content view versions found matching publication criteria"
        helpers.log_msg(msg, 'ERROR')
        sys.exit(1)

    for cvid in ver_list.keys():

        # Check if there is a publish/promote already running on this content view
        locked = helpers.check_running_publish(ver_list[cvid], ver_descr[cvid])

        if not locked:
            msg = "Publishing '" + str(ver_descr[cvid]) + "' Version " + str(ver_version[cvid]) + ".0"
            helpers.log_msg(msg, 'INFO')
            print helpers.HEADER + msg + helpers.ENDC

        # Set up the description that will be added to the published version
        description = "Published by " + runuser + "\n via API script"

        if not dry_run and not locked:
            try:
                task_id = helpers.post_json(
                    helpers.KATELLO_API + "content_views/" + str(ver_list[cvid]) +\
                    "/publish", json.dumps(
                        {
                            "description": description
                        }
                        ))["id"]
            except Warning:
                msg = "Failed to initiate publication of " + str(ver_descr[cvid])
                helpers.log_msg(msg, 'WARNING')
            else:
                task_list.append(task_id)
                ref_list[task_id] = ver_descr[cvid]

    # Exit in the case of a dry-run
    if dry_run:
        msg = "Dry run - not actually performing publish"
        helpers.log_msg(msg, 'WARNING')
        sys.exit(2)


    return task_list, ref_list, task_name
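
As with promote(), the arguments are parallel dictionaries keyed by content view ID; a sketch with invented values:

# Illustrative inputs inferred from the function body.
ver_list = {7: 7}                # content view id -> id used in the publish URL
ver_descr = {7: 'RHEL7 Base'}    # content view id -> display name
ver_version = {7: '6'}           # logged and published as "Version 6.0"
tasks, refs, name = publish(ver_list, ver_descr, ver_version,
                            dry_run=False, runuser='svc-api')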
Example No. 7
def sync_content(org_id, imported_repos):
    """
    Synchronize the repositories
    Triggers a sync of all repositories belonging to the configured sync plan
    """
    repos_to_sync = []
    delete_override = False

    # Get a listing of repositories in this Satellite
    enabled_repos = helpers.get_p_json(
        helpers.KATELLO_API + "/repositories/", \
            json.dumps(
                {
                    "organization_id": org_id,
                }
            ))

    # Loop through each repo to be imported/synced
    for repo in imported_repos:
        do_import = False
        for repo_result in enabled_repos['results']:
            if repo in repo_result['label']:
                do_import = True
                repos_to_sync.append(repo_result['id'])
        if do_import:
            msg = "Repo " + repo + " found in Satellite"
            helpers.log_msg(msg, 'DEBUG')
        else:
            msg = "Repo " + repo + " is not enabled in Satellite"
            # If the repo is not enabled, don't delete the input files.
            # This gives the admin a chance to manually enable the repo and re-import
            delete_override = True
            helpers.log_msg(msg, 'WARNING')
            # TODO: We could go on here and try to enable the Red Hat repo .....

    msg = "Repo ids to sync: " + str(repos_to_sync)
    helpers.log_msg(msg, 'DEBUG')

    msg = "Syncing repositories"
    helpers.log_msg(msg, 'INFO')
    print msg
    task_id = helpers.post_json(
        helpers.KATELLO_API + "repositories/bulk/sync", \
            json.dumps(
                {
                    "ids": repos_to_sync,
                }
            ))["id"]
    msg = "Repo sync task id = " + task_id
    helpers.log_msg(msg, 'DEBUG')

    return task_id, delete_override
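
A hypothetical call (the organisation ID and repository label are placeholders):

task, keep_inputs = sync_content(1, ['rhel-7-server-rpms-7Server-x86_64'])
# The second return value (delete_override) is True when a repo was not enabled
# in Satellite, telling the caller to keep the import files for a later re-run.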
Example No. 8
def promote(target_env, ver_list, ver_descr, ver_version, env_list, prior_list, dry_run, quiet):
    """Promote Content View"""
    target_env_id = env_list[target_env]
    source_env_id = prior_list[target_env_id]

    # Extract the name of the source environment so we can inform the user
    for key, val in env_list.items():
        if val == source_env_id:
            prior_env = key

    # Set the task name to be displayed in the task monitoring stage
    task_name = "Promotion from " + prior_env + " to " + target_env

    # Now we have all the info needed, we can actually trigger the promotion.
    # Loop through each CV with promotable versions
    task_list = []
    ref_list = {}

    # Catch scenario that no CV versions are found matching promotion criteria
    if not ver_list:
        msg = "No content view versions found matching promotion criteria"
        helpers.log_msg(msg, 'WARNING')
        if helpers.MAILOUT:
            helpers.tf.seek(0)
            output = "{}".format(helpers.tf.read())
            helpers.mailout(helpers.MAILSUBJ_FP, output)
        sys.exit(1)

    # Break repos to promote into batches as configured in config.yml
    cvchunks = [ ver_list.keys()[i:i+helpers.PROMOTEBATCH] for i in range(0, len(ver_list), helpers.PROMOTEBATCH) ]

    # Loop through the smaller subsets of repo id's
    for chunk in cvchunks:
        for cvid in chunk:

            # Check if there is a publish/promote already running on this content view
            locked = helpers.check_running_publish(cvid, ver_descr[cvid])

            if not locked:
                msg = "Promoting '" + str(ver_descr[cvid]) + "' Version " + str(ver_version[cvid]) +\
                    " from " + prior_env + " to " + str(target_env)
                helpers.log_msg(msg, 'INFO')
                print helpers.HEADER + msg + helpers.ENDC

            if not dry_run and not locked:
                try:
                    task_id = helpers.post_json(
                        helpers.KATELLO_API + "content_view_versions/" + str(ver_list[cvid]) +\
                        "/promote/", json.dumps(
                            {
                                "environment_id": target_env_id
                            }
                            ))["id"]
                except Warning:
                    msg = "Failed to initiate promotion of " + str(ver_descr[cvid])
                    helpers.log_msg(msg, 'WARNING')
                else:
                    task_list.append(task_id)
                    ref_list[task_id] = ver_descr[cvid]

        # Exit in the case of a dry-run
        if dry_run:
            msg = "Dry run - not actually performing promotion"
            helpers.log_msg(msg, 'WARNING')
        else:
            # Monitor the status of the promotion tasks
            helpers.watch_tasks(task_list, ref_list, task_name, quiet)

    # Exit in the case of a dry-run
    if dry_run:
        sys.exit(2)
    else:
        return
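
The batching relies on a simple slicing idiom over the dict's key list (a plain list under Python 2); for instance, with a stand-in batch size of 2:

keys = [10, 11, 12, 13, 14]
batch = 2  # stand-in for helpers.PROMOTEBATCH from config.yml
chunks = [keys[i:i + batch] for i in range(0, len(keys), batch)]
# chunks == [[10, 11], [12, 13], [14]]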
Example No. 9
def import_icon_people(in_file):
    """Import households and people from icon.*.yaml files into IconCMO"""

    auth_data = helpers.get_auth_data()

    directory = os.path.dirname(in_file)

    households_file = open(os.path.join(directory, 'icon.households.yaml'), 'r')
    people_file = open(os.path.join(directory, 'icon.people.yaml'), 'r')

    households = yaml.load(households_file)
    people = yaml.load_all(people_file)

    household_creator = functools.partial(helpers.request_data_builder,
                                          helpers.get_auth_data(),
                                          'membership', 'households', 'create')

    member_creator = functools.partial(helpers.request_data_builder,
                                          helpers.get_auth_data(),
                                          'membership', 'members', 'create')

    household_retriever = functools.partial(helpers.request_data_builder,
                                            helpers.get_auth_data(),
                                            'membership', 'households', 'read')

    member_retriever = functools.partial(helpers.request_data_builder,
                                          helpers.get_auth_data(),
                                          'membership', 'members', 'read')

    i = 0
    for person in people:
        person_id = None
        household_id = None
        if 'id' in person: continue # This person doesn't need to be imported.
        if person['household_id'] in households:
            household = households[person['household_id']]
            if 'id' not in household: # This person's household needs to be imported.
                # if household['status'] in conf.DEFAULT_STATUSES:
                #     household['status'] = conf.DEFAULT_STATUSES[household['status']]
                hh_request = household_creator(household)
                # print hh_request
                hh_data = helpers.post_json(helpers.get_api_url(), hh_request)
                if 'number' in hh_data:
                    print 'Error (%d): %s' % (hh_data['number'], hh_data['message'])
                    if hh_data['number'] == 421:
                        hh_data = helpers.post_json(helpers.get_api_url(), household_retriever(None, {
                                                    'last_name': household['last_name'],
                                                    'city': household['city'],
                                                    'state': household['state']}))
                        hh_string = '%s %s in %s, %s' % (household['first_name'], household['last_name'],
                                                         household['city'], household['state'])
                        if 'households' in hh_data and len(hh_data['households']) == 1:
                            household_id = int(hh_data['households'][0]['id'])
                            print 'Found existing record for %s with ID %s' % (hh_string, household_id)
                        else:
                            # hh_string must exist on this branch too (it previously raised NameError here)
                            print 'Unable to find certain match for %s' % hh_string
                elif 'statistics' in hh_data:
                    household_id = int(hh_data['statistics']['last_id'])
            else:
                household_id = int(household['id'])

            if household_id:
                person_data = helpers.post_json(helpers.get_api_url(),member_retriever(None, {
                                                'first_name': person['first_name'],
                                                'last_name': person['last_name']}))
                if 'members' in person_data and len(person_data['members']) == 1:
                    print "%s %s already in IconCMO. Skipping." % (person['first_name'], person['last_name'])
                    continue
                person['household_id'] = household_id
                # if person['status'] in conf.DEFAULT_STATUSES:
                #     person['status'] = conf.DEFAULT_STATUSES[person['status']]
                if 'phone' in person and person['phone'] == household['phone']:
                    if 'phones' in person:
                        for phone in person['phones']:
                            if phone['id'] == 'Cell':
                                person['phone'] = phone['phone']  # was '==', a comparison with no effect
                        del person['phones']
                if 'email' in person:
                    del person['email']

                for key in person.keys():
                    if key in household and person[key] == household[key]:
                        if key in ['status', 'last_name', 'first_name']: continue
                        del person[key]
                p_request = member_creator(person)
                # print p_request
                p_data = helpers.post_json(helpers.get_api_url(), p_request)
                # print p_data
            else:
                print 'Unable to import %s %s due to problems with household identity.' % (
                    person['first_name'], person['last_name']
                )
        i += 1
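
The loop assumes icon.households.yaml holds a single mapping of household ID to record, while icon.people.yaml is a multi-document stream (hence yaml.load_all). A sketch of the assumed shapes, with invented field values:

# icon.households.yaml (one document)
#   H1:
#     first_name: John
#     last_name: Smith
#     city: Springfield
#     state: IL
#     phone: 555-0100
#
# icon.people.yaml (one document per person)
#   ---
#   first_name: Jane
#   last_name: Smith
#   household_id: H1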
Example No. 10
def sync_content(org_id, imported_repos):
    """
    Synchronize the repositories
    Triggers a sync of all repositories belonging to the configured sync plan
    """
    repos_to_sync = []
    delete_override = False

    # Get a listing of repositories in this Satellite
    enabled_repos = helpers.get_p_json(
        helpers.KATELLO_API + "/repositories/", \
            json.dumps(
                {
                    "organization_id": org_id,
                    "per_page": '1000',
                }
            ))

    # Loop through each repo to be imported/synced
    for repo in imported_repos:
        do_import = False
        for repo_result in enabled_repos['results']:
            if repo in repo_result['label']:
                # Ensure we have an exact match on the repo label
                if repo == repo_result['label']:
                    do_import = True
                    repos_to_sync.append(repo_result['id'])

                    # Ensure Mirror-on-sync flag is set to FALSE to make sure incremental
                    # import does not (cannot) delete existing packages.
                    msg = "Setting mirror-on-sync=false for repo id " + str(
                        repo_result['id'])
                    helpers.log_msg(msg, 'DEBUG')
                    helpers.put_json(
                        helpers.KATELLO_API + "/repositories/" + str(repo_result['id']), \
                            json.dumps(
                                {
                                    "mirror_on_sync": False
                                }
                            ))

        if do_import:
            msg = "Repo " + repo + " found in Satellite"
            helpers.log_msg(msg, 'DEBUG')
        else:
            msg = "Repo " + repo + " is not enabled in Satellite"
            # If the repo is not enabled, don't delete the input files.
            # This gives the admin a chance to manually enable the repo and re-import
            delete_override = True
            helpers.log_msg(msg, 'WARNING')
            # TODO: We could go on here and try to enable the Red Hat repo .....

    # If we get to here and nothing was added to repos_to_sync we will abort the import.
    # This will probably occur on the initial import - nothing will be enabled in Satellite.
    # Also if there are no updates during incremental sync.
    if not repos_to_sync:
        msg = "No updates in imported content - skipping sync"
        helpers.log_msg(msg, 'WARNING')
        return
    else:
        msg = "Repo ids to sync: " + str(repos_to_sync)
        helpers.log_msg(msg, 'DEBUG')

        msg = "Syncing repositories"
        helpers.log_msg(msg, 'INFO')
        print msg

        # Break repos_to_sync into groups of n
        repochunks = [
            repos_to_sync[i:i + helpers.SYNCBATCH]
            for i in range(0, len(repos_to_sync), helpers.SYNCBATCH)
        ]

        # Loop through the smaller batches of repos and sync them
        for chunk in repochunks:
            chunksize = len(chunk)
            msg = "Syncing repo batch " + str(chunk)
            helpers.log_msg(msg, 'DEBUG')
            task_id = helpers.post_json(
                helpers.KATELLO_API + "repositories/bulk/sync", \
                    json.dumps(
                        {
                            "ids": chunk,
                        }
                    ))["id"]
            msg = "Repo sync task id = " + task_id
            helpers.log_msg(msg, 'DEBUG')

            # Now we need to wait for the sync to complete
            helpers.wait_for_task(task_id, 'sync')

            tinfo = helpers.get_task_status(task_id)
            if tinfo['state'] != 'running' and tinfo['result'] == 'success':
                msg = "Batch of " + str(chunksize) + " repos complete"
                helpers.log_msg(msg, 'INFO')
                print helpers.GREEN + msg + helpers.ENDC
            else:
                msg = "Batch sync has errors"
                helpers.log_msg(msg, 'WARNING')

        return delete_override
Example No. 11
def sync_content(org_id, imported_repos):
    """
    Synchronize the repositories
    Triggers a sync of all repositories belonging to the configured sync plan
    """
    repos_to_sync = []
    delete_override = False

    # Get a listing of repositories in this Satellite
    enabled_repos = helpers.get_p_json(
        helpers.KATELLO_API + "/repositories/", \
            json.dumps(
                {
                    "organization_id": org_id,
                    "per_page": '1000',
                }
            ))

    # Loop through each repo to be imported/synced
    for repo in imported_repos:
        do_import = False
        for repo_result in enabled_repos['results']:
            if repo in repo_result['label']:
                do_import = True
                repos_to_sync.append(repo_result['id'])

                # Ensure Mirror-on-sync flag is set to FALSE to make sure incremental
                # import does not (cannot) delete existing packages.
                msg = "Setting mirror-on-sync=false for repo id " + str(repo_result['id'])
                helpers.log_msg(msg, 'DEBUG')
                helpers.put_json(
                    helpers.KATELLO_API + "/repositories/" + str(repo_result['id']), \
                        json.dumps(
                            {
                                "mirror_on_sync": False
                            }
                        ))

        if do_import:
            msg = "Repo " + repo + " found in Satellite"
            helpers.log_msg(msg, 'DEBUG')
        else:
            msg = "Repo " + repo + " is not enabled in Satellite"
            # If the repo is not enabled, don't delete the input files.
            # This gives the admin a chance to manually enable the repo and re-import
            delete_override = True
            helpers.log_msg(msg, 'WARNING')
            # TODO: We could go on here and try to enable the Red Hat repo .....

    # If we get to here and nothing was added to repos_to_sync we will abort the import.
    # This will probably occur on the initial import - nothing will be enabled in Satellite.
    # Also if there are no updates during incremental sync.
    if not repos_to_sync:
        msg = "No updates in imported content - skipping sync"
        helpers.log_msg(msg, 'WARNING')
        return
    else:
        msg = "Repo ids to sync: " + str(repos_to_sync)
        helpers.log_msg(msg, 'DEBUG')

        msg = "Syncing repositories"
        helpers.log_msg(msg, 'INFO')
        print msg

        # Break repos_to_sync into groups of n 
        repochunks = [ repos_to_sync[i:i+helpers.SYNCBATCH] for i in range(0, len(repos_to_sync), helpers.SYNCBATCH) ]

        # Loop through the smaller batches of repos and sync them
        for chunk in repochunks:
            chunksize = len(chunk)
            msg = "Syncing repo batch " + str(chunk)
            helpers.log_msg(msg, 'DEBUG')
            task_id = helpers.post_json(
                helpers.KATELLO_API + "repositories/bulk/sync", \
                    json.dumps(
                        {
                            "ids": chunk,
                        }
                    ))["id"]
            msg = "Repo sync task id = " + task_id
            helpers.log_msg(msg, 'DEBUG')

            # Now we need to wait for the sync to complete
            helpers.wait_for_task(task_id, 'sync')

            tinfo = helpers.get_task_status(task_id)
            if tinfo['state'] != 'running' and tinfo['result'] == 'success':
                msg = "Batch of " + str(chunksize) + " repos complete"
                helpers.log_msg(msg, 'INFO')
                print helpers.GREEN + msg + helpers.ENDC
            else:
                msg = "Batch sync has errors"
                helpers.log_msg(msg, 'WARNING')

        return delete_override
Example No. 12
def publish(ver_list, ver_descr, ver_version, dry_run, runuser, description, quiet, forcemeta):
    """Publish Content View"""

    # Set the task name to be displayed in the task monitoring stage
    task_name = "Publish content view to Library"

    # Now we have all the info needed, we can actually trigger the publish.
    task_list = []
    ref_list = {}

    # Catch scenario that no CV versions are found matching publish criteria
    if not ver_list:
        msg = "No content view versions found matching publication criteria"
        helpers.log_msg(msg, 'ERROR')
        if helpers.MAILOUT:
            helpers.tf.seek(0)
            output = "{}".format(helpers.tf.read())
            helpers.mailout(helpers.MAILSUBJ_FP, output)
        sys.exit(1)

    # Break repos to publish into batches as configured in config.yml
    cvchunks = [ ver_list.keys()[i:i+helpers.PUBLISHBATCH] for i in range(0, len(ver_list), helpers.PUBLISHBATCH) ]

    # Loop through the smaller subsets of repo id's
    for chunk in cvchunks:
        for cvid in chunk:

            # Check if there is a publish/promote already running on this content view
            locked = helpers.check_running_publish(ver_list[cvid], ver_descr[cvid])

            if not locked:
                msg = "Publishing '" + str(ver_descr[cvid]) + "' Version " + str(ver_version[cvid]) + ".0"
                helpers.log_msg(msg, 'INFO')
                print helpers.HEADER + msg + helpers.ENDC

            if not dry_run and not locked:
                try:
                    task_id = helpers.post_json(
                        helpers.KATELLO_API + "content_views/" + str(ver_list[cvid]) +\
                        "/publish", json.dumps(
                            {
                                "description": description,
                                "force_yum_metadata_regeneration": str(forcemeta)
                            }
                            ))["id"]
                except (Warning, KeyError):
                    msg = "Failed to initiate publication of " + str(ver_descr[cvid])
                    helpers.log_msg(msg, 'WARNING')
                else:
                    task_list.append(task_id)
                    ref_list[task_id] = ver_descr[cvid]

        # Notify user in the case of a dry-run
        if dry_run:
            msg = "Dry run - not actually performing publish"
            helpers.log_msg(msg, 'WARNING')
        else:
            # Wait for the tasks to finish
            helpers.watch_tasks(task_list, ref_list, task_name, quiet)

    # Exit in the case of a dry-run
    if dry_run:
        sys.exit(2)
    else:
        return
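
This variant also takes a forcemeta flag, passed through to the API as a string; a hypothetical call:

# Placeholders only - forces yum metadata regeneration on each published version.
publish(ver_list, ver_descr, ver_version, dry_run=False, runuser='svc-api',
        description='Published via API script', quiet=True, forcemeta=True)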
Example No. 13
def publish(ver_list, ver_descr, ver_version, dry_run, runuser, description,
            quiet):
    """Publish Content View"""

    # Set the task name to be displayed in the task monitoring stage
    task_name = "Publish content view to Library"

    # Now we have all the info needed, we can actually trigger the publish.
    task_list = []
    ref_list = {}

    # Catch scenario that no CV versions are found matching publish criteria
    if not ver_list:
        msg = "No content view versions found matching publication criteria"
        helpers.log_msg(msg, 'ERROR')
        if helpers.MAILOUT:
            helpers.tf.seek(0)
            output = "{}".format(helpers.tf.read())
            helpers.mailout(helpers.MAILSUBJ_FP, output)
        sys.exit(1)

    # Break repos to publish into batches as configured in config.yml
    cvchunks = [
        ver_list.keys()[i:i + helpers.PUBLISHBATCH]
        for i in range(0, len(ver_list), helpers.PUBLISHBATCH)
    ]

    # Loop through the smaller subsets of repo id's
    for chunk in cvchunks:
        for cvid in chunk:

            # Check if there is a publish/promote already running on this content view
            locked = helpers.check_running_publish(ver_list[cvid],
                                                   ver_descr[cvid])

            if not locked:
                msg = "Publishing '" + str(
                    ver_descr[cvid]) + "' Version " + str(
                        ver_version[cvid]) + ".0"
                helpers.log_msg(msg, 'INFO')
                print helpers.HEADER + msg + helpers.ENDC

            if not dry_run and not locked:
                try:
                    task_id = helpers.post_json(
                        helpers.KATELLO_API + "content_views/" + str(ver_list[cvid]) +\
                        "/publish", json.dumps(
                            {
                                "description": description
                            }
                            ))["id"]
                except Warning:
                    msg = "Failed to initiate publication of " + str(
                        ver_descr[cvid])
                    helpers.log_msg(msg, 'WARNING')
                else:
                    task_list.append(task_id)
                    ref_list[task_id] = ver_descr[cvid]

        # Notify user in the case of a dry-run
        if dry_run:
            msg = "Dry run - not actually performing publish"
            helpers.log_msg(msg, 'WARNING')
        else:
            # Wait for the tasks to finish
            helpers.watch_tasks(task_list, ref_list, task_name, quiet)

    # Exit in the case of a dry-run
    if dry_run:
        sys.exit(2)
    else:
        return
Example No. 14
def promote(target_env, ver_list, ver_descr, ver_version, env_list, prior_list, dry_run, quiet, forcemeta):
    """Promote Content View"""
    target_env_id = env_list[target_env]
    source_env_id = prior_list[target_env_id]

    # Extract the name of the source environment so we can inform the user
    for key, val in env_list.items():
        if val == source_env_id:
            prior_env = key

    # Set the task name to be displayed in the task monitoring stage
    task_name = "Promotion from " + prior_env + " to " + target_env

    # Now we have all the info needed, we can actually trigger the promotion.
    # Loop through each CV with promotable versions
    task_list = []
    ref_list = {}

    # Catch scenario that no CV versions are found matching promotion criteria
    if not ver_list:
        msg = "No content view versions found matching promotion criteria"
        helpers.log_msg(msg, 'WARNING')
        if helpers.MAILOUT:
            helpers.tf.seek(0)
            output = "{}".format(helpers.tf.read())
            helpers.mailout(helpers.MAILSUBJ_FP, output)
        sys.exit(1)

    # Break repos to promote into batches as configured in config.yml
    cvchunks = [ ver_list.keys()[i:i+helpers.PROMOTEBATCH] for i in range(0, len(ver_list), helpers.PROMOTEBATCH) ]

    # Loop through the smaller subsets of repo id's
    for chunk in cvchunks:
        for cvid in chunk:

            # Check if there is a publish/promote already running on this content view
            locked = helpers.check_running_publish(cvid, ver_descr[cvid])

            if not locked:
                msg = "Promoting '" + str(ver_descr[cvid]) + "' Version " + str(ver_version[cvid]) +\
                    " from " + prior_env + " to " + str(target_env)
                helpers.log_msg(msg, 'INFO')
                print helpers.HEADER + msg + helpers.ENDC

            if not dry_run and not locked:
                try:
                    task_id = helpers.post_json(
                        helpers.KATELLO_API + "content_view_versions/" + str(ver_list[cvid]) +\
                        "/promote/", json.dumps(
                            {
                                "environment_id": target_env_id,
                                "force_yum_metadata_regeneration": str(forcemeta)
                            }
                            ))["id"]
                except Warning:
                    msg = "Failed to initiate promotion of " + str(ver_descr[cvid])
                    helpers.log_msg(msg, 'WARNING')
                else:
                    task_list.append(task_id)
                    ref_list[task_id] = ver_descr[cvid]

        # Exit in the case of a dry-run
        if dry_run:
            msg = "Dry run - not actually performing promotion"
            helpers.log_msg(msg, 'WARNING')
        else:
            # Monitor the status of the promotion tasks
            helpers.watch_tasks(task_list, ref_list, task_name, quiet)

    # Exit in the case of a dry-run
    if dry_run:
        sys.exit(2)
    else:
        return
Example No. 15
def import_icon_people(in_file):
    """Import households and people from icon.*.yaml files into IconCMO"""

    auth_data = helpers.get_auth_data()

    directory = os.path.dirname(in_file)

    households_file = open(os.path.join(directory, 'icon.households.yaml'),
                           'r')
    people_file = open(os.path.join(directory, 'icon.people.yaml'), 'r')

    households = yaml.load(households_file)
    people = yaml.load_all(people_file)

    household_creator = functools.partial(helpers.request_data_builder,
                                          helpers.get_auth_data(),
                                          'membership', 'households', 'create')

    member_creator = functools.partial(helpers.request_data_builder,
                                       helpers.get_auth_data(), 'membership',
                                       'members', 'create')

    household_retriever = functools.partial(helpers.request_data_builder,
                                            helpers.get_auth_data(),
                                            'membership', 'households', 'read')

    member_retriever = functools.partial(helpers.request_data_builder,
                                         helpers.get_auth_data(), 'membership',
                                         'members', 'read')

    i = 0
    for person in people:
        person_id = None
        household_id = None
        if 'id' in person: continue  # This person doesn't need to be imported.
        if person['household_id'] in households:
            household = households[person['household_id']]
            if 'id' not in household:  # This person's household needs to be imported.
                # if household['status'] in conf.DEFAULT_STATUSES:
                #     household['status'] = conf.DEFAULT_STATUSES[household['status']]
                hh_request = household_creator(household)
                # print hh_request
                hh_data = helpers.post_json(helpers.get_api_url(), hh_request)
                if 'number' in hh_data:
                    print 'Error (%d): %s' % (hh_data['number'],
                                              hh_data['message'])
                    if hh_data['number'] == 421:
                        hh_data = helpers.post_json(
                            helpers.get_api_url(),
                            household_retriever(
                                None, {
                                    'last_name': household['last_name'],
                                    'city': household['city'],
                                    'state': household['state']
                                }))
                        hh_string = '%s %s in %s, %s' % (
                            household['first_name'],
                            household['last_name'], household['city'],
                            household['state'])
                        if 'households' in hh_data and len(
                                hh_data['households']) == 1:
                            household_id = int(hh_data['households'][0]['id'])
                            print 'Found existing record for %s with ID %s' % (
                                hh_string, household_id)
                        else:
                            # hh_string must exist on this branch too (it previously raised NameError here)
                            print 'Unable to find certain match for %s' % hh_string
                elif 'statistics' in hh_data:
                    household_id = int(hh_data['statistics']['last_id'])
            else:
                household_id = int(household['id'])

            if household_id:
                person_data = helpers.post_json(
                    helpers.get_api_url(),
                    member_retriever(
                        None, {
                            'first_name': person['first_name'],
                            'last_name': person['last_name']
                        }))
                if 'members' in person_data and len(
                        person_data['members']) == 1:
                    print "%s %s already in IconCMO. Skipping." % (
                        person['first_name'], person['last_name'])
                    continue
                person['household_id'] = household_id
                # if person['status'] in conf.DEFAULT_STATUSES:
                #     person['status'] = conf.DEFAULT_STATUSES[person['status']]
                if 'phone' in person and person['phone'] == household['phone']:
                    if 'phones' in person:
                        for phone in person['phones']:
                            if phone['id'] == 'Cell':
                                person['phone'] = phone['phone']  # was '==', a comparison with no effect
                        del person['phones']
                if 'email' in person:
                    del person['email']

                for key in person.keys():
                    if key in household and person[key] == household[key]:
                        if key in ['status', 'last_name', 'first_name']:
                            continue
                        del person[key]
                p_request = member_creator(person)
                # print p_request
                p_data = helpers.post_json(helpers.get_api_url(), p_request)
                # print p_data
            else:
                print 'Unable to import %s %s due to problems with household identity.' % (
                    person['first_name'], person['last_name'])
        i += 1