Example #1
    def test_create_or_update_existing_type_collection(self):
        """
        Tests that calling create_or_update with a change to an existing type
        collection is successful.
        """

        # Setup
        type_def = TypeDefinition('rpm', 'RPM', 'RPM Packages', ['name'], ['name'], [])
        types_db._create_or_update_type(type_def)

        # Test
        type_def.display_name = 'new-name'
        type_def.description = 'new-description'
        type_def.unit_key = 'new-key'
        type_def.search_indexes = None
        types_db._create_or_update_type(type_def)

        # Verify

        #   Present in types collection
        all_types = list(ContentType.get_collection().find())
        self.assertEqual(1, len(all_types))

        found = all_types[0]
        self.assertEqual(type_def.id, found['id'])
        self.assertEqual(type_def.display_name, found['display_name'])
        self.assertEqual(type_def.description, found['description'])
        self.assertEqual(type_def.unit_key, found['unit_key'])
        self.assertEqual(type_def.search_indexes, found['search_indexes'])

        #   Type collection exists
        collection_name = types_db.unit_collection_name(type_def.id)
        self.assertTrue(collection_name in pulp_db.get_database().collection_names())
Example #2
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()

    # If 'repo_content_units' is not defined we don't need to do anything
    if 'repo_content_units' not in db.collection_names():
        return

    collection = db['repo_content_units']
    # Don't check whether we should run based on the index as that may have been cleared out
    # by a different migration
    if collection.find_one({'owner_type': {'$exists': True}}):
        remove_duplicates(collection)

        _logger.info("Removing unused fields (owner_type, owner_id) from repo_content_units")
        collection.update({}, {'$unset': {'owner_type': "", 'owner_id': ''}}, multi=True)

    index_info = collection.index_information()
    if "repo_id_-1_unit_type_id_-1_unit_id_-1_owner_type_-1_owner_id_-1" in index_info:
        _logger.info("Dropping the uniqueness index that included the owner_type & owner_id")
        collection.drop_index("repo_id_-1_unit_type_id_-1_unit_id_-1_owner_type_-1_owner_id_-1")
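
For reference, a minimal sketch of the same field cleanup under the pymongo 3.x API, where update(..., multi=True) is replaced by update_many(); it reuses the 'collection' handle from the migration above and is an illustration, not part of the original code:

    # pymongo 3.x sketch: strip the obsolete ownership fields from every association document
    collection.update_many({}, {'$unset': {'owner_type': '', 'owner_id': ''}})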
Example #3
def migrate(*args, **kwargs):
    """
    Adds a "checksums" DictField and populates it with the known checksum type and value.

    Templatizes the XML in "repodata".

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    try:
        ET.register_namespace('rpm', RPM_NAMESPACE)
    except AttributeError:
        # python 2.6 doesn't have the register_namespace function
        ET._namespace_map[RPM_NAMESPACE] = 'rpm'

    db = connection.get_database()
    rpm_collection = db['units_rpm']
    srpm_collection = db['units_srpm']
    drpm_collection = db['units_drpm']

    for rpm in rpm_collection.find({},
                                   ['checksum', 'checksumtype', 'repodata']):
        migrate_rpm_base(rpm_collection, rpm)
    for srpm in srpm_collection.find({},
                                     ['checksum', 'checksumtype', 'repodata']):
        migrate_rpm_base(srpm_collection, srpm)

    migrate_drpms(drpm_collection)
Example #4
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    repos = db['repos']
    repo_distributors = db['repo_distributors']
    repo_objects = repos.find({'notes': {'_repo-type': 'rpm-repo'}})
    for repo_object in repo_objects:
        distributors = list(repo_distributors.find({'repo_id': repo_object['repo_id']}))
        _clean_distributors_relative_url(repo_distributors, distributors)
        yum_distributor = _find_yum_distributor(distributors)
        for distributor in distributors:

            if distributor['distributor_type_id'] == 'export_distributor' and \
                    'relative_url' not in distributor['config']:

                if yum_distributor is None:
                    relative_url = repo_object['repo_id']
                else:
                    relative_url = yum_distributor['config']['relative_url']

                distributor['config']['relative_url'] = relative_url

                repo_distributors.update_one({'_id': distributor['_id']},
                                             {'$set': {'config': distributor['config']}})
Example #5
def rebuild_content_unit_counts(repository):
    """
    Update the content_unit_counts field on a Repository.

    :param repository: The repository to update
    :type repository: pulp.server.db.model.Repository
    """
    db = connection.get_database()

    pipeline = [{
        '$match': {
            'repo_id': repository.repo_id
        }
    }, {
        '$group': {
            '_id': '$unit_type_id',
            'sum': {
                '$sum': 1
            }
        }
    }]
    q = db.command('aggregate', 'repo_content_units', pipeline=pipeline)

    # Flip this into the form that we need
    counts = {}
    for result in q['result']:
        counts[result['_id']] = result['sum']

    repository.content_unit_counts = counts
    repository.save()
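
On pymongo 3.x the same counts could be collected through Collection.aggregate(), which returns a cursor directly instead of a command result document; a rough sketch reusing the db handle and pipeline built above (an illustration, not the function's actual code):

    counts = {}
    for result in db['repo_content_units'].aggregate(pipeline):
        counts[result['_id']] = result['sum']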
Example #6
def migrate(*args, **kwargs):
    """
    Adds a "checksums" DictField and populates it with the known checksum type and value.

    Templatizes the XML in "repodata".

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    try:
        ET.register_namespace('rpm', RPM_NAMESPACE)
    except AttributeError:
        # python 2.6 doesn't have the register_namespace function
        ET._namespace_map[RPM_NAMESPACE] = 'rpm'

    db = connection.get_database()
    rpm_collection = db['units_rpm']
    srpm_collection = db['units_srpm']
    drpm_collection = db['units_drpm']

    for rpm in rpm_collection.find({}, ['checksum', 'checksumtype', 'repodata']):
        migrate_rpm_base(rpm_collection, rpm)
    for srpm in srpm_collection.find({}, ['checksum', 'checksumtype', 'repodata']):
        migrate_rpm_base(srpm_collection, srpm)

    migrate_drpms(drpm_collection)
Example #7
def _create_or_update_type(type_def):
    """
    This method creates or updates a type definition in MongoDB.

    :param type_def: the type definition to update or create. If a type definition with the same
                     id as an existing type already exists, the type is updated; otherwise it is created.
    :type  type_def: ContentType

    :return: This method will always return None
    :rtype:  None
    """
    # Make sure a collection exists for the type
    database = pulp_db.get_database()
    collection_name = unit_collection_name(type_def.id)

    if collection_name not in database.collection_names():
        pulp_db.get_collection(collection_name, create=True)

    # Add or update an entry in the types list
    content_type_collection = ContentType.get_collection()
    content_type = ContentType(
        type_def.id, type_def.display_name, type_def.description, type_def.unit_key,
        type_def.search_indexes, type_def.referenced_types)
    # no longer rely on _id = id
    existing_type = content_type_collection.find_one({'id': type_def.id}, fields=[])
    if existing_type is not None:
        content_type._id = existing_type['_id']
    # XXX this still causes a potential race condition when 2 users are updating the same type
    content_type_collection.save(content_type, safe=True)
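
A hedged sketch of the same upsert on pymongo 3.x, where save() and the safe flag no longer exist; it assumes content_type behaves like a plain mapping, as Pulp's older model classes did, and reuses the names defined above:

    # pymongo 3.x sketch: upsert the type document keyed on its 'id' field
    content_type_collection.replace_one({'id': type_def.id}, content_type, upsert=True)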
Example #8
    def test_create_or_update_type_collection(self):
        """
        Tests that the call to create a new type collection works.
        """

        # Setup
        type_def = TypeDefinition('rpm', 'RPM', 'RPM Packages', ['name'], ['name'], [])

        # Test
        types_db._create_or_update_type(type_def)

        # Verify

        #   Present in types collection
        all_types = list(ContentType.get_collection().find())
        self.assertEqual(1, len(all_types))

        found = all_types[0]
        self.assertEqual(type_def.id, found['id'])
        self.assertEqual(type_def.display_name, found['display_name'])
        self.assertEqual(type_def.description, found['description'])
        self.assertEqual(type_def.unit_key, found['unit_key'])
        self.assertEqual(type_def.search_indexes, found['search_indexes'])

        #   Type collection exists
        collection_name = types_db.unit_collection_name(type_def.id)
        self.assertTrue(collection_name in pulp_db.get_database().collection_names())
Example #9
def migrate(*args, **kwargs):
    """
    Move erratum pkglists to a separate collection.

    It is safe to run this migration multiple times.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    # create indexes for the erratum_pkglists collection before migrating data;
    # this is important if the migration was interrupted and then re-run
    indexes = ErratumPkglist._meta['indexes']
    ErratumPkglist._meta['index_specs'] = ErratumPkglist._build_index_specs(indexes)
    ErratumPkglist.ensure_indexes()

    db = connection.get_database()
    erratum_collection = db['units_erratum']
    pkglist_collection = db['erratum_pkglists']

    total_erratum_units = erratum_collection.count()

    with utils.MigrationProgressLog('Erratum', total_erratum_units) as migration_log:
        for erratum in erratum_collection.find({}, ['errata_id', 'pkglist']).batch_size(100):
            migrate_erratum_pkglist(erratum_collection, pkglist_collection, erratum)
            migration_log.progress()
Example #10
    def test_create_or_update_existing_type_collection(self):
        """
        Tests that calling create_or_update with a change to an existing type
        collection is successful.
        """

        # Setup
        type_def = TypeDefinition("rpm", "RPM", "RPM Packages", ["name"], ["name"], [])
        types_db._create_or_update_type(type_def)

        # Test
        type_def.display_name = "new-name"
        type_def.description = "new-description"
        type_def.unit_key = "new-key"
        type_def.search_indexes = None
        types_db._create_or_update_type(type_def)

        # Verify

        #   Present in types collection
        all_types = list(ContentType.get_collection().find())
        self.assertEqual(1, len(all_types))

        found = all_types[0]
        self.assertEqual(type_def.id, found["id"])
        self.assertEqual(type_def.display_name, found["display_name"])
        self.assertEqual(type_def.description, found["description"])
        self.assertEqual(type_def.unit_key, found["unit_key"])
        self.assertEqual(type_def.search_indexes, found["search_indexes"])

        #   Type collection exists
        collection_name = types_db.unit_collection_name(type_def.id)
        self.assertTrue(collection_name in pulp_db.get_database().collection_names())
Example #11
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()

    collection = db["units_distribution"]
    collection.update({}, {"$rename": {"id": "distribution_id"}})
    _drop_and_silence_exception(collection, "id_1")
    _drop_and_silence_exception(collection, "id_1_family_1_variant_1_version_1_arch_1")

    collection = db["units_erratum"]
    collection.update({}, {"$rename": {"id": "errata_id"}})
    collection.update({}, {"$rename": {"from": "errata_from"}})
    _drop_and_silence_exception(collection, "id_1")

    collection = db["units_package_group"]
    collection.update({}, {"$rename": {"id": "package_group_id"}})
    _drop_and_silence_exception(collection, "id_1")
    _drop_and_silence_exception(collection, "id_1_repo_id_1")

    collection = db["units_package_category"]
    collection.update({}, {"$rename": {"id": "package_category_id"}})
    _drop_and_silence_exception(collection, "id_1")
    _drop_and_silence_exception(collection, "id_1_repo_id_1")

    collection = db["units_package_environment"]
    collection.update({}, {"$rename": {"id": "package_environment_id"}})
    _drop_and_silence_exception(collection, "id_1")
    _drop_and_silence_exception(collection, "id_1_repo_id_1")
Example #12
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    errata_collection = db['units_erratum']

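    # BSON type 10 is null, so this first pass removes 'pushcount' fields stored as explicit nulls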
    for erratum in errata_collection.find({'pushcount': {'$type': 10}}, {'pushcount': 1}):
        errata_collection.update({'_id': erratum['_id']}, {'$unset': {'pushcount': ""}})

    for erratum in errata_collection.find({'pushcount': {'$exists': True}}, {'pushcount': 1}):
        changed = False
        if not isinstance(erratum['pushcount'], basestring):
            if isinstance(erratum['pushcount'], float):
                erratum['pushcount'] = int(erratum['pushcount'])
            if isinstance(erratum['pushcount'], int):
                changed = True
                erratum['pushcount'] = str(erratum['pushcount'])
        if changed:
            errata_collection.update({'_id': erratum['_id']},
                                     {'$set': {'pushcount': erratum['pushcount']}})
Example #13
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    repos = db['repos']
    repo_distributors = db['repo_distributors']
    repo_objects = repos.find({'notes': {'_repo-type': 'rpm-repo'}})
    for repo_object in repo_objects:
        distributors = list(
            repo_distributors.find({'repo_id': repo_object['repo_id']}))
        _clean_distributors_relative_url(repo_distributors, distributors)
        yum_distributor = _find_yum_distributor(distributors)
        for distributor in distributors:

            if distributor['distributor_type_id'] == 'export_distributor' and \
                    'relative_url' not in distributor['config']:

                if yum_distributor is None:
                    relative_url = repo_object['repo_id']
                else:
                    relative_url = yum_distributor['config']['relative_url']

                distributor['config']['relative_url'] = relative_url

                repo_distributors.update_one(
                    {'_id': distributor['_id']},
                    {'$set': {
                        'config': distributor['config']
                    }})
Example #14
def _create_or_update_type(type_def):
    """
    This method creates or updates a type definition in MongoDB.

    :param type_def: the type definition to update or create. If a type definition with the same
                     id as an existing type already exists, the type is updated; otherwise it is created.
    :type  type_def: ContentType

    :return: This method will always return None
    :rtype:  None
    """
    # Make sure a collection exists for the type
    database = connection.get_database()
    collection_name = unit_collection_name(type_def.id)

    if collection_name not in database.collection_names():
        connection.get_collection(collection_name, create=True)

    # Add or update an entry in the types list
    content_type_collection = ContentType.get_collection()
    content_type = ContentType(type_def.id, type_def.display_name,
                               type_def.description, type_def.unit_key,
                               type_def.search_indexes,
                               type_def.referenced_types)
    # no longer rely on _id = id
    existing_type = content_type_collection.find_one({'id': type_def.id},
                                                     projection=[])
    if existing_type is not None:
        content_type._id = existing_type['_id']
    # XXX this still causes a potential race condition when 2 users are updating the same type
    content_type_collection.save(content_type)
Example #15
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.
    """
    db = connection.get_database()
    collection = db['units_python_package']
    set_packagetype(collection)
    update_fields(collection)
    collection.drop_indexes()
Example #16
def migrate(*args, **kwargs):
    """
    Compress the content of repodata field for RPM and SRPM units.
    Migration can be safely re-run multiple times.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    rpm_collection = db['units_rpm']
    srpm_collection = db['units_srpm']

    total_rpm_units = rpm_collection.count()
    total_srpm_units = srpm_collection.count()

    msg = '* NOTE: This migration may take some time depending on the size of your Pulp content. *'
    stars = '*' * len(msg)
    progress_msg = '* Migrated units: %s of %s'

    _logger.info(stars)
    _logger.info(msg)
    _logger.info(stars)

    if total_rpm_units:
        _logger.info('* Migrating RPM content...')

    migrated_units = 0
    for rpm in rpm_collection.find({}, ['repodata']).batch_size(100):
        migrate_rpm_base(rpm_collection, rpm)

        migrated_units += 1
        another_ten_percent_completed = total_rpm_units >= 10 and \
            not migrated_units % (total_rpm_units // 10)
        all_units_migrated = migrated_units == total_rpm_units
        if another_ten_percent_completed or all_units_migrated:
            _logger.info(progress_msg % (migrated_units, total_rpm_units))

    if total_srpm_units:
        _logger.info('* Migrating SRPM content...')

    migrated_units = 0
    for srpm in srpm_collection.find({}, ['repodata']).batch_size(100):
        migrate_rpm_base(srpm_collection, srpm)

        migrated_units += 1
        another_ten_percent_completed = total_srpm_units >= 10 and \
            not migrated_units % (total_srpm_units // 10)
        all_units_migrated = migrated_units == total_srpm_units
        if another_ten_percent_completed or all_units_migrated:
            _logger.info(progress_msg % (migrated_units, total_srpm_units))

    _logger.info(stars)
Example #17
def migrate(*args, **kwargs):
    """
    Compress the content of repodata field for RPM and SRPM units.
    Migration can be safely re-run multiple times.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    rpm_collection = db['units_rpm']
    srpm_collection = db['units_srpm']

    total_rpm_units = rpm_collection.count()
    total_srpm_units = srpm_collection.count()

    msg = '* NOTE: This migration may take some time depending on the size of your Pulp content. *'
    stars = '*' * len(msg)
    progress_msg = '* Migrated units: %s of %s'

    _logger.info(stars)
    _logger.info(msg)
    _logger.info(stars)

    if total_rpm_units:
        _logger.info('* Migrating RPM content...')

    migrated_units = 0
    for rpm in rpm_collection.find({}, ['repodata']).batch_size(100):
        migrate_rpm_base(rpm_collection, rpm)

        migrated_units += 1
        another_ten_percent_completed = total_rpm_units >= 10 and \
            not migrated_units % (total_rpm_units // 10)
        all_units_migrated = migrated_units == total_rpm_units
        if another_ten_percent_completed or all_units_migrated:
            _logger.info(progress_msg % (migrated_units, total_rpm_units))

    if total_srpm_units:
        _logger.info('* Migrating SRPM content...')

    migrated_units = 0
    for srpm in srpm_collection.find({}, ['repodata']).batch_size(100):
        migrate_rpm_base(srpm_collection, srpm)

        migrated_units += 1
        another_ten_percent_completed = total_srpm_units >= 10 and \
            not migrated_units % (total_srpm_units // 10)
        all_units_migrated = migrated_units == total_srpm_units
        if another_ten_percent_completed or all_units_migrated:
            _logger.info(progress_msg % (migrated_units, total_srpm_units))

    _logger.info(stars)
Example #18
def migrate(*args, **kwargs):
    """
    Ensure all content units have the _last_updated attribute.
    """
    database = connection.get_database()
    for name in database.collection_names():
        if not name.startswith(TYPE_COLLECTION_PREFIX):
            continue
        collection = connection.get_collection(name)
        for unit in collection.find(QUERY):
            unit[LAST_UPDATED] = NEVER
            collection.save(unit)
Example #19
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    collection = db['celery_taskmeta']
    collection.drop()
Example #20
def migrate(*args, **kwargs):
    """
    Ensure all content units have the _last_updated attribute.
    """
    database = connection.get_database()
    for name in database.collection_names():
        if not name.startswith(TYPE_COLLECTION_PREFIX):
            continue
        collection = connection.get_collection(name)
        for unit in collection.find(QUERY):
            unit[LAST_UPDATED] = NEVER
            collection.save(unit, safe=True)
Example #21
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    collection = db['repos']
    collection.drop_index("id_-1")
    collection.update({}, {"$rename": {"id": "repo_id"}}, multi=True)
Example #22
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()

    fix_translated_fields_string_to_dict(db['units_package_category'])
    fix_translated_fields_string_to_dict(db['units_package_environment'])
    fix_translated_fields_string_to_dict(db['units_package_group'])
Example #23
def migrate(*args, **kwargs):
    """
    Clean up duplicated collections in erratum pkglist.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    erratum_collection = db['units_erratum']

    for erratum in erratum_collection.find({}, ['pkglist']).batch_size(100):
        migrate_erratum(erratum_collection, erratum)
Example #24
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    repos_collection = db['repos']
    for repo in repos_collection.find():
        repo_id = repo['repo_id']
        rebuild_content_unit_counts(db, repo_id)
Example #25
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    repos_queryset = db['repos'].find({}, {'repo_id': 1})
    repo_ids = [x['repo_id'] for x in repos_queryset]
    for repo_id in repo_ids:
        rebuild_content_unit_counts(db, repo_id)
Example #26
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    repos_collection = db['repos']
    for repo in repos_collection.find():
        repo_id = repo['repo_id']
        rebuild_content_unit_counts(db, repo_id)
Example #27
def migrate(*args, **kwargs):
    """
    Clean up duplicated collections in erratum pkglist.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    erratum_collection = db['units_erratum']

    for erratum in erratum_collection.find({}, ['pkglist']).batch_size(100):
        migrate_erratum(erratum_collection, erratum)
Example #28
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()

    fix_translated_fields_string_to_dict(db['units_package_category'])
    fix_translated_fields_string_to_dict(db['units_package_environment'])
    fix_translated_fields_string_to_dict(db['units_package_group'])
Example #29
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    collection = db['repo_importers']
    collection.update({}, {"$unset": {"id": True}}, multi=True)
    collection.drop_index("repo_id_-1_id_-1")
    collection.update({}, {"$unset": {"scheduled_syncs": ""}}, multi=True)
Example #30
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    collection = db['units_rpm']
    collection.update(
        {"checksum_type": {"$exists": True}},
        {"$unset": {"checksum_type": True}},
        multi=True)
Example #31
def get_mongo_conn_status():
    """
    Perform a simple mongo operation and return success or failure.

    This uses a "raw" pymongo Collection to avoid any auto-retry logic.

    :returns:          mongo connection status, e.g. {'connected': True}
    :rtype:            dict
    """
    try:
        db = connection.get_database()
        db.workers.count()
        return {'connected': True}
    except:
        return {'connected': False}
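
On newer pymongo releases, where Collection.count() is deprecated, a comparable liveness probe could use the server's ping command instead; a minimal sketch of the same try/except shape (an assumption about the target pymongo version, not Pulp's shipped code):

    def get_mongo_conn_status():
        try:
            connection.get_database().command('ping')  # cheap round-trip to the server
            return {'connected': True}
        except Exception:
            return {'connected': False}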
Example #32
def get_mongo_conn_status():
    """
    Perform a simple mongo operation and return success or failure.

    This uses a "raw" pymongo Collection to avoid any auto-retry logic.

    :returns:          mongo connection status, e.g. {'connected': True}
    :rtype:            dict
    """
    try:
        db = connection.get_database()
        db.workers.count()
        return {'connected': True}
    except:
        return {'connected': False}
Example #33
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    units_distribution_collection = db['units_distribution']
    units_distribution_collection.update(
        {'pulp_distribution_xml_file': {'$exists': True}},
        {'$unset': {'pulp_distribution_xml_file': True}},
        multi=True
    )
Example #34
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    units_drpm_collection = db['units_drpm']
    units_drpm_collection.update(
        {'relativepath': {'$exists': True}},
        {'$unset': {'relativepath': True}},
        multi=True
    )
Example #35
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()

    # find repos but return only repo_id using projection
    repos_queryset = db['repos'].find({}, {'repo_id': 1})

    repo_ids = [x['repo_id'] for x in repos_queryset]
    for repo_id in repo_ids:
        rebuild_content_unit_counts(db, repo_id)
Example #36
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    units_erratum_collection = db['units_erratum']
    units_erratum_collection.update(
        {'_rpm_references': {'$exists': True}},
        {'$unset': {'_rpm_references': True}},
        multi=True)
Example #37
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    collection = db['units_rpm']
    collection.update({"checksum_type": {
        "$exists": True
    }}, {"$unset": {
        "checksum_type": True
    }},
                      multi=True)
Example #38
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    # If 'archived_calls' is not present, do nothing.
    if 'archived_calls' not in db.collection_names():
        return

    # If it's present, drop it
    collection = db['archived_calls']
    collection.drop()
    msg = _("Deleted the archived_calls collection.")
    _logger.info(msg)
Example #39
def _create_or_update_type(type_def):
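    """
    Create or update a type definition in MongoDB. If a type definition with the same
    id already exists, it is updated; otherwise it is created.

    :param type_def: the type definition to update or create
    :type  type_def: ContentType
    """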

    # Make sure a collection exists for the type
    database = pulp_db.get_database()
    collection_name = unit_collection_name(type_def.id)

    if collection_name not in database.collection_names():
        pulp_db.get_collection(collection_name, create=True)

    # Add or update an entry in the types list
    content_type_collection = ContentType.get_collection()
    content_type = ContentType(type_def.id, type_def.display_name, type_def.description,
                               type_def.unit_key, type_def.search_indexes, type_def.referenced_types)
    # no longer rely on _id = id
    existing_type = content_type_collection.find_one({'id': type_def.id}, fields=[])
    if existing_type is not None:
        content_type._id = existing_type['_id']
    # XXX this still causes a potential race condition when 2 users are updating the same type
    content_type_collection.save(content_type, safe=True)
Example #40
def migrate(*args, **kwargs):
    """
    Update uploaded RPMs' and SRPMs' metadata to contain the same data as if they had been synced.
    This migration operates on all units whose group and summary fields are empty, which includes
    more than just uploaded units.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    rpm_collection = db['units_rpm']
    srpm_collection = db['units_srpm']

    for rpm in rpm_collection.find({"group": None, "summary": None}):
        fix_metadata(rpm_collection, rpm)
    for srpm in srpm_collection.find({"group": None, "summary": None}):
        fix_metadata(srpm_collection, srpm)
Example #41
def rebuild_content_unit_counts(repository):
    """
    Update the content_unit_counts field on a Repository.

    :param repository: The repository to update
    :type repository: pulp.server.db.model.Repository
    """
    db = connection.get_database()

    pipeline = [{"$match": {"repo_id": repository.repo_id}}, {"$group": {"_id": "$unit_type_id", "sum": {"$sum": 1}}}]
    q = db.command("aggregate", "repo_content_units", pipeline=pipeline)

    # Flip this into the form that we need
    counts = {}
    for result in q["result"]:
        counts[result["_id"]] = result["sum"]

    # Use the raw query since there is currently a conflict with the id and the repo_id fields
    model.Repository.objects(__raw__={"id": repository.repo_id}).update_one(set__content_unit_counts=counts)
Example #42
def migrate(*args, **kwargs):
    """
    Update uploaded RPMs' and SRPMs' metadata to contain the same data as if they had been synced.
    This migration operates on all units whose group and summary fields are empty, which includes
    more than just uploaded units.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    rpm_collection = db['units_rpm']
    srpm_collection = db['units_srpm']

    for rpm in rpm_collection.find({"group": None, "summary": None}):
        fix_metadata(rpm_collection, rpm)
    for srpm in srpm_collection.find({"group": None, "summary": None}):
        fix_metadata(srpm_collection, srpm)
Example #43
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()

    collection = db['units_distribution']
    _drop_and_silence_exception(collection, 'id_1')
    _drop_and_silence_exception(collection,
                                'id_1_family_1_variant_1_version_1_arch_1')
    collection.update({}, {"$rename": {"id": "distribution_id"}}, multi=True)

    collection = db['units_erratum']
    _drop_and_silence_exception(collection, 'id_1')
    collection.update({}, {"$rename": {"id": "errata_id"}}, multi=True)
    collection.update({}, {"$rename": {"from": "errata_from"}}, multi=True)

    collection = db['units_package_group']
    _drop_and_silence_exception(collection, 'id_1')
    _drop_and_silence_exception(collection, 'id_1_repo_id_1')
    collection.update({}, {"$rename": {"id": "package_group_id"}}, multi=True)

    collection = db['units_package_category']
    _drop_and_silence_exception(collection, 'id_1')
    _drop_and_silence_exception(collection, 'id_1_repo_id_1')
    collection.update({}, {"$rename": {
        "id": "package_category_id"
    }},
                      multi=True)

    collection = db['units_package_environment']
    _drop_and_silence_exception(collection, 'id_1')
    _drop_and_silence_exception(collection, 'id_1_repo_id_1')
    collection.update({}, {"$rename": {
        "id": "package_environment_id"
    }},
                      multi=True)
Example #44
def clean():
    """
    Purges the database of all types and their associated collections. This
    isn't really meant to be run from Pulp server code but rather as a utility
    for test cases.
    """

    LOG.info('Purging the database of all content type definitions and collections')

    # Search the database instead of just going on what's in the type listing
    # just in case they got out of sync
    database = pulp_db.get_database()
    all_collection_names = database.collection_names()
    type_collection_names = [n for n in all_collection_names if n.startswith(TYPE_COLLECTION_PREFIX)]
    for drop_me in type_collection_names:
        database[drop_me].drop()

    # Purge the types collection of all entries
    type_collection = ContentType.get_collection()
    type_collection.remove(safe=True)
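
Under pymongo 3.x, where remove() and the safe flag are gone, the same purge of the types collection could be written with delete_many(); a sketch against the same handle as above, not the original code:

    # pymongo 3.x sketch: remove every document from the types collection
    type_collection.delete_many({})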
Example #45
def migrate(*args, **kwargs):
    """
    Ensure all importers & distributors and associated results collections
    have their timestamp fields in UTC.
    """
    fields_to_update = [['repo_distributors', 'last_publish'],
                        ['repo_group_distributors', 'last_publish'],
                        ['repo_publish_results', 'started'],
                        ['repo_publish_results', 'completed'],
                        ['repo_group_publish_results', 'started'],
                        ['repo_group_publish_results', 'completed'],
                        ['repo_importers', 'last_sync'],
                        ['repo_sync_results', 'started'],
                        ['repo_sync_results', 'completed']]

    db = connection.get_database()
    collection_list = db.collection_names()
    for collection_name, field_name in fields_to_update:
        if collection_name in collection_list:
            update_time_to_utc_on_collection(collection_name, field_name)
Example #46
def migrate(*args, **kwargs):
    """
    Move erratum pkglists to a separate collection.

    It is safe to run this migration multiple times.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()
    erratum_collection = db['units_erratum']
    pkglist_collection = db['erratum_pkglists']

    total_erratum_units = erratum_collection.count()

    with utils.MigrationProgressLog('Erratum', total_erratum_units) as migration_log:
        for erratum in erratum_collection.find({}, ['errata_id', 'pkglist']).batch_size(100):
            migrate_erratum_pkglist(erratum_collection, pkglist_collection, erratum)
            migration_log.progress()
Example #47
def rebuild_content_unit_counts(repository):
    """
    Update the content_unit_counts field on a Repository.

    :param repository: The repository to update
    :type repository: pulp.server.db.model.Repository
    """
    db = connection.get_database()

    pipeline = [
        {'$match': {'repo_id': repository.repo_id}},
        {'$group': {'_id': '$unit_type_id', 'sum': {'$sum': 1}}}]
    q = db.command('aggregate', 'repo_content_units', pipeline=pipeline)

    # Flip this into the form that we need
    counts = {}
    for result in q['result']:
        counts[result['_id']] = result['sum']

    repository.content_unit_counts = counts
    repository.save()
Example #48
def migrate(*args, **kwargs):
    """
    Ensure all importers & distributors and associated results collections
    have their timestamp fields in UTC.
    """
    fields_to_update = [['repo_distributors', 'last_publish'],
                        ['repo_group_distributors', 'last_publish'],
                        ['repo_publish_results', 'started'],
                        ['repo_publish_results', 'completed'],
                        ['repo_group_publish_results', 'started'],
                        ['repo_group_publish_results', 'completed'],
                        ['repo_importers', 'last_sync'],
                        ['repo_sync_results', 'started'],
                        ['repo_sync_results', 'completed']
                        ]

    db = connection.get_database()
    collection_list = db.collection_names()
    for collection_name, field_name in fields_to_update:
        if collection_name in collection_list:
            update_time_to_utc_on_collection(collection_name, field_name)
Example #49
def rebuild_content_unit_counts(repository):
    """
    Update the content_unit_counts field on a Repository.

    :param repository: The repository to update
    :type repository: pulp.server.db.model.Repository
    """
    db = connection.get_database()

    pipeline = [
        {'$match': {'repo_id': repository.repo_id}},
        {'$group': {'_id': '$unit_type_id', 'sum': {'$sum': 1}}}]
    q = db.command('aggregate', 'repo_content_units', pipeline=pipeline)

    # Flip this into the form that we need
    counts = {}
    for result in q['result']:
        counts[result['_id']] = result['sum']

    # Use the raw query since there is currently a conflict with the id and the repo_id fields
    model.Repository.objects(__raw__={'id': repository.repo_id}).update_one(
        set__content_unit_counts=counts)
Example #50
def migrate(*args, **kwargs):
    """
    Migrate RPM and SRPM locations to the new format "Packages/[a-z]/.*".

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    try:
        ET.register_namespace('rpm', RPM_NAMESPACE)
    except AttributeError:
        # python 2.6 doesn't have the register_namespace function
        ET._namespace_map[RPM_NAMESPACE] = 'rpm'

    db = connection.get_database()
    rpm_collection = db['units_rpm']
    srpm_collection = db['units_srpm']

    for rpm in rpm_collection.find({}, ['repodata', 'filename']):
        migrate_rpm_base(rpm_collection, rpm)
    for srpm in srpm_collection.find({}, ['repodata', 'filename']):
        migrate_rpm_base(srpm_collection, srpm)
Example #51
def migrate(*args, **kwargs):
    """
    Perform the migration as described in this module's docblock.

    :param args:   unused
    :type  args:   list
    :param kwargs: unused
    :type  kwargs: dict
    """
    db = connection.get_database()

    # If 'repo_content_units' is not defined we don't need to do anything
    if 'repo_content_units' not in db.collection_names():
        return

    collection = db['repo_content_units']
    # Don't check whether we should run based on the index as that may have been cleared out
    # by a different migration
    if collection.find_one({'owner_type': {'$exists': True}}):
        remove_duplicates(collection)

        _logger.info("Removing unused fields (owner_type, owner_id) from repo_content_units")
        collection.update({}, {'$unset': {'owner_type': '', 'owner_id': ''}}, multi=True)

    index_info = collection.index_information()
    if "repo_id_-1_unit_type_id_-1_unit_id_-1_owner_type_-1_owner_id_-1" in index_info:
        _logger.info("Dropping the uniqueness index that included the owner_type & owner_id")
        collection.drop_index("repo_id_-1_unit_type_id_-1_unit_id_-1_owner_type_-1_owner_id_-1")