Example No. 1
def uploadPackages(info, source=0, force=0, caller=None):
    log_debug(4, source, force, caller)
    batch = Collection()
    packageList = info.get("packages") or []
    if not packageList:
        raise Exception("Nothing to do")

    org_id = info.get('orgId')
    if org_id == '':
        org_id = None

    if source:
        channelList = []
    else:
        channelList = info.get("channels") or []

    for package in packageList:
        p = __processPackage(package, org_id, channelList, source)
        batch.append(p)

    if CFG.DB_BACKEND == ORACLE:
        from spacewalk.server.importlib.backendOracle import OracleBackend
        backend = OracleBackend()
    elif CFG.DB_BACKEND == POSTGRESQL:
        from spacewalk.server.importlib.backendOracle import PostgresqlBackend
        backend = PostgresqlBackend()

    backend.init()
    importer = packageImporter(batch, backend, source, caller=caller)

    importer.setUploadForce(force)

    importer.run()
    if not source:
        importer.subscribeToChannels()

    # Split the result in two lists - already uploaded and new packages
    newpkgs = []
    uploaded = []
    for pkg in importer.status():
        if pkg.ignored or pkg.diff:
            uploaded.append(pkg)
        else:
            newpkgs.append(pkg)

    # Schedule an errata cache update only if we touched the channels
    if not source:
        # makes sense only for binary packages
        schedule_errata_cache_update(importer.affected_channels)
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
                importer.affected_channels, batch, caller)
        rhnSQL.commit()

    return _formatStatus(uploaded), _formatStatus(newpkgs)
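
A minimal call sketch for the function above, assuming an info payload with only the keys uploadPackages actually reads ("packages", "channels", "orgId"); the per-package dict format is whatever __processPackage expects, so the entry shown is an illustrative placeholder, not the confirmed payload, and the caller string is made up.

# Hypothetical usage sketch - only the top-level keys are taken from the code above.
info = {
    'orgId': 1,
    'channels': ['my-custom-channel'],     # ignored when source=1
    'packages': [{'name': 'foo', 'version': '1.0',
                  'release': '1', 'epoch': None, 'arch': 'noarch'}],
}
uploaded, new = uploadPackages(info, source=0, force=0, caller="rhnpush.main")
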
Example No. 2
def uploadPackages(info, source=0, force=0, caller=None):
    log_debug(4, source, force, caller)
    batch = Collection()
    packageList = info.get("packages") or []
    if not packageList:
        raise Exception("Nothing to do")

    org_id = info.get('orgId')
    if org_id == '':
        org_id = None

    if source:
        channelList = []
    else:
        channelList = info.get("channels") or []

    for package in packageList:
        p = __processPackage(package, org_id, channelList, source)
        batch.append(p)

    backend = SQLBackend()
    importer = packageImporter(batch, backend, source, caller=caller)

    importer.setUploadForce(force)

    importer.run()
    if not source:
        importer.subscribeToChannels()

    # Split the result in two lists - already uploaded and new packages
    newpkgs = []
    uploaded = []
    for pkg in importer.status():
        if pkg.ignored or pkg.diff:
            uploaded.append(pkg)
        else:
            newpkgs.append(pkg)

    # Schedule an errata cache update only if we touched the channels
    if not source:
        # makes sense only for binary packages
        schedule_errata_cache_update(importer.affected_channels)
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            importer.affected_channels, batch, caller)
        rhnSQL.commit()

    return _formatStatus(uploaded), _formatStatus(newpkgs)
Example No. 3
    def _channelPackageSubscription(self, authobj, info):
        # Authorize the org id passed
        authobj.authzOrg(info)

        packageList = info.get('packages') or []
        if not packageList:
            log_debug(1, "No packages found; done")
            return 0

        if 'channels' not in info or not info['channels']:
            log_debug(1, "No channels found; done")
            return 0

        channelList = info['channels']
        authobj.authzChannels(channelList)

        # Have to turn the channel list into a list of Channel objects
        channelList = [Channel().populate({'label': x}) for x in channelList]

        # Since we're dealing with superusers, we allow them to change the org
        # id
        # XXX check if we don't open ourselves too much (misa 20030422)
        org_id = info.get('orgId')
        if org_id == '':
            org_id = None

        batch = Collection()
        package_keys = ['name', 'version', 'release', 'epoch', 'arch']
        for package in packageList:
            for k in package_keys:
                if k not in package:
                    raise Exception("Missing key %s" % k)
                if k == 'epoch':
                    if package[k] is not None:
                        if package[k] == '':
                            package[k] = None
                        else:
                            package[k] = str(package[k])
                else:
                    package[k] = str(package[k])

            if package['arch'] == 'src' or package['arch'] == 'nosrc':
                # Source package - no reason to continue
                continue
            _checksum_sql_filter = ""
            if 'md5sum' in package:  # for old rhnpush compatibility
                package['checksum_type'] = 'md5'
                package['checksum'] = package['md5sum']

            exec_args = {
                'name':    package['name'],
                'pkg_epoch':   package['epoch'],
                'pkg_version': package['version'],
                'pkg_rel':     package['release'],
                'pkg_arch':    package['arch'],
                'orgid':       org_id
            }

            if 'checksum' in package and CFG.ENABLE_NVREA:
                _checksum_sql_filter = """and c.checksum = :checksum
                                          and c.checksum_type = :checksum_type"""
                exec_args.update({'checksum_type': package['checksum_type'],
                                  'checksum':      package['checksum']})

            h = rhnSQL.prepare(self._get_pkg_info_query %
                               _checksum_sql_filter)
            h.execute(**exec_args)
            row = h.fetchone_dict()

            package['checksum_type'] = row['checksum_type']
            package['checksum'] = row['checksum']
            package['org_id'] = org_id
            package['channels'] = channelList
            batch.append(IncompletePackage().populate(package))

        caller = "server.app.channelPackageSubscription"

        backend = SQLBackend()
        importer = ChannelPackageSubscription(batch, backend, caller=caller)
        try:
            importer.run()
        except IncompatibleArchError:
            e = sys.exc_info()[1]
            raise_with_tb(rhnFault(50, " ".join(e.args), explain=0), sys.exc_info()[2])
        except InvalidChannelError:
            e = sys.exc_info()[1]
            raise_with_tb(rhnFault(50, str(e), explain=0), sys.exc_info()[2])

        affected_channels = importer.affected_channels

        log_debug(3, "Computing errata cache for systems affected by channels",
                  affected_channels)

        schedule_errata_cache_update(affected_channels)
        rhnSQL.commit()

        return 0
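
For reference, a hedged sketch of the info payload this handler validates; the per-package keys mirror package_keys above, while "handler" and "authobj" are assumed, pre-existing objects (the XML-RPC handler instance and its authorization object) and are not defined in this excerpt.

# Illustrative payload only - the keys match the checks performed above.
info = {
    'orgId': 1,                          # '' would be normalized to None
    'channels': ['my-custom-channel'],
    'packages': [{
        'name': 'foo', 'version': '1.0', 'release': '1',
        'epoch': None,                   # '' is also accepted and mapped to None
        'arch': 'noarch',                # 'src'/'nosrc' entries are skipped
    }],
}
handler._channelPackageSubscription(authobj, info)   # returns 0 on success
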
Example No. 4
    def _channelPackageSubscription(self, authobj, info):
        # Authorize the org id passed
        authobj.authzOrg(info)

        packageList = info.get('packages') or []
        if not packageList:
            log_debug(1, "No packages found; done")
            return 0

        if 'channels' not in info or not info['channels']:
            log_debug(1, "No channels found; done")
            return 0

        channelList = info['channels']
        authobj.authzChannels(channelList)

        # Have to turn the channel list into a list of Channel objects
        channelList = [Channel().populate({'label': x}) for x in channelList]

        # Since we're dealing with superusers, we allow them to change the org
        # id
        # XXX check if we don't open ourselves too much (misa 20030422)
        org_id = info.get('orgId')
        if org_id == '':
            org_id = None

        batch = Collection()
        package_keys = ['name', 'version', 'release', 'epoch', 'arch']
        for package in packageList:
            for k in package_keys:
                if k not in package:
                    raise Exception("Missing key %s" % k)
                if k == 'epoch':
                    if package[k] is not None:
                        if package[k] == '':
                            package[k] = None
                        else:
                            package[k] = str(package[k])
                else:
                    package[k] = str(package[k])

            if package['arch'] == 'src' or package['arch'] == 'nosrc':
                # Source package - no reason to continue
                continue
            _checksum_sql_filter = ""
            if 'md5sum' in package:  # for old rhnpush compatibility
                package['checksum_type'] = 'md5'
                package['checksum'] = package['md5sum']

            exec_args = {
                'name': package['name'],
                'pkg_epoch': package['epoch'],
                'pkg_version': package['version'],
                'pkg_rel': package['release'],
                'pkg_arch': package['arch'],
                'orgid': org_id
            }

            if 'checksum' in package and CFG.ENABLE_NVREA:
                _checksum_sql_filter = """and c.checksum = :checksum
                                          and c.checksum_type = :checksum_type"""
                exec_args.update({
                    'checksum_type': package['checksum_type'],
                    'checksum': package['checksum']
                })

            h = rhnSQL.prepare(self._get_pkg_info_query % _checksum_sql_filter)
            h.execute(**exec_args)
            row = h.fetchone_dict()

            package['checksum_type'] = row['checksum_type']
            package['checksum'] = row['checksum']
            package['org_id'] = org_id
            package['channels'] = channelList
            batch.append(IncompletePackage().populate(package))

        caller = "server.app.channelPackageSubscription"

        backend = SQLBackend()
        importer = ChannelPackageSubscription(batch, backend, caller=caller)
        try:
            importer.run()
        except IncompatibleArchError:
            e = sys.exc_info()[1]
            raise_with_tb(rhnFault(50, " ".join(e.args), explain=0),
                          sys.exc_info()[2])
        except InvalidChannelError:
            e = sys.exc_info()[1]
            raise_with_tb(rhnFault(50, str(e), explain=0), sys.exc_info()[2])

        affected_channels = importer.affected_channels

        log_debug(3, "Computing errata cache for systems affected by channels",
                  affected_channels)

        schedule_errata_cache_update(affected_channels)
        rhnSQL.commit()

        return 0
Example No. 5
def push_package(a_pkg,
                 org_id=None,
                 force=None,
                 channels=[],
                 relative_path=None):
    """Uploads a package"""

    # First write the package to the filesystem to final location
    try:
        importLib.move_package(a_pkg.payload_stream.name,
                               basedir=CFG.MOUNT_POINT,
                               relpath=relative_path,
                               checksum_type=a_pkg.checksum_type,
                               checksum=a_pkg.checksum,
                               force=1)
    except OSError:
        e = sys.exc_info()[1]
        raise_with_tb(rhnFault(50, "Package upload failed: %s" % e),
                      sys.exc_info()[2])
    except importLib.FileConflictError:
        raise_with_tb(rhnFault(50, "File already exists"), sys.exc_info()[2])
    except:
        raise_with_tb(rhnFault(50, "File error"), sys.exc_info()[2])

    pkg = mpmSource.create_package(a_pkg.header,
                                   size=a_pkg.payload_size,
                                   checksum_type=a_pkg.checksum_type,
                                   checksum=a_pkg.checksum,
                                   relpath=relative_path,
                                   org_id=org_id,
                                   header_start=a_pkg.header_start,
                                   header_end=a_pkg.header_end,
                                   channels=channels)

    batch = importLib.Collection()
    batch.append(pkg)

    backend = SQLBackend()

    if force:
        upload_force = 4
    else:
        upload_force = 0
    importer = packageImport.packageImporter(batch,
                                             backend,
                                             source=a_pkg.header.is_source,
                                             caller="server.app.uploadPackage")
    importer.setUploadForce(upload_force)
    importer.run()

    package = batch[0]
    log_debug(5, "Package diff", package.diff)

    if package.diff and not force and package.diff.level > 1:
        # Packages too different; bail out
        log_debug(1, "Packages too different", package.toDict(), "Level:",
                  package.diff.level)
        pdict = package.toDict()
        orig_path = package['path']
        orig_path = os.path.join(CFG.MOUNT_POINT, orig_path)
        log_debug(4, "Original package", orig_path)

        # MPMs do not store their headers on disk, so we must avoid performing
        # operations which rely on information only contained in the headers
        # (such as header signatures).
        if os.path.exists(orig_path) and a_pkg.header.packaging != 'mpm':
            oh = rhn_pkg.get_package_header(orig_path)
            _diff_header_sigs(a_pkg.header, oh, pdict['diff']['diff'])

        return pdict, package.diff.level

    # Remove any pending scheduled file deletion for this package
    h = rhnSQL.prepare("""
        delete from rhnPackageFileDeleteQueue where path = :path
    """)
    h.execute(path=relative_path)

    if package.diff and not force and package.diff.level:
        # No need to copy the package - only the path may need updating
        # pkilambi bug#180347
        # case 1: path exists in the db and the file exists on the file system
        #         - nothing to copy
        # case 2: file exists on the file system but the path is not in the db
        #         - record the relative path in the db based on the package checksum
        # case 3: no file on the file system but the path exists in the db
        #         - write the file to the file system
        # case 4: no file on the file system and no path in the db - write both
        orig_path = package['path']
        orig_path = os.path.join(CFG.MOUNT_POINT, orig_path)
        log_debug(3, "Original package", orig_path)

        # check included to query for source and binary rpms
        h_path_sql = """
            select ps.path path
                from %s ps,
                     rhnChecksumView c
            where
                c.checksum = :csum
            and c.checksum_type = :ctype
            and ps.checksum_id = c.id
            and (ps.org_id = :org_id or
                 (ps.org_id is null and :org_id is null)
                )
            """
        if a_pkg.header.is_source:
            h_package_table = 'rhnPackageSource'
        else:
            h_package_table = 'rhnPackage'
        h_path = rhnSQL.prepare(h_path_sql % h_package_table)
        h_path.execute(ctype=a_pkg.checksum_type,
                       csum=a_pkg.checksum,
                       org_id=org_id)

        rs_path = h_path.fetchall_dict()
        path_dict = {}
        if rs_path:
            path_dict = rs_path[0]

        if os.path.exists(orig_path) and path_dict['path']:
            return {}, 0
        elif not path_dict['path']:
            h_upd = rhnSQL.prepare("""
            update rhnpackage
               set path = :path
            where checksum_id = (
                        select id from rhnChecksumView c
                                 where c.checksum = :csum
                                   and c.checksum_type = :ctype)
            """)
            h_upd.execute(path=relative_path,
                          ctype=a_pkg.checksum_type,
                          csum=a_pkg.checksum)

    # commit the transactions
    rhnSQL.commit()
    if not a_pkg.header.is_source:
        # Process Package Key information
        server_packages.processPackageKeyAssociations(a_pkg.header,
                                                      a_pkg.checksum_type,
                                                      a_pkg.checksum)

    if not a_pkg.header.is_source:
        errataCache.schedule_errata_cache_update(importer.affected_channels)

    log_debug(2, "Returning")
    return {}, 0
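
A hedged usage sketch for push_package: "a_pkg" stands in for a package object the caller already holds (it only needs the attributes the function reads above - header, payload_stream, payload_size, checksum_type, checksum, header_start, header_end), and the relative path shown is a made-up example, not a real repository layout.

# Illustrative only - a_pkg is assumed to be loaded elsewhere by the upload handler.
status, diff_level = push_package(
    a_pkg,
    org_id=1,
    force=0,
    channels=['my-custom-channel'],
    relative_path='redhat/1/abc/foo/1.0-1/noarch/foo-1.0-1.noarch.rpm')
if diff_level:
    # The same NEVRA already exists with different content; "status" carries the diff.
    log_debug(1, "Refusing upload, diff level", diff_level)
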
Example No. 6
        backend = SQLBackend()
        importer = ChannelPackageSubscription(batch, backend, caller=caller)
        try:
            importer.run()
        except IncompatibleArchError, e:
            raise rhnFault(50, string.join(e.args), explain=0), None, sys.exc_info()[2]
        except InvalidChannelError, e:
            raise rhnFault(50, str(e), explain=0), None, sys.exc_info()[2]

        affected_channels = importer.affected_channels

        log_debug(3, "Computing errata cache for systems affected by channels",
            affected_channels)

        schedule_errata_cache_update(affected_channels)
        rhnSQL.commit()

        return 0

    def getAnyChecksum(self, info, username=None, password=None, session=None, is_source=0):
        """ Returns checksum info for the available packages and
            also does an existence check on the filesystem.
        """
        log_debug(3)

        pkg_infos = info.get('packages')
        channels = info.get('channels', [])
        force = info.get('force', 0)
        orgid = info.get('org_id')
Example No. 7
def push_package(a_pkg, org_id=None, force=None, channels=[], relative_path=None):
    """Uploads a package"""

    if relative_path:
        # First write the package to the filesystem to final location
        try:
            importLib.move_package(
                a_pkg.payload_stream.name,
                basedir=CFG.MOUNT_POINT,
                relpath=relative_path,
                checksum_type=a_pkg.checksum_type,
                checksum=a_pkg.checksum,
                force=1,
            )
        except OSError:
            e = sys.exc_info()[1]
            raise_with_tb(rhnFault(50, "Package upload failed: %s" % e), sys.exc_info()[2])
        except importLib.FileConflictError:
            raise_with_tb(rhnFault(50, "File already exists"), sys.exc_info()[2])
        except:
            raise_with_tb(rhnFault(50, "File error"), sys.exc_info()[2])

        # Remove any pending scheduled file deletion for this package
        h = rhnSQL.prepare(
            """
            delete from rhnPackageFileDeleteQueue where path = :path
        """
        )
        h.execute(path=relative_path)

    pkg = mpmSource.create_package(
        a_pkg.header,
        size=a_pkg.payload_size,
        checksum_type=a_pkg.checksum_type,
        checksum=a_pkg.checksum,
        relpath=relative_path,
        org_id=org_id,
        header_start=a_pkg.header_start,
        header_end=a_pkg.header_end,
        channels=channels,
    )

    batch = importLib.Collection()
    batch.append(pkg)

    backend = SQLBackend()

    if force:
        upload_force = 4
    else:
        upload_force = 0
    importer = packageImport.packageImporter(
        batch, backend, source=a_pkg.header.is_source, caller="server.app.uploadPackage"
    )
    importer.setUploadForce(upload_force)
    importer.run()

    package = batch[0]
    log_debug(5, "Package diff", package.diff)

    if package.diff and not force and package.diff.level > 1:
        # Packages too different; bail out
        log_debug(1, "Packages too different", package.toDict(), "Level:", package.diff.level)
        pdict = package.toDict()
        orig_path = package["path"]
        orig_path = os.path.join(CFG.MOUNT_POINT, orig_path)
        log_debug(4, "Original package", orig_path)

        # MPMs do not store their headers on disk, so we must avoid performing
        # operations which rely on information only contained in the headers
        # (such as header signatures).
        if os.path.exists(orig_path) and a_pkg.header.packaging != "mpm":
            oh = rhn_pkg.get_package_header(orig_path)
            _diff_header_sigs(a_pkg.header, oh, pdict["diff"]["diff"])

        return pdict, package.diff.level

    if package.diff and not force and package.diff.level:
        # No need to copy the package - only the path may need updating
        # pkilambi bug#180347
        # case 1: path exists in the db and the file exists on the file system
        #         - nothing to copy
        # case 2: file exists on the file system but the path is not in the db
        #         - record the relative path in the db based on the package checksum
        # case 3: no file on the file system but the path exists in the db
        #         - write the file to the file system
        # case 4: no file on the file system and no path in the db - write both
        orig_path = package["path"]
        orig_path = os.path.join(CFG.MOUNT_POINT, orig_path)
        log_debug(3, "Original package", orig_path)

        # check included to query for source and binary rpms
        h_path_sql = """
            select ps.path path
                from %s ps,
                     rhnChecksumView c
            where
                c.checksum = :csum
            and c.checksum_type = :ctype
            and ps.checksum_id = c.id
            and (ps.org_id = :org_id or
                 (ps.org_id is null and :org_id is null)
                )
            """
        if a_pkg.header.is_source:
            h_package_table = "rhnPackageSource"
        else:
            h_package_table = "rhnPackage"
        h_path = rhnSQL.prepare(h_path_sql % h_package_table)
        h_path.execute(ctype=a_pkg.checksum_type, csum=a_pkg.checksum, org_id=org_id)

        rs_path = h_path.fetchall_dict()
        path_dict = {}
        if rs_path:
            path_dict = rs_path[0]

        if os.path.exists(orig_path) and path_dict["path"]:
            return {}, 0
        elif not path_dict["path"]:
            h_upd = rhnSQL.prepare(
                """
            update rhnpackage
               set path = :path
            where checksum_id = (
                        select id from rhnChecksumView c
                                 where c.checksum = :csum
                                   and c.checksum_type = :ctype)
            """
            )
            h_upd.execute(path=relative_path, ctype=a_pkg.checksum_type, csum=a_pkg.checksum)

    # commit the transactions
    rhnSQL.commit()
    if not a_pkg.header.is_source:
        # Process Package Key information
        server_packages.processPackageKeyAssociations(a_pkg.header, a_pkg.checksum_type, a_pkg.checksum)

    if not a_pkg.header.is_source:
        errataCache.schedule_errata_cache_update(importer.affected_channels)

    log_debug(2, "Returning")
    return {}, 0
Example No. 8
               set path = :path
            where checksum_id = (
                        select id from rhnChecksumView c
                                 where c.checksum = :csum
                                   and c.checksum_type = :ctype)
            """)
            h_upd.execute(path=relative_path, ctype=checksum_type, csum=checksum)

    # commit the transactions
    rhnSQL.commit()
    if not header.is_source:
        # Process Package Key information
        server_packages.processPackageKeyAssociations(header, checksum_type, checksum)

    if not header.is_source:
        errataCache.schedule_errata_cache_update(importer.affected_channels)
                        
    log_debug(2, "Returning")
    return {}, 0

def _diff_header_sigs(h1, h2, diff_list):
    # XXX This can be far more complicated if we take into account that
    # signatures can be different
    h1sigs = h1.signatures
    h2sigs = h2.signatures
    if not h1sigs and not h2sigs:
        # No differences here
        return
    h1_key_ids = _key_ids(h1sigs)
    h2_key_ids = _key_ids(h2sigs)
Example No. 9
                        select id from rhnChecksumView c
                                 where c.checksum = :csum
                                   and c.checksum_type = :ctype)
            """)
            h_upd.execute(path=relative_path, ctype=a_pkg.checksum_type,
                          csum=a_pkg.checksum)

    # commit the transactions
    rhnSQL.commit()
    if not a_pkg.header.is_source:
        # Process Package Key information
        server_packages.processPackageKeyAssociations(a_pkg.header,
                                                      a_pkg.checksum_type, a_pkg.checksum)

    if not a_pkg.header.is_source:
        errataCache.schedule_errata_cache_update(importer.affected_channels)

    log_debug(2, "Returning")
    return {}, 0


def _diff_header_sigs(h1, h2, diff_list):
    # XXX This can be far more complicated if we take into account that
    # signatures can be different
    h1sigs = h1.signatures
    h2sigs = h2.signatures
    if not h1sigs and not h2sigs:
        # No differences here
        return
    h1_key_ids = _key_ids(h1sigs)
    h2_key_ids = _key_ids(h2sigs)