def _rpc_call(self, function_name, params):
     get_server_obj = self.login()
     # Try a couple of times
     cfg = config.initUp2dateConfig()
     for i in range(cfg["networkRetries"]):
         try:
              ret = getattr(get_server_obj, function_name)(*params)
          except rpclib.ProtocolError as e:
              # We have two codes to check: the HTTP error code, and the
              # combination (faultCode, faultString) encoded in the headers
             # of the request.
             http_error_code = e.errcode
             fault_code, fault_string = rpclib.reportError(e.headers)
             if http_error_code == 401 and fault_code == -34:
                 # Login token expired
                 get_server_obj = self.login(force=1)
                 continue
             if http_error_code == 404 and fault_code == -17:
                 # File not found
                 self.extinctErrorYN = 1
                 return None
             log(-1, "ERROR: http error code :%s; fault code: %s; %s" % (http_error_code, fault_code, fault_string))
             # XXX
             raise
         else:
             return ret
Example #2
def import_channels(channels, orgid=None, master=None):
    collection = ChannelCollection()
    batch = []
    import satCerts
    orgs = [a['id'] for a in satCerts.get_all_orgs()]
    org_map = None
    my_backend = diskImportLib.get_backend()
    if master:
        org_map = my_backend.lookupOrgMap(master)['master-id-to-local-id']
    for c in channels:
        try:
            timestamp = collection.get_channel_timestamp(c)
        except KeyError:
            raise Exception, "Could not find channel %s" % c, sys.exc_info()[2]
        c_obj = collection.get_channel(c, timestamp)
        if c_obj is None:
            raise Exception, "Channel not found in cache: %s" % c

        # Check whether we're asked to sync to an orgid, make sure the
        # org from the export is not the null org, and finally, if the
        # orgs differ, switch to the requested org's channel family.
        # TODO: Move these checks somewhere more appropriate
        if not orgid and c_obj['org_id'] is not None:
            # If the source org is not present, default to org 1
            orgid = DEFAULT_ORG
        if orgid is not None and c_obj['org_id'] is not None and \
            c_obj['org_id'] != orgid:
            # If we know the master this is coming from and the master org
            # has been mapped to a local org, transform org_id to the local
            # org_id. Otherwise just put it in the default org.
            if (org_map and c_obj['org_id'] in org_map
                    and org_map[c_obj['org_id']]):
                c_obj['org_id'] = org_map[c_obj['org_id']]
            else:
                c_obj['org_id'] = orgid
                if 'trust_list' in c_obj:
                    del c_obj['trust_list']
            for family in c_obj['families']:
                family['label'] = 'private-channel-family-' + \
                                           str(c_obj['org_id'])
        # If there's a trust list on the channel, transform the org ids to
        # the local ones
        if c_obj.get('trust_list'):
            trusts = []
            for trust in c_obj['trust_list']:
                if org_map and trust['org_trust_id'] in org_map:
                    trust['org_trust_id'] = org_map[trust['org_trust_id']]
                    trusts.append(trust)
            c_obj['trust_list'] = trusts

        syncLib.log(6, "Syncing Channel %s to Org %s " % \
                       (c_obj['label'], c_obj['org_id']))
        batch.append(c_obj)

    importer = channelImport.ChannelImport(batch, my_backend)
    # Don't commit just yet
    importer.will_commit = 0
    importer.run()
    return importer
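The org handling above reduces to a small decision: null-org (official) content is left alone; otherwise the export's org is mapped through the master's id map when one is known, and failing that the channel is moved into the requested (or default) org and its trust list is dropped. A condensed, self-contained sketch of that decision, with hypothetical inputs rather than the real channel objects:

# Hypothetical stand-alone version of the org decision in import_channels.
def resolve_org(export_org, requested_org, org_map=None, default_org=1):
    """Return (local_org, drop_trust_list) for an exported channel."""
    if export_org is None:
        return export_org, False           # null-org content stays as-is
    orgid = requested_org or default_org   # fall back to the default org
    if export_org == orgid:
        return export_org, False
    if org_map and org_map.get(export_org):
        return org_map[export_org], False  # master org mapped to a local org
    return orgid, True                     # unmapped: move it, drop trusts

print(resolve_org(5, None, org_map={5: 12}))  # (12, False)
print(resolve_org(5, 3))                      # (3, True)
print(resolve_org(None, 3))                   # (None, False)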
Example #3
 def _rpc_call(self, function_name, params):
     get_server_obj = self.login()
     # Try a couple of times
     fault_count = 0
     expired_token = 0
     cfg = config.initUp2dateConfig()
     while fault_count - expired_token < cfg['networkRetries']:
         try:
             ret = getattr(get_server_obj, function_name)(*params)
         except rpclib.xmlrpclib.ProtocolError:
             e = sys.exc_info()[1]
             # We have two codes to check: the HTTP error code, and the
              # combination (faultCode, faultString) encoded in the headers
             # of the request.
             http_error_code = e.errcode
             fault_code, fault_string = rpclib.reportError(e.headers)
             fault_count += 1
             if http_error_code == 401 and fault_code == -34:
                 # Login token expired
                 get_server_obj = self.login(force=1)
                 # allow exactly one respin for expired token
                 expired_token = 1
                 continue
             if http_error_code == 404 and fault_code == -17:
                 # File not found
                 self.extinctErrorYN = 1
                 return None
             log(-1, 'ERROR: http error code :%s; fault code: %s; %s' %
                 (http_error_code, fault_code, fault_string))
             # XXX
             raise
         else:
             return ret
     raise Exception("Failed after multiple attempts!")
Example #4
 def endContainerCallback(self):
      arches = sorted(self.arches.keys())
     if arches:
         for arch in arches:
             syncLib.log(6, '   parsed arch: %s' % (arch))
     diskImportLib.ChannelPackageArchCompatContainer.endContainerCallback(self)
Example #6
 def download(self):
     self.setServer(CFG.RHN_XMLRPC_HANDLER)
     #log(2, '   +++ Satellite synchronization tool downloading certificate.')
     try:
         cert = self._xmlrpc("certificate.download", (self.systemid, ))
      except rpclib.xmlrpclib.Fault as e:
         log(-1, '   --- Unable to download the satellite certificate')
         log(-1, '   ERROR: %s' % e, stream=sys.stderr)
         sys.exit(-1)
Example #8
 def _xmlrpc(function, params):
     try:
         retval = getattr(BaseWireSource.serverObj, function)(*params)
     except TypeError:
         log(
             -1,
             'ERROR: during "getattr(BaseWireSource.serverObj, %s)(*(%s))"'
             % (function, params))
         raise
     return retval
Example #9
def purge_extra_channel_families():
    # Get rid of the extra channel families
    try:
        # Purge all unused private channel families with null org
        h = rhnSQL.prepare(_query_purge_private_channel_families)
        h.execute()
    except rhnSQL.SQLError as e:
        # Log it and move on - maybe we missed a FK; no reason to break the
        # sync completely for this.
        syncLib.log(-1, str(e))
Example #10
 def checkAuth(self):
     self.setServer(CFG.RHN_XMLRPC_HANDLER)
     authYN = None
     log(2, "   +++ Satellite synchronization tool checking in.")
     try:
         authYN = self._xmlrpc("authentication.check", (self.systemid,))
      except (rpclib.ProtocolError, rpclib.Fault) as e:
         # bug 141197: the logging of all exceptions is handled higher up in
         # the call stack
         #            log2(-1, 1, '   ERROR: %s' % e, stream=sys.stderr)
         raise
Example #12
 def _xmlrpc(function, params):
     try:
         retval = getattr(BaseWireSource.serverObj, function)(*params)
     except TypeError:
         e = sys.exc_info()[1]
         log(-1, 'ERROR: during "getattr(BaseWireSource.serverObj, %s)(*(%s))"' % (function, params))
         raise
     except rpclib.xmlrpclib.ProtocolError:
         e = sys.exc_info()[1]
         log2(-1, 2, 'ERROR: ProtocolError: %s' % e, stream=sys.stderr)
         raise
     return retval
Example #14
def import_channels(channels, orgid=None):
    collection = ChannelCollection()
    batch = []
    import satCerts
    orgs = [a['id'] for a in satCerts.get_all_orgs()]
    for c in channels:
        try:
            timestamp = collection.get_channel_timestamp(c)
        except KeyError:
            raise Exception, "Could not find channel %s" % c
        c_obj = collection.get_channel(c, timestamp)
        if c_obj is None:
            raise Exception, "Channel not found in cache: %s" % c

        # Check whether we're asked to sync to an orgid, make sure the
        # org from the export is not the null org, and finally, if the
        # orgs differ, switch to the requested org's channel family.
        # TODO: Move these checks somewhere more appropriate
        if not orgid and c_obj['org_id'] is not None:
            # If the source org is not present, default to org 1
            orgid = DEFAULT_ORG
        if orgid is not None and c_obj['org_id'] is not None and \
            c_obj['org_id'] != orgid:
            # Only set the channel family if it's a custom channel
            c_obj['org_id'] = orgid
            for family in c_obj['families']:
                family['label'] = 'private-channel-family-' + \
                                           str(c_obj['org_id'])

        syncLib.log(6, "Syncing Channel %s to Org %s " % \
                       (c_obj['label'], c_obj['org_id']))
        batch.append(c_obj)

    importer = channelImport.ChannelImport(batch, diskImportLib.get_backend())
    # Don't commit just yet
    importer.will_commit = 0
    importer.run()
    return importer
Example #15
    def _set_ssl_trusted_certs(self, serverObj):
        if not self.sslYN:
            return None

        # Check certificate
        if CFG.ISS_PARENT:
            caChain = CFG.ISS_CA_CHAIN
        else:
            caChain = CFG.CA_CHAIN
        if caChain:
            # require RHNS-CA-CERT file to be able to authenticate the SSL
            # connections.
            if not os.access(caChain, os.R_OK):
                message = "ERROR: can not find RHN CA file: %s" % caChain
                log(-1, message, stream=sys.stderr)
                raise Exception(message)
            # force the validation of the SSL cert
            serverObj.add_trusted_cert(caChain)
            return caChain

        message = '--- Warning: SSL connection made but no CA certificate used'
        log(1, message, stream=sys.stderr)
        return None
Example #17
    def import_groups(self, plug, url):
        groupsfile = plug.get_groups()
        if groupsfile:
            basename = os.path.basename(groupsfile)
            log(0, "Repo %s has comps file %s." % (url, basename))
            relativedir = os.path.join(relative_comps_dir, self.channel_label)
            absdir = os.path.join(CFG.MOUNT_POINT, relativedir)
            if not os.path.exists(absdir):
                os.makedirs(absdir)
            relativepath = os.path.join(relativedir, basename)
            abspath = os.path.join(absdir, basename)
            for suffix in ['.gz', '.bz', '.xz']:
                if basename.endswith(suffix):
                    # str.rstrip() strips a character set, not a suffix; slice it off instead
                    abspath = abspath[:-len(suffix)]
                    relativepath = relativepath[:-len(suffix)]
            src = fileutils.decompress_open(groupsfile)
            dst = open(abspath, "w")
            shutil.copyfileobj(src, dst)
            dst.close()
            src.close()
            # update or insert
            hu = rhnSQL.prepare("""update rhnChannelComps
                                      set relative_filename = :relpath,
                                          modified = current_timestamp
                                    where channel_id = :cid""")
            hu.execute(cid=self.channel['id'], relpath=relativepath)

            hi = rhnSQL.prepare("""insert into rhnChannelComps
                                  (id, channel_id, relative_filename)
                                  (select sequence_nextval('rhn_channelcomps_id_seq'),
                                          :cid,
                                          :relpath
                                     from dual
                                    where not exists (select 1 from rhnChannelComps
                                                       where channel_id = :cid))"""
                                )
            hi.execute(cid=self.channel['id'], relpath=relativepath)
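The paired statements above form a portable upsert: the UPDATE runs first, and the INSERT ... SELECT is guarded by NOT EXISTS so a row is created only when no row was there to update. A minimal sketch of the same pattern against an in-memory SQLite table (hypothetical schema, not the real rhnChannelComps):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute("create table comps (channel_id integer primary key,"
             " relative_filename text)")

def upsert_comps(conn, cid, relpath):
    # Update first; the guarded insert only fires when the update matched nothing.
    conn.execute("update comps set relative_filename = ? where channel_id = ?",
                 (relpath, cid))
    conn.execute("""insert into comps (channel_id, relative_filename)
                    select ?, ?
                    where not exists
                        (select 1 from comps where channel_id = ?)""",
                 (cid, relpath, cid))

upsert_comps(conn, 1, 'rhn/comps/ch1/comps.xml')
upsert_comps(conn, 1, 'rhn/comps/ch1/comps-new.xml')  # second call takes the update path
print(conn.execute('select * from comps').fetchall())  # [(1, 'rhn/comps/ch1/comps-new.xml')]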
Example #19
 def checkAuth(self):
     self.setServer(CFG.RHN_XMLRPC_HANDLER)
     authYN = None
     log(2, '   +++ SUSE Manager Server synchronization tool checking in.')
     try:
         authYN = self._xmlrpc('authentication.check', (self.systemid,))
     except (rpclib.xmlrpclib.ProtocolError, rpclib.xmlrpclib.Fault):
         raise
     if authYN:
         log(2, '   +++ Entitled SUSE Manager Server validated.', stream=sys.stderr)
     elif authYN is None:
         log(-1, '   --- An error occurred upon authentication of this SUSE Manager Server -- '
                 'review the pertinent log file (%s) and/or submit a service request.' % CFG.LOG_FILE,
             stream=sys.stderr)
         sys.exit(-1)
     elif authYN == 0:
         log(-1, '   --- This server is not entitled.', stream=sys.stderr)
         sys.exit(-1)
     return authYN
Example #20
 def checkAuth(self):
     self.setServer(CFG.RHN_XMLRPC_HANDLER)
     authYN = None
     log(2, '   +++ Satellite synchronization tool checking in.')
     try:
         authYN = self._xmlrpc('authentication.check', (self.systemid,))
     except (rpclib.xmlrpclib.ProtocolError, rpclib.xmlrpclib.Fault):
         raise
     if authYN:
         log(2, '   +++ Entitled satellite validated.', stream=sys.stderr)
     elif authYN is None:
         log(-1, '   --- An error occurred upon authentication of this satellite -- '
                 'review the pertinent log file (%s) and/or alert RHN at [email protected].' % CFG.LOG_FILE,
             stream=sys.stderr)
         sys.exit(-1)
     elif authYN == 0:
         log(-1, '   --- This server is not an entitled satellite.', stream=sys.stderr)
         sys.exit(-1)
     return authYN
Example #23
    def import_kickstart(self, plug, repo_label):
        ks_path = 'rhn/kickstart/'
        ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '', repo_label.replace(' ', '_'))
        if len(ks_tree_label) < 4:
            ks_tree_label += "_repo"

        # construct ks_path and check we already have this KS tree synced
        id_request = """
                select id
                from rhnKickstartableTree
                where channel_id = :channel_id and label = :label
                """

        if 'org_id' in self.channel and self.channel['org_id']:
            ks_path += str(self.channel['org_id']) + '/' + ks_tree_label
            # Trees synced from external repositories are expected to have the full path in the database
            db_path = os.path.join(CFG.MOUNT_POINT, ks_path)
            row = rhnSQL.fetchone_dict(id_request + " and org_id = :org_id", channel_id=self.channel['id'],
                                       label=ks_tree_label, org_id=self.channel['org_id'])
        else:
            ks_path += ks_tree_label
            db_path = ks_path
            row = rhnSQL.fetchone_dict(id_request + " and org_id is NULL", channel_id=self.channel['id'],
                                       label=ks_tree_label)

        treeinfo_path = ['treeinfo', '.treeinfo']
        treeinfo_parser = None
        for path in treeinfo_path:
            log(1, "Trying " + path)
            treeinfo = plug.get_file(path, os.path.join(CFG.MOUNT_POINT, ks_path))
            if treeinfo:
                try:
                    treeinfo_parser = TreeInfoParser(treeinfo)
                    break
                except TreeInfoError:
                    pass

        if not treeinfo_parser:
            log(0, "Kickstartable tree not detected (no valid treeinfo file)")
            return

        if self.ks_install_type is None:
            family = treeinfo_parser.get_family()
            if family == 'Fedora':
                self.ks_install_type = 'fedora18'
            elif family == 'CentOS':
                self.ks_install_type = 'rhel_' + treeinfo_parser.get_major_version()
            else:
                self.ks_install_type = 'generic_rpm'

        # Make sure images are included
        to_download = []
        for repo_path in treeinfo_parser.get_images():
            local_path = os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)
            # TODO: better check
            if not os.path.exists(local_path):
                to_download.append(repo_path)

        if row:
            log(0, "Kickstartable tree %s already synced. Updating content..." % ks_tree_label)
            ks_id = row['id']
        else:
            row = rhnSQL.fetchone_dict("""
                select sequence_nextval('rhn_kstree_id_seq') as id from dual
                """)
            ks_id = row['id']

            rhnSQL.execute("""
                       insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id, kstree_type,
                                                         install_type, last_modified, created, modified)
                       values (:id, :org_id, :label, :base_path, :channel_id,
                                 ( select id from rhnKSTreeType where label = :ks_tree_type),
                                 ( select id from rhnKSInstallType where label = :ks_install_type),
                                 current_timestamp, current_timestamp, current_timestamp)""", id=ks_id,
                           org_id=self.channel['org_id'], label=ks_tree_label, base_path=db_path,
                           channel_id=self.channel['id'], ks_tree_type=self.ks_tree_type,
                           ks_install_type=self.ks_install_type)

            log(0, "Added new kickstartable tree %s. Downloading content..." % ks_tree_label)

        insert_h = rhnSQL.prepare("""
                insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size, last_modified, created,
                 modified) values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                 epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
        """)

        delete_h = rhnSQL.prepare("""
                delete from rhnKSTreeFile where kstree_id = :id and relative_filename = :path
        """)

        # Downloading/Updating content of KS Tree
        # start from root dir
        dirs_queue = ['']
        log(0, "Gathering all files in kickstart repository...")
        while dirs_queue:
            cur_dir_name = dirs_queue.pop(0)
            cur_dir_html = plug.get_file(cur_dir_name)
            if cur_dir_html is None:
                continue

            parser = KSDirParser(cur_dir_html)

            for ks_file in parser.get_content():
                repo_path = cur_dir_name + ks_file['name']
                # if this is a directory, just add a name into queue (like BFS algorithm)
                if ks_file['type'] == 'DIR':
                    dirs_queue.append(repo_path)
                    continue

                if repo_path not in to_download:
                    to_download.append(repo_path)

        if to_download:
            log(0, "Downloading %d files." % len(to_download))
            for item in to_download:
                for retry in range(3):
                    try:
                        log(1, "Retrieving %s" % item)
                        plug.get_file(item, os.path.join(CFG.MOUNT_POINT, ks_path))
                        st = os.stat(os.path.join(CFG.MOUNT_POINT, ks_path, item))
                        break
                    except OSError:  # os.stat fails if the file wasn't downloaded
                        if retry < 2:
                            log(2, "Retry download %s: attempt #%d" % (item, retry + 1))
                        else:
                            raise
                # update entity about current file in a database
                delete_h.execute(id=ks_id, path=item)
                insert_h.execute(id=ks_id, path=item,
                                 checksum=getFileChecksum('sha256', os.path.join(CFG.MOUNT_POINT, ks_path, item)),
                                 st_size=st.st_size, st_time=st.st_mtime)
        else:
            log(0, "Nothing to download.")

        rhnSQL.commit()
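The dirs_queue loop above is a breadth-first traversal of the repository's directory listings: directories found on a page are appended to a FIFO queue, and files are collected for download. A minimal sketch of the same walk over an in-memory tree, with a hypothetical listing dict standing in for plug.get_file plus KSDirParser:

# Hypothetical listing: directory path -> parsed entries (type DIR or FILE).
TREE = {
    '': [{'name': 'images/', 'type': 'DIR'}, {'name': 'treeinfo', 'type': 'FILE'}],
    'images/': [{'name': 'pxeboot/', 'type': 'DIR'}],
    'images/pxeboot/': [{'name': 'vmlinuz', 'type': 'FILE'}],
}

def walk_bfs(tree):
    to_download = []
    dirs_queue = ['']                     # start from the root dir
    while dirs_queue:
        cur_dir_name = dirs_queue.pop(0)  # FIFO pop makes this breadth-first
        for entry in tree.get(cur_dir_name, []):
            path = cur_dir_name + entry['name']
            if entry['type'] == 'DIR':
                dirs_queue.append(path)   # visit later, level by level
            else:
                to_download.append(path)
    return to_download

print(walk_bfs(TREE))  # ['treeinfo', 'images/pxeboot/vmlinuz']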
Example #24
 def endItemCallback(self):
     xmlSource.ChannelFamilyContainer.endItemCallback(self)
     if not self.batch:
         return
     syncLib.log(2, '   parsing family: %s' % (self.batch[-1]['name']))
Example #25
    def __init__(self, channel_label, repo_type, url=None, fail=False,
                 filters=None, no_errata=False, sync_kickstart=False, latest=False,
                 metadata_only=False, strict=0, excluded_urls=None, no_packages=False,
                 log_dir="reposync", log_level=None):
        self.regen = False
        self.fail = fail
        self.filters = filters or []
        self.no_packages = no_packages
        self.no_errata = no_errata
        self.sync_kickstart = sync_kickstart
        self.latest = latest
        self.metadata_only = metadata_only
        self.ks_tree_type = 'externally-managed'
        self.ks_install_type = 'generic_rpm'

        initCFG('server.satellite')
        rhnSQL.initDB()

        # setup logging
        log_filename = channel_label + '.log'
        log_path = default_log_location + log_dir + '/' + log_filename
        if log_level is None:
            log_level = 0
        CFG.set('DEBUG', log_level)
        rhnLog.initLOG(log_path, log_level)
        # os.fchown isn't in 2.4 :/
        if isSUSE():
            os.system("chgrp www " + log_path)
        else:
            os.system("chgrp apache " + log_path)

        log2disk(0, "Command: %s" % str(sys.argv))
        log2disk(0, "Sync of channel started.")

        self.channel_label = channel_label
        self.channel = self.load_channel()
        if not self.channel:
            log(0, "Channel %s does not exist." % channel_label)
            sys.exit(1)  # cannot continue without a channel row

        if not url:
            # TODO:need to look at user security across orgs
            h = rhnSQL.prepare("""select s.id, s.source_url, s.label, fm.channel_family_id
                                  from rhnContentSource s,
                                       rhnChannelContentSource cs,
                                       rhnChannelFamilyMembers fm
                                 where s.id = cs.source_id
                                   and cs.channel_id = fm.channel_id
                                   and cs.channel_id = :channel_id""")
            h.execute(channel_id=int(self.channel['id']))
            source_data = h.fetchall_dict()
            self.urls = []
            if excluded_urls is None:
                excluded_urls = []
            if source_data:
                for row in source_data:
                    if row['source_url'] not in excluded_urls:
                        self.urls.append((row['id'], row['source_url'], row['label'], row['channel_family_id']))
        else:
            self.urls = [(None, u, None, None) for u in url]

        if not self.urls:
            log2stderr(0, "Channel %s has no URL associated" % channel_label)

        self.repo_plugin = self.load_plugin(repo_type)
        self.strict = strict
        self.all_packages = []
 def _xmlrpc(self, function, params):
     try:
         retval = getattr(BaseWireSource.serverObj, function)(*params)
     except TypeError:
         log(-1, 'ERROR: during "getattr(BaseWireSource.serverObj, %s)(*(%s))"' % (function, params))
         raise
     return retval
Example #27
    def sync(self, update_repodata=False):
        """Trigger a reposync"""
        start_time = datetime.now()
        for (repo_id, url, repo_label) in self.urls:
            log(0, "Repo URL: %s" % url)
            plugin = None

            # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line
            if url.startswith("uln://"):
                self.repo_plugin = self.load_plugin("uln")

            # pylint: disable=W0703
            try:
                # use the modified relative_url as the name of the repo plugin,
                # because it is used as the name of the cache directory as well

                relative_url = '_'.join(url.split('://')[1].split('/')[1:])
                plugin_name = relative_url.replace("?", "_").replace("&", "_").replace("=", "_")

                plugin = self.repo_plugin(url, plugin_name)

                if update_repodata:
                    plugin.clear_cache()

                if repo_id is not None:
                    keys = rhnSQL.fetchone_dict("""
                        select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                        from rhncontentsource cs
                                join rhncryptokey k1
                                on cs.ssl_ca_cert_id = k1.id
                                left outer join rhncryptokey k2
                                on cs.ssl_client_cert_id = k2.id
                                left outer join rhncryptokey k3
                                on cs.ssl_client_key_id = k3.id
                        where cs.id = :repo_id
                        """, repo_id=int(repo_id))
                    if keys and ('ca_cert' in keys):
                        plugin.set_ssl_options(keys['ca_cert'], keys['client_cert'], keys['client_key'])

                if not self.no_packages:
                    self.import_packages(plugin, repo_id, url)
                    self.import_groups(plugin, url)

                if not self.no_errata:
                    self.import_updates(plugin, url)

                # only for repos obtained from the DB
                if self.sync_kickstart and repo_label:
                    try:
                        self.import_kickstart(plugin, repo_label)
                    except:
                        rhnSQL.rollback()
                        raise
            except Exception:
                e = sys.exc_info()[1]
                log2stderr(0, "ERROR: %s" % e)
            if plugin is not None:
                plugin.clear_ssl_cache()
        if self.regen:
            taskomatic.add_to_repodata_queue_for_channel_package_subscription(
                [self.channel_label], [], "server.app.yumreposync")
            taskomatic.add_to_erratacache_queue(self.channel_label)
        self.update_date()
        rhnSQL.commit()
        elapsed_time = datetime.now() - start_time
        log(0, "Sync of channel completed in %s." % str(elapsed_time).split('.')[0])
        return elapsed_time
Example #28
    def upload_updates(self, notices):
        batch = []
        typemap = {
            'security': 'Security Advisory',
            'recommended': 'Bug Fix Advisory',
            'bugfix': 'Bug Fix Advisory',
            'optional': 'Product Enhancement Advisory',
            'feature': 'Product Enhancement Advisory',
            'enhancement': 'Product Enhancement Advisory'
        }
        for notice in notices:
            notice = self.fix_notice(notice)
            advisory = notice['update_id'] + '-' + notice['version']
            existing_errata = self.get_errata(notice['update_id'])

            e = Erratum()
            e['errata_from'] = notice['from']
            e['advisory'] = advisory
            e['advisory_name'] = notice['update_id']
            e['advisory_rel'] = notice['version']
            e['advisory_type'] = typemap.get(notice['type'], 'Product Enhancement Advisory')
            e['product'] = notice['release'] or 'Unknown'
            e['description'] = notice['description']
            e['synopsis'] = notice['title'] or notice['update_id']
            if (notice['type'] == 'security' and notice['severity'] and
                    not e['synopsis'].startswith(notice['severity'] + ': ')):
                e['synopsis'] = notice['severity'] + ': ' + e['synopsis']
            if 'summary' in notice and notice['summary'] is not None:
                e['topic'] = notice['summary']
            else:
                e['topic'] = ' '
            if 'solution' in notice and notice['solution'] is not None:
                e['solution'] = notice['solution']
            else:
                e['solution'] = ' '
            e['issue_date'] = self._to_db_date(notice['issued'])
            if notice['updated']:
                e['update_date'] = self._to_db_date(notice['updated'])
            else:
                e['update_date'] = self._to_db_date(notice['issued'])
            e['org_id'] = self.channel['org_id']
            e['notes'] = ''
            e['channels'] = []
            e['packages'] = []
            e['files'] = []
            if existing_errata:
                e['channels'] = existing_errata['channels']
                e['packages'] = existing_errata['packages']
            e['channels'].append({'label': self.channel_label})

            for pkg in notice['pkglist'][0]['packages']:
                param_dict = {
                    'name': pkg['name'],
                    'version': pkg['version'],
                    'release': pkg['release'],
                    'arch': pkg['arch'],
                    'channel_id': int(self.channel['id']),
                }
                if pkg['epoch'] == '0':
                    epochStatement = "(pevr.epoch is NULL or pevr.epoch = '0')"
                elif pkg['epoch'] is None or pkg['epoch'] == '':
                    epochStatement = "pevr.epoch is NULL"
                else:
                    epochStatement = "pevr.epoch = :epoch"
                    param_dict['epoch'] = pkg['epoch']
                if self.channel['org_id']:
                    param_dict['org_id'] = self.channel['org_id']
                    orgStatement = "= :org_id"
                else:
                    orgStatement = "is NULL"

                h = rhnSQL.prepare("""
                    select p.id, pevr.epoch, c.checksum, c.checksum_type
                      from rhnPackage p
                      join rhnPackagename pn on p.name_id = pn.id
                      join rhnpackageevr pevr on p.evr_id = pevr.id
                      join rhnpackagearch pa on p.package_arch_id = pa.id
                      join rhnArchType at on pa.arch_type_id = at.id
                      join rhnChecksumView c on p.checksum_id = c.id
                      join rhnChannelPackage cp on p.id = cp.package_id
                     where pn.name = :name
                       and p.org_id %s
                       and pevr.version = :version
                       and pevr.release = :release
                       and pa.label = :arch
                       and %s
                       and at.label = 'rpm'
                       and cp.channel_id = :channel_id
                """ % (orgStatement, epochStatement))
                h.execute(**param_dict)
                cs = h.fetchone_dict() or None

                if not cs:
                    if 'epoch' in param_dict:
                        epoch = param_dict['epoch'] + ":"
                    else:
                        epoch = ""
                    log(2, "No checksum found for %s-%s%s-%s.%s."
                           " Skipping Package" % (param_dict['name'],
                                                  epoch,
                                                  param_dict['version'],
                                                  param_dict['release'],
                                                  param_dict['arch']))
                    continue

                newpkgs = []
                for oldpkg in e['packages']:
                    if oldpkg['package_id'] != cs['id']:
                        newpkgs.append(oldpkg)

                package = IncompletePackage().populate(pkg)
                package['epoch'] = cs['epoch']
                package['org_id'] = self.channel['org_id']

                package['checksums'] = {cs['checksum_type']: cs['checksum']}
                package['checksum_type'] = cs['checksum_type']
                package['checksum'] = cs['checksum']

                package['package_id'] = cs['id']
                newpkgs.append(package)

                e['packages'] = newpkgs

            if len(e['packages']) == 0:
                # FIXME: print only with higher debug option
                log(2, "Advisory %s has empty package list." % e['advisory_name'])

            e['keywords'] = []
            if notice['reboot_suggested']:
                kw = Keyword()
                kw.populate({'keyword': 'reboot_suggested'})
                e['keywords'].append(kw)
            if notice['restart_suggested']:
                kw = Keyword()
                kw.populate({'keyword': 'restart_suggested'})
                e['keywords'].append(kw)
            e['bugs'] = []
            e['cve'] = []
            if notice['references']:
                bzs = [r for r in notice['references'] if r['type'] == 'bugzilla']
                if len(bzs):
                    tmp = {}
                    for bz in bzs:
                        try:
                            bz_id = int(bz['id'])
                        # This can happen in some incorrectly generated updateinfo, let's be smart
                        except ValueError:
                            log(2, "Bugzilla assigned to advisory %s has invalid id: %s, trying to get it from URL..."
                                % (e['advisory_name'], bz['id']))
                            bz_id = int(re.search(r"\d+$", bz['href']).group(0))
                        if bz_id not in tmp:
                            bug = Bug()
                            bug.populate({'bug_id': bz_id, 'summary': bz['title'], 'href': bz['href']})
                            e['bugs'].append(bug)
                            tmp[bz_id] = None
                cves = [r for r in notice['references'] if r['type'] == 'cve']
                if len(cves):
                    tmp = {}
                    for cve in cves:
                        if cve['id'] not in tmp:
                            e['cve'].append(cve['id'])
                            tmp[cve['id']] = None
                others = [r for r in notice['references'] if not r['type'] == 'bugzilla' and not r['type'] == 'cve']
                if len(others):
                    tmp = len(others)
                    refers_to = ""
                    for other in others:
                        if refers_to:
                            refers_to += "\n"
                        refers_to += other['href']
                    e['refers_to'] = refers_to
            e['locally_modified'] = None
            batch.append(e)

        backend = SQLBackend()
        importer = ErrataImport(batch, backend)
        importer.run()
        self.regen = True
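The epochStatement branches above encode how RPM epochs compare: an epoch of '0' in the notice must match packages stored with either a NULL or '0' epoch, a missing or empty epoch matches NULL only, and anything else must match exactly. The same rule expressed in plain Python (a sketch, not code from the source):

def epoch_matches(notice_epoch, db_epoch):
    if notice_epoch == '0':
        return db_epoch in (None, '0')  # '0' matches NULL or '0'
    if notice_epoch in (None, ''):
        return db_epoch is None         # missing epoch matches NULL only
    return notice_epoch == db_epoch     # otherwise exact match

assert epoch_matches('0', None) and epoch_matches('0', '0')
assert epoch_matches(None, None) and not epoch_matches(None, '0')
assert epoch_matches('2', '2') and not epoch_matches('2', None)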
Example #29
    def import_packages(self, plug, source_id, url):
        failed_packages = 0
        if (not self.filters) and source_id:
            h = rhnSQL.prepare("""
                    select flag, filter
                      from rhnContentSourceFilter
                     where source_id = :source_id
                     order by sort_order """)
            h.execute(source_id=source_id)
            filter_data = h.fetchall_dict() or []
            filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                       for row in filter_data]
        else:
            filters = self.filters

        packages = plug.list_packages(filters, self.latest)
        self.all_packages.extend(packages)
        to_process = []
        num_passed = len(packages)
        log(0, "Packages in repo:             %5d" % plug.num_packages)
        if plug.num_excluded:
            log(0, "Packages passed filter rules: %5d" % num_passed)
        channel_id = int(self.channel['id'])

        for pack in packages:
            db_pack = rhnPackage.get_info_for_package(
                [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
                channel_id, self.org_id)

            to_download = True
            to_link = True
            # Package exists in DB
            if db_pack:
                # Path in filesystem is defined
                if db_pack['path']:
                    pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
                else:
                    pack.path = ""

                if self.metadata_only or self.match_package_checksum(
                        db_pack['path'], pack.path, pack.checksum_type,
                        pack.checksum):
                    # package is already on disk or not required
                    to_download = False
                    if db_pack['channel_id'] == channel_id:
                        # package is already in the channel
                        to_link = False

                    # just pass data from DB, they will be used in strict channel
                    # linking if there is no new RPM downloaded
                    pack.checksum = db_pack['checksum']
                    pack.checksum_type = db_pack['checksum_type']
                    pack.epoch = db_pack['epoch']

                elif db_pack['channel_id'] == channel_id:
                    # different package with SAME NVREA
                    self.disassociate_package(db_pack)

            if to_download or to_link:
                to_process.append((pack, to_download, to_link))

        num_to_process = len(to_process)
        if num_to_process == 0:
            log(0, "No new packages to sync.")
            # If we are just appending, we can exit
            if not self.strict:
                return failed_packages
        else:
            log(
                0, "Packages already synced:      %5d" %
                (num_passed - num_to_process))
            log(0, "Packages to sync:             %5d" % num_to_process)

        is_non_local_repo = (url.find("file:/") < 0)

        downloader = ThreadedDownloader()
        to_download_count = 0
        for what in to_process:
            pack, to_download, to_link = what
            if to_download:
                target_file = os.path.join(
                    plug.repo.pkgdir,
                    os.path.basename(pack.unique_id.relativepath))
                pack.path = target_file
                params = {}
                if self.metadata_only:
                    bytes_range = (0, pack.unique_id.hdrend)
                    checksum_type = None
                    checksum = None
                else:
                    bytes_range = None
                    checksum_type = pack.checksum_type
                    checksum = pack.checksum
                plug.set_download_parameters(params,
                                             pack.unique_id.relativepath,
                                             target_file,
                                             checksum_type=checksum_type,
                                             checksum_value=checksum,
                                             bytes_range=bytes_range)
                downloader.add(params)
                to_download_count += 1
        if num_to_process != 0:
            log(0, "New packages to download:     %5d" % to_download_count)
        logger = TextLogger(None, to_download_count)
        downloader.set_log_obj(logger)
        downloader.run()

        log2disk(0, "Importing packages started.")
        progress_bar = ProgressBarLogger("Importing packages:    ",
                                         to_download_count)
        for (index, what) in enumerate(to_process):
            pack, to_download, to_link = what
            if not to_download:
                continue
            localpath = pack.path
            # pylint: disable=W0703
            try:
                if os.path.exists(localpath):
                    pack.load_checksum_from_header()
                    rel_package_path = pack.upload_package(
                        self.org_id, metadata_only=self.metadata_only)
                    # Save uploaded package to cache with repository checksum type
                    if rel_package_path:
                        self.checksum_cache[rel_package_path] = {
                            pack.checksum_type: pack.checksum
                        }

                    # we do not want to keep a whole 'a_pkg' object for every package in memory,
                    # because we need only checksum. see BZ 1397417
                    pack.checksum = pack.a_pkg.checksum
                    pack.checksum_type = pack.a_pkg.checksum_type
                    pack.epoch = pack.a_pkg.header['epoch']
                    pack.a_pkg = None
                else:
                    raise Exception
                progress_bar.log(True, None)
            except KeyboardInterrupt:
                raise
            except Exception:
                failed_packages += 1
                e = str(sys.exc_info()[1])
                if e:
                    log2(0, 1, e, stream=sys.stderr)
                if self.fail:
                    raise
                to_process[index] = (pack, False, False)
                self.all_packages.remove(pack)
                progress_bar.log(False, None)
            finally:
                if is_non_local_repo and localpath and os.path.exists(
                        localpath):
                    os.remove(localpath)
        log2disk(0, "Importing packages finished.")

        if self.strict:
            # Need to make sure all packages from all repositories are associated with channel
            import_batch = [
                self.associate_package(pack) for pack in self.all_packages
            ]
        else:
            # Only packages from current repository are appended to channel
            import_batch = [
                self.associate_package(pack)
                for (pack, to_download, to_link) in to_process if to_link
            ]
        # Do not re-link if nothing was marked to link
        if any([to_link for (pack, to_download, to_link) in to_process]):
            log(0, "Linking packages to channel.")
            backend = SQLBackend()
            caller = "server.app.yumreposync"
            importer = ChannelPackageSubscription(import_batch,
                                                  backend,
                                                  caller=caller,
                                                  repogen=False,
                                                  strict=self.strict)
            importer.run()
            backend.commit()
            self.regen = True
        return failed_packages
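The per-package branching in import_packages boils down to two flags: download when the RPM is not already on disk with a matching checksum (and the run is not metadata-only), and link when the package is not already associated with this channel. A condensed sketch of that decision table, with a hypothetical db_pack dict in place of the rhnPackage lookup:

def plan_package(db_pack, on_disk_matches, channel_id, metadata_only=False):
    """Return (to_download, to_link) for one package (sketch)."""
    to_download, to_link = True, True
    if db_pack:
        if metadata_only or on_disk_matches:
            to_download = False                      # already on disk, or not wanted
            if db_pack['channel_id'] == channel_id:
                to_link = False                      # already in this channel
    return to_download, to_link

print(plan_package(None, False, 42))               # (True, True)  new package
print(plan_package({'channel_id': 42}, True, 42))  # (False, False) nothing to do
print(plan_package({'channel_id': 7}, True, 42))   # (False, True)  just link it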
Example #30
    def upload_updates(self, notices):
        batch = []
        typemap = {
            'security': 'Security Advisory',
            'recommended': 'Bug Fix Advisory',
            'bugfix': 'Bug Fix Advisory',
            'optional': 'Product Enhancement Advisory',
            'feature': 'Product Enhancement Advisory',
            'enhancement': 'Product Enhancement Advisory'
        }
        channel_advisory_names = self.list_errata()
        for notice in notices:
            notice = self.fix_notice(notice)

            if not self.force_all_errata and notice[
                    'update_id'] in channel_advisory_names:
                continue

            advisory = notice['update_id'] + '-' + notice['version']
            existing_errata = self.get_errata(notice['update_id'])
            e = Erratum()
            e['errata_from'] = notice['from']
            e['advisory'] = advisory
            e['advisory_name'] = notice['update_id']
            e['advisory_rel'] = notice['version']
            e['advisory_type'] = typemap.get(notice['type'],
                                             'Product Enhancement Advisory')
            e['product'] = notice['release'] or 'Unknown'
            e['description'] = notice['description']
            e['synopsis'] = notice['title'] or notice['update_id']
            if (notice['type'] == 'security' and notice['severity'] and
                    not e['synopsis'].startswith(notice['severity'] + ': ')):
                e['synopsis'] = notice['severity'] + ': ' + e['synopsis']
            if 'summary' in notice and notice['summary'] is not None:
                e['topic'] = notice['summary']
            else:
                e['topic'] = ' '
            if 'solution' in notice and notice['solution'] is not None:
                e['solution'] = notice['solution']
            else:
                e['solution'] = ' '
            e['issue_date'] = self._to_db_date(notice['issued'])
            if notice['updated']:
                e['update_date'] = self._to_db_date(notice['updated'])
            else:
                e['update_date'] = self._to_db_date(notice['issued'])
            e['org_id'] = self.org_id
            e['notes'] = ''
            e['channels'] = []
            e['packages'] = []
            e['files'] = []
            if existing_errata:
                e['channels'] = existing_errata['channels']
                e['packages'] = existing_errata['packages']
            e['channels'].append({'label': self.channel_label})

            for pkg in notice['pkglist'][0]['packages']:
                param_dict = {
                    'name': pkg['name'],
                    'version': pkg['version'],
                    'release': pkg['release'],
                    'arch': pkg['arch'],
                    'channel_id': int(self.channel['id']),
                }
                if pkg['epoch'] == '0':
                    epochStatement = "(pevr.epoch is NULL or pevr.epoch = '0')"
                elif pkg['epoch'] is None or pkg['epoch'] == '':
                    epochStatement = "pevr.epoch is NULL"
                else:
                    epochStatement = "pevr.epoch = :epoch"
                    param_dict['epoch'] = pkg['epoch']
                if self.org_id:
                    param_dict['org_id'] = self.org_id
                    orgStatement = "= :org_id"
                else:
                    orgStatement = "is NULL"

                h = rhnSQL.prepare("""
                    select p.id, pevr.epoch, c.checksum, c.checksum_type
                      from rhnPackage p
                      join rhnPackagename pn on p.name_id = pn.id
                      join rhnpackageevr pevr on p.evr_id = pevr.id
                      join rhnpackagearch pa on p.package_arch_id = pa.id
                      join rhnArchType at on pa.arch_type_id = at.id
                      join rhnChecksumView c on p.checksum_id = c.id
                      join rhnChannelPackage cp on p.id = cp.package_id
                     where pn.name = :name
                       and p.org_id %s
                       and pevr.version = :version
                       and pevr.release = :release
                       and pa.label = :arch
                       and %s
                       and at.label = 'rpm'
                       and cp.channel_id = :channel_id
                """ % (orgStatement, epochStatement))
                h.execute(**param_dict)
                cs = h.fetchone_dict() or None

                if not cs:
                    if 'epoch' in param_dict:
                        epoch = param_dict['epoch'] + ":"
                    else:
                        epoch = ""
                    log(
                        2, "No checksum found for %s-%s%s-%s.%s."
                        " Skipping Package" %
                        (param_dict['name'], epoch, param_dict['version'],
                         param_dict['release'], param_dict['arch']))
                    continue

                newpkgs = []
                for oldpkg in e['packages']:
                    if oldpkg['package_id'] != cs['id']:
                        newpkgs.append(oldpkg)

                package = IncompletePackage().populate(pkg)
                package['epoch'] = cs['epoch']
                package['org_id'] = self.org_id

                package['checksums'] = {cs['checksum_type']: cs['checksum']}
                package['checksum_type'] = cs['checksum_type']
                package['checksum'] = cs['checksum']

                package['package_id'] = cs['id']
                newpkgs.append(package)

                e['packages'] = newpkgs

            if len(e['packages']) == 0:
                # FIXME: print only with higher debug option
                log(2,
                    "Advisory %s has empty package list." % e['advisory_name'])

            e['keywords'] = []
            if notice['reboot_suggested']:
                kw = Keyword()
                kw.populate({'keyword': 'reboot_suggested'})
                e['keywords'].append(kw)
            if notice['restart_suggested']:
                kw = Keyword()
                kw.populate({'keyword': 'restart_suggested'})
                e['keywords'].append(kw)
            e['bugs'] = []
            e['cve'] = []
            if notice['references']:
                bzs = [
                    r for r in notice['references'] if r['type'] == 'bugzilla'
                ]
                if len(bzs):
                    tmp = {}
                    for bz in bzs:
                        try:
                            bz_id = int(bz['id'])
                        # This can happen in some incorrectly generated updateinfo, let's be smart
                        except ValueError:
                            log(
                                2,
                                "Bugzilla assigned to advisory %s has invalid id: %s, trying to get it from URL..."
                                % (e['advisory_name'], bz['id']))
                            bz_id = int(
                                re.search(r"\d+$", bz['href']).group(0))
                        if bz_id not in tmp:
                            bug = Bug()
                            bug.populate({
                                'bug_id': bz_id,
                                'summary': bz['title'],
                                'href': bz['href']
                            })
                            e['bugs'].append(bug)
                            tmp[bz_id] = None
                cves = [r for r in notice['references'] if r['type'] == 'cve']
                if len(cves):
                    tmp = {}
                    for cve in cves:
                        if cve['id'] not in tmp:
                            e['cve'].append(cve['id'])
                            tmp[cve['id']] = None
                others = [
                    r for r in notice['references']
                    if not r['type'] == 'bugzilla' and not r['type'] == 'cve'
                ]
                if len(others):
                    refers_to = ""
                    for other in others:
                        if refers_to:
                            refers_to += "\n"
                        refers_to += other['href']
                    e['refers_to'] = refers_to
            e['locally_modified'] = None
            batch.append(e)

        if batch:
            log(0, "Syncing %s new errata to channel." % len(batch))
            backend = SQLBackend()
            importer = ErrataImport(batch, backend)
            importer.run()
            self.regen = True
        elif notices:
            log(0, "No new errata to sync.")
Example #31
    def import_packages(self, plug, source_id, url):
        if (not self.filters) and source_id:
            h = rhnSQL.prepare("""
                    select flag, filter
                      from rhnContentSourceFilter
                     where source_id = :source_id
                     order by sort_order """)
            h.execute(source_id=source_id)
            filter_data = h.fetchall_dict() or []
            filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                       for row in filter_data]
        else:
            filters = self.filters

        packages = plug.list_packages(filters, self.latest)
        self.all_packages.extend(packages)
        to_process = []
        num_passed = len(packages)
        log(0, "Packages in repo:             %5d" % plug.num_packages)
        if plug.num_excluded:
            log(0, "Packages passed filter rules: %5d" % num_passed)
        channel_id = int(self.channel['id'])
        if self.channel['org_id']:
            self.channel['org_id'] = int(self.channel['org_id'])
        else:
            self.channel['org_id'] = None
        for pack in packages:
            db_pack = rhnPackage.get_info_for_package(
                [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
                channel_id, self.channel['org_id'])

            to_download = True
            to_link = True
            # Package exists in DB
            if db_pack:
                # Path in filesystem is defined
                if db_pack['path']:
                    pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
                else:
                    pack.path = ""

                if self.metadata_only or self.match_package_checksum(pack.path,
                                                                     pack.checksum_type, pack.checksum):
                    # package is already on disk or not required
                    to_download = False
                    if db_pack['channel_id'] == channel_id:
                        # package is already in the channel
                        to_link = False

                elif db_pack['channel_id'] == channel_id:
                    # different package with SAME NVREA
                    self.disassociate_package(db_pack)

                # just pass the data from the DB; it will be used if no RPM is available
                pack.checksum = db_pack['checksum']
                pack.checksum_type = db_pack['checksum_type']
                pack.epoch = db_pack['epoch']

            if to_download or to_link:
                to_process.append((pack, to_download, to_link))

        num_to_process = len(to_process)
        if num_to_process == 0:
            log(0, "No new packages to sync.")
            # If we are just appending, we can exit
            if not self.strict:
                return
        else:
            log(0, "Packages already synced:      %5d" % (num_passed - num_to_process))
            log(0, "Packages to sync:             %5d" % num_to_process)

        self.regen = True
        is_non_local_repo = (url.find("file:/") < 0)

        for (index, what) in enumerate(to_process):
            pack, to_download, to_link = what
            localpath = None
            # pylint: disable=W0703
            try:
                log(0, "%d/%d : %s" % (index + 1, num_to_process, pack.getNVREA()))
                if to_download:
                    pack.path = localpath = plug.get_package(pack, metadata_only=self.metadata_only)
                    pack.load_checksum_from_header()
                    pack.upload_package(self.channel, metadata_only=self.metadata_only)
            except KeyboardInterrupt:
                raise
            except Exception:
                e = sys.exc_info()[1]
                log2stderr(0, e)
                if self.fail:
                    raise
                to_process[index] = (pack, False, False)
                continue
            finally:
                if is_non_local_repo and localpath and os.path.exists(localpath):
                    os.remove(localpath)

        log(0, "Linking packages to channel.")
        if self.strict:
            import_batch = [self.associate_package(pack)
                            for pack in self.all_packages]
        else:
            import_batch = [self.associate_package(pack)
                            for (pack, to_download, to_link) in to_process
                            if to_link]
        backend = SQLBackend()
        caller = "server.app.yumreposync"
        importer = ChannelPackageSubscription(import_batch,
                                              backend, caller=caller, repogen=False,
                                              strict=self.strict)
        importer.run()
        backend.commit()
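
The to_download/to_link bookkeeping above condenses to a small decision table; here is an illustrative helper (not part of the original code) that captures it. The NVREA-clash disassociation step is deliberately left out.

def plan_package(db_pack, channel_id, on_disk_or_not_required):
    """Return (to_download, to_link) for one candidate package."""
    if db_pack is None:
        return True, True       # unknown package: fetch it and link it
    if on_disk_or_not_required:
        # RPM already present (or metadata_only run): never re-download;
        # link only if the package is not yet in this channel
        return False, db_pack['channel_id'] != channel_id
    return True, True           # stale or missing file: re-download and link
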
Example #32
    def import_updates(self, plug, url):
        notices = plug.get_updates()
        log(0, "Repo %s has %s errata." % (url, len(notices)))
        if notices:
            self.upload_updates(notices)
Example #33
    def sync(self, update_repodata=True):
        """Trigger a reposync"""
        failed_packages = 0
        sync_error = 0
        if not self.urls:
            sync_error = -1
        start_time = datetime.now()
        for (repo_id, url, repo_label) in self.urls:
            log(0, "Repo URL: %s" % url)
            plugin = None

            # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line choice
            if url.startswith("uln://"):
                self.repo_plugin = self.load_plugin("uln")

            # pylint: disable=W0703
            try:
                if repo_label:
                    repo_name = repo_label
                else:
                    # use the modified relative_url as the repo plugin name,
                    # because it is also used as the cache directory name
                    relative_url = '_'.join(url.split('://')[1].split('/')[1:])
                    repo_name = relative_url.replace("?", "_").replace(
                        "&", "_").replace("=", "_")

                plugin = self.repo_plugin(url,
                                          repo_name,
                                          org=str(self.org_id or ''),
                                          channel_label=self.channel_label)

                if update_repodata:
                    plugin.clear_cache()

                if repo_id is not None:
                    keys = rhnSQL.fetchall_dict("""
                        select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                        from rhncontentsource cs inner join
                             rhncontentsourcessl csssl on cs.id = csssl.content_source_id inner join
                             rhncryptokey k1 on csssl.ssl_ca_cert_id = k1.id left outer join
                             rhncryptokey k2 on csssl.ssl_client_cert_id = k2.id left outer join
                             rhncryptokey k3 on csssl.ssl_client_key_id = k3.id
                        where cs.id = :repo_id
                        """,
                                                repo_id=int(repo_id))
                    if keys:
                        ssl_set = get_single_ssl_set(
                            keys, check_dates=self.check_ssl_dates)
                        if ssl_set:
                            plugin.set_ssl_options(ssl_set['ca_cert'],
                                                   ssl_set['client_cert'],
                                                   ssl_set['client_key'])
                        else:
                            raise ValueError(
                                "No valid SSL certificates were found for repository."
                            )

                if not self.no_packages:
                    ret = self.import_packages(plugin, repo_id, url)
                    failed_packages += ret
                    self.import_groups(plugin, url)

                if not self.no_errata:
                    self.import_updates(plugin, url)

                # only for repos obtained from the DB
                if self.sync_kickstart and repo_label:
                    try:
                        self.import_kickstart(plugin, repo_label)
                    except:
                        rhnSQL.rollback()
                        raise
            except Exception:
                e = sys.exc_info()[1]
                log2(0, 0, "ERROR: %s" % e, stream=sys.stderr)
                log2disk(0, "ERROR: %s" % e)
                # pylint: disable=W0104
                sync_error = -1
            if plugin is not None:
                plugin.clear_ssl_cache()
        # Update cache with package checksums
        rhnCache.set(checksum_cache_filename, self.checksum_cache)
        if self.regen:
            taskomatic.add_to_repodata_queue_for_channel_package_subscription(
                [self.channel_label], [], "server.app.yumreposync")
            taskomatic.add_to_erratacache_queue(self.channel_label)
        self.update_date()
        rhnSQL.commit()

        # update permissions
        fileutils.createPath(os.path.join(
            CFG.MOUNT_POINT,
            'rhn'))  # if the directory exists update ownership only
        for root, dirs, files in os.walk(os.path.join(CFG.MOUNT_POINT, 'rhn')):
            for d in dirs:
                fileutils.setPermsPath(os.path.join(root, d), group='apache')
            for f in files:
                fileutils.setPermsPath(os.path.join(root, f), group='apache')
        elapsed_time = datetime.now() - start_time
        log(
            0, "Sync of channel completed in %s." %
            str(elapsed_time).split('.')[0])
        # if there are no global problems but some packages weren't synced
        if sync_error == 0 and failed_packages > 0:
            sync_error = failed_packages
        return elapsed_time, sync_error
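
The repo_name fallback above (everything after the host, joined with underscores, with characters unsafe for directory names replaced) is easy to check in isolation; the helper name and sample URL below are invented for illustration.

def cache_name_for(url):
    relative_url = '_'.join(url.split('://')[1].split('/')[1:])
    return relative_url.replace("?", "_").replace("&", "_").replace("=", "_")

# cache_name_for("https://mirror.example.com/pub/fedora/updates/?arch=x86_64")
#   -> "pub_fedora_updates__arch_x86_64"
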
Example #34
    def __init__(self,
                 channel_label,
                 repo_type,
                 url=None,
                 fail=False,
                 filters=None,
                 no_errata=False,
                 sync_kickstart=False,
                 latest=False,
                 metadata_only=False,
                 strict=0,
                 excluded_urls=None,
                 no_packages=False,
                 log_dir="reposync",
                 log_level=None,
                 force_kickstart=False,
                 force_all_errata=False,
                 check_ssl_dates=False,
                 force_null_org_content=False):
        self.regen = False
        self.fail = fail
        self.filters = filters or []
        self.no_packages = no_packages
        self.no_errata = no_errata
        self.sync_kickstart = sync_kickstart
        self.force_all_errata = force_all_errata
        self.force_kickstart = force_kickstart
        self.latest = latest
        self.metadata_only = metadata_only
        self.ks_tree_type = 'externally-managed'
        self.ks_install_type = None

        initCFG('server.satellite')
        rhnSQL.initDB()

        # setup logging
        log_filename = channel_label + '.log'
        log_path = default_log_location + log_dir + '/' + log_filename
        if log_level is None:
            log_level = 0
        CFG.set('DEBUG', log_level)
        rhnLog.initLOG(log_path, log_level)
        # os.fchown isn't in 2.4 :/
        if isSUSE():
            os.system("chgrp www " + log_path)
        else:
            os.system("chgrp apache " + log_path)

        log2disk(0, "Command: %s" % str(sys.argv))
        log2disk(0, "Sync of channel started.")

        self.channel_label = channel_label
        self.channel = self.load_channel()
        if not self.channel:
            log(0, "Channel %s does not exist." % channel_label)
            sys.exit(1)

        if not self.channel['org_id'] or force_null_org_content:
            self.org_id = None
        else:
            self.org_id = int(self.channel['org_id'])

        if not url:
            # TODO:need to look at user security across orgs
            h = rhnSQL.prepare("""select s.id, s.source_url, s.label
                                  from rhnContentSource s,
                                       rhnChannelContentSource cs
                                 where s.id = cs.source_id
                                   and cs.channel_id = :channel_id""")
            h.execute(channel_id=int(self.channel['id']))
            source_data = h.fetchall_dict()
            self.urls = []
            if excluded_urls is None:
                excluded_urls = []
            if source_data:
                for row in source_data:
                    if row['source_url'] not in excluded_urls:
                        self.urls.append(
                            (row['id'], row['source_url'], row['label']))
        else:
            self.urls = [(None, u, None) for u in url]

        if not self.urls:
            log2(0,
                 0,
                 "Channel %s has no URL associated" % channel_label,
                 stream=sys.stderr)

        self.repo_plugin = self.load_plugin(repo_type)
        self.strict = strict
        self.all_packages = []
        self.check_ssl_dates = check_ssl_dates
        # Init the cache of computed checksums so they are not recomputed on each reposync run
        self.checksum_cache = rhnCache.get(checksum_cache_filename)
        if self.checksum_cache is None:
            self.checksum_cache = {}
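
match_package_checksum is not shown here, but given the cache initialized above it plausibly hashes the on-disk file once and reuses the digest on later runs. The sketch below is an assumption, not the real implementation: the cache key layout and chunk size are invented.

import hashlib
import os

def match_package_checksum(cache, abspath, checksum_type, checksum):
    if not (abspath and os.path.exists(abspath)):
        return False
    key = abspath + '|' + checksum_type          # invented cache key layout
    if key not in cache:
        h = hashlib.new(checksum_type)           # e.g. 'sha256'
        with open(abspath, 'rb') as f:
            for chunk in iter(lambda: f.read(65536), b''):
                h.update(chunk)
        cache[key] = h.hexdigest()
    return cache[key] == checksum
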
Example #35
    def endItemCallback(self):
        xmlSource.ChannelFamilyContainer.endItemCallback(self)
        if not self.batch:
            return
        syncLib.log(2, '   parsing family: %s' % (self.batch[-1]['name']))
Example #36
    def import_packages(self, plug, source_id, url):
        if (not self.filters) and source_id:
            h = rhnSQL.prepare("""
                    select flag, filter
                      from rhnContentSourceFilter
                     where source_id = :source_id
                     order by sort_order """)
            h.execute(source_id=source_id)
            filter_data = h.fetchall_dict() or []
            filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                       for row in filter_data]
        else:
            filters = self.filters

        packages = plug.list_packages(filters, self.latest)
        self.all_packages.extend(packages)
        to_process = []
        num_passed = len(packages)
        log(0, "Packages in repo:             %5d" % plug.num_packages)
        if plug.num_excluded:
            log(0, "Packages passed filter rules: %5d" % num_passed)
        channel_id = int(self.channel['id'])

        for pack in packages:
            db_pack = rhnPackage.get_info_for_package(
                [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
                channel_id, self.channel['org_id'])

            to_download = True
            to_link = True
            # Package exists in DB
            if db_pack:
                # Path in filesystem is defined
                if db_pack['path']:
                    pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
                else:
                    pack.path = ""

                if self.metadata_only or self.match_package_checksum(pack.path,
                                                                     pack.checksum_type, pack.checksum):
                    # package is already on disk or not required
                    to_download = False
                    if db_pack['channel_id'] == channel_id:
                        # package is already in the channel
                        to_link = False

                elif db_pack['channel_id'] == channel_id:
                    # different package with SAME NVREA
                    self.disassociate_package(db_pack)

                # just pass the data from the DB; it will be used if no RPM is available
                pack.checksum = db_pack['checksum']
                pack.checksum_type = db_pack['checksum_type']
                pack.epoch = db_pack['epoch']

            if to_download or to_link:
                to_process.append((pack, to_download, to_link))

        num_to_process = len(to_process)
        if num_to_process == 0:
            log(0, "No new packages to sync.")
            # If we are just appending, we can exit
            if not self.strict:
                return
        else:
            log(0, "Packages already synced:      %5d" % (num_passed - num_to_process))
            log(0, "Packages to sync:             %5d" % num_to_process)

        self.regen = True
        is_non_local_repo = (url.find("file:/") < 0)

        for (index, what) in enumerate(to_process):
            pack, to_download, to_link = what
            localpath = None
            # pylint: disable=W0703
            try:
                log(0, "%d/%d : %s" % (index + 1, num_to_process, pack.getNVREA()))
                if to_download:
                    pack.path = localpath = plug.get_package(pack, metadata_only=self.metadata_only)
                    pack.load_checksum_from_header()
                    pack.upload_package(self.channel, metadata_only=self.metadata_only)
            except KeyboardInterrupt:
                raise
            except Exception:
                e = sys.exc_info()[1]
                log2stderr(0, e)
                if self.fail:
                    raise
                to_process[index] = (pack, False, False)
                continue
            finally:
                if is_non_local_repo and localpath and os.path.exists(localpath):
                    os.remove(localpath)

        log(0, "Linking packages to channel.")
        if self.strict:
            import_batch = [self.associate_package(pack)
                            for pack in self.all_packages]
        else:
            import_batch = [self.associate_package(pack)
                            for (pack, to_download, to_link) in to_process
                            if to_link]
        backend = SQLBackend()
        caller = "server.app.yumreposync"
        importer = ChannelPackageSubscription(import_batch,
                                              backend, caller=caller, repogen=False,
                                              strict=self.strict)
        importer.run()
        backend.commit()
Example #37
    def import_updates(self, plug, url):
        notices = plug.get_updates()
        log(0, "Repo %s has %s errata." % (url, len(notices)))
        if notices:
            self.upload_updates(notices)
Example #38
    def _xmlrpc(function, params):
        try:
            retval = getattr(BaseWireSource.serverObj, function)(*params)
        except TypeError, e:
            log(-1, 'ERROR: during "getattr(BaseWireSource.serverObj, %s)(*(%s))"' % (function, params))
            raise
        return retval
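
_xmlrpc is a thin dynamic-dispatch wrapper: the XML-RPC function name arrives as a string and getattr resolves it on the server proxy, so a TypeError usually means the parameter tuple did not match the remote signature. A standalone illustration (DummyServer is invented for the example):

class DummyServer:
    def ping(self, who):
        return "pong %s" % who

def call(server, function, params):
    # same getattr-based dispatch as _xmlrpc, minus logging
    return getattr(server, function)(*params)

# call(DummyServer(), "ping", ("satellite",)) -> "pong satellite"
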
Example #39
    def sync(self, update_repodata=False):
        """Trigger a reposync"""
        start_time = datetime.now()
        for (repo_id, url, repo_label, channel_family_id) in self.urls:
            log(0, "Repo URL: %s" % url)
            plugin = None

            # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line choice
            if url.startswith("uln://"):
                self.repo_plugin = self.load_plugin("uln")

            # pylint: disable=W0703
            try:
                # use the modified relative_url as the repo plugin name,
                # because it is also used as the cache directory name

                relative_url = '_'.join(url.split('://')[1].split('/')[1:])
                plugin_name = relative_url.replace("?", "_").replace("&", "_").replace("=", "_")

                plugin = self.repo_plugin(url, plugin_name)

                if update_repodata:
                    plugin.clear_cache()

                if repo_id is not None:
                    keys = rhnSQL.fetchone_dict("""
                        select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                        from rhncontentssl
                                join rhncryptokey k1
                                on rhncontentssl.ssl_ca_cert_id = k1.id
                                left outer join rhncryptokey k2
                                on rhncontentssl.ssl_client_cert_id = k2.id
                                left outer join rhncryptokey k3
                                on rhncontentssl.ssl_client_key_id = k3.id
                        where rhncontentssl.content_source_id = :repo_id
                        or rhncontentssl.channel_family_id = :channel_family_id
                        """, repo_id=int(repo_id), channel_family_id=int(channel_family_id))
                    if keys and ('ca_cert' in keys):
                        plugin.set_ssl_options(keys['ca_cert'], keys['client_cert'], keys['client_key'])

                if not self.no_packages:
                    self.import_packages(plugin, repo_id, url)
                    self.import_groups(plugin, url)

                if not self.no_errata:
                    self.import_updates(plugin, url)

                # only for repos obtained from the DB
                if self.sync_kickstart and repo_label:
                    try:
                        self.import_kickstart(plugin, repo_label)
                    except:
                        rhnSQL.rollback()
                        raise
            except Exception:
                e = sys.exc_info()[1]
                log2stderr(0, "ERROR: %s" % e)
            if plugin is not None:
                plugin.clear_ssl_cache()
        if self.regen:
            taskomatic.add_to_repodata_queue_for_channel_package_subscription(
                [self.channel_label], [], "server.app.yumreposync")
            taskomatic.add_to_erratacache_queue(self.channel_label)
        self.update_date()
        rhnSQL.commit()
        elapsed_time = datetime.now() - start_time
        log(0, "Sync of channel completed in %s." % str(elapsed_time).split('.')[0])
        return elapsed_time
Example #40
    def import_kickstart(self, plug, repo_label):
        ks_path = 'rhn/kickstart/'
        ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '',
                               repo_label.replace(' ', '_'))
        if len(ks_tree_label) < 4:
            ks_tree_label += "_repo"

        # construct ks_path and check we already have this KS tree synced
        id_request = """
                select id
                from rhnKickstartableTree
                where channel_id = :channel_id and label = :label
                """

        if self.org_id:
            ks_path += str(self.org_id) + '/' + ks_tree_label
            # Trees synced from external repositories are expected to have the full path in the database
            db_path = os.path.join(CFG.MOUNT_POINT, ks_path)
            row = rhnSQL.fetchone_dict(id_request + " and org_id = :org_id",
                                       channel_id=self.channel['id'],
                                       label=ks_tree_label,
                                       org_id=self.org_id)
        else:
            ks_path += ks_tree_label
            db_path = ks_path
            row = rhnSQL.fetchone_dict(id_request + " and org_id is NULL",
                                       channel_id=self.channel['id'],
                                       label=ks_tree_label)

        treeinfo_path = ['treeinfo', '.treeinfo']
        treeinfo_parser = None
        for path in treeinfo_path:
            log(1, "Trying " + path)
            treeinfo = plug.get_file(
                path, os.path.join(plug.repo.basecachedir, plug.name))
            if treeinfo:
                try:
                    treeinfo_parser = TreeInfoParser(treeinfo)
                    break
                except TreeInfoError:
                    pass

        if not treeinfo_parser:
            log(0, "Kickstartable tree not detected (no valid treeinfo file)")
            return

        if self.ks_install_type is None:
            family = treeinfo_parser.get_family()
            if family == 'Fedora':
                self.ks_install_type = 'fedora18'
            elif family == 'CentOS':
                self.ks_install_type = 'rhel_' + treeinfo_parser.get_major_version(
                )
            else:
                self.ks_install_type = 'generic_rpm'

        fileutils.createPath(os.path.join(CFG.MOUNT_POINT, ks_path))
        # Make sure images are included
        to_download = set()
        for repo_path in treeinfo_parser.get_images():
            local_path = os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)
            # TODO: better check
            if not os.path.exists(local_path) or self.force_kickstart:
                to_download.add(repo_path)

        if row:
            log(
                0,
                "Kickstartable tree %s already synced. Updating content..." %
                ks_tree_label)
            ks_id = row['id']
        else:
            row = rhnSQL.fetchone_dict("""
                select sequence_nextval('rhn_kstree_id_seq') as id from dual
                """)
            ks_id = row['id']

            rhnSQL.execute("""
                       insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id, kstree_type,
                                                         install_type, last_modified, created, modified)
                       values (:id, :org_id, :label, :base_path, :channel_id,
                                 ( select id from rhnKSTreeType where label = :ks_tree_type),
                                 ( select id from rhnKSInstallType where label = :ks_install_type),
                                 current_timestamp, current_timestamp, current_timestamp)""",
                           id=ks_id,
                           org_id=self.org_id,
                           label=ks_tree_label,
                           base_path=db_path,
                           channel_id=self.channel['id'],
                           ks_tree_type=self.ks_tree_type,
                           ks_install_type=self.ks_install_type)

            log(
                0, "Added new kickstartable tree %s. Downloading content..." %
                ks_tree_label)

        insert_h = rhnSQL.prepare("""
                insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size, last_modified, created,
                 modified) values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                 epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
        """)

        delete_h = rhnSQL.prepare("""
                delete from rhnKSTreeFile where kstree_id = :id and relative_filename = :path
        """)

        # Downloading/Updating content of KS Tree
        # start from root dir
        is_root = True
        dirs_queue = ['']
        log(0, "Gathering all files in kickstart repository...")
        while len(dirs_queue) > 0:
            cur_dir_name = dirs_queue.pop(0)
            cur_dir_html = plug.get_file(cur_dir_name)
            if cur_dir_html is None:
                continue

            blacklist = None
            if is_root:
                blacklist = [treeinfo_parser.get_package_dir() + '/']
                is_root = False

            parser = KSDirParser(cur_dir_html, blacklist)

            for ks_file in parser.get_content():
                repo_path = cur_dir_name + ks_file['name']
                # if this is a directory, just add its name to the queue (BFS-style)
                if ks_file['type'] == 'DIR':
                    dirs_queue.append(repo_path)
                    continue

                if not os.path.exists(
                        os.path.join(CFG.MOUNT_POINT, ks_path,
                                     repo_path)) or self.force_kickstart:
                    to_download.add(repo_path)

        if to_download:
            log(0, "Downloading %d kickstart files." % len(to_download))
            progress_bar = ProgressBarLogger("Downloading kickstarts:",
                                             len(to_download))
            downloader = ThreadedDownloader(force=self.force_kickstart)
            for item in to_download:
                params = {}
                plug.set_download_parameters(
                    params, item, os.path.join(CFG.MOUNT_POINT, ks_path, item))
                downloader.add(params)
            downloader.set_log_obj(progress_bar)
            downloader.run()
            log2disk(0, "Download finished.")
            for item in to_download:
                st = os.stat(os.path.join(CFG.MOUNT_POINT, ks_path, item))
                # update the database entry for the current file
                delete_h.execute(id=ks_id, path=item)
                insert_h.execute(id=ks_id,
                                 path=item,
                                 checksum=getFileChecksum(
                                     'sha256',
                                     os.path.join(CFG.MOUNT_POINT, ks_path,
                                                  item)),
                                 st_size=st.st_size,
                                 st_time=st.st_mtime)
        else:
            log(0, "No new kickstart files to download.")

        # set permissions recursively
        rhnSQL.commit()
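
The dirs_queue loop above is a plain breadth-first search over directory listing pages. Stripped of the database and download bookkeeping it looks like this; get_listing and the entry format are assumptions for illustration.

def walk_tree(get_listing, blacklist=()):
    files, dirs_queue = [], ['']
    while dirs_queue:
        cur = dirs_queue.pop(0)
        for entry in get_listing(cur) or []:     # None means unreadable dir
            path = cur + entry['name']
            if entry['type'] == 'DIR':
                if path not in blacklist:
                    dirs_queue.append(path)      # enqueue; visit later (BFS)
            else:
                files.append(path)
    return files
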
Example #41
    def import_kickstart(self, plug, repo_label):
        ks_path = 'rhn/kickstart/'
        ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '', repo_label.replace(' ', '_'))
        if len(ks_tree_label) < 4:
            ks_tree_label += "_repo"

        # construct ks_path and check we already have this KS tree synced
        id_request = """
                select id
                from rhnKickstartableTree
                where channel_id = :channel_id and label = :label
                """

        if 'org_id' in self.channel and self.channel['org_id']:
            ks_path += str(self.channel['org_id']) + '/' + ks_tree_label
            # Trees synced from external repositories are expected to have the full path in the database
            db_path = os.path.join(CFG.MOUNT_POINT, ks_path)
            row = rhnSQL.fetchone_dict(id_request + " and org_id = :org_id", channel_id=self.channel['id'],
                                       label=ks_tree_label, org_id=self.channel['org_id'])
        else:
            ks_path += ks_tree_label
            db_path = ks_path
            row = rhnSQL.fetchone_dict(id_request + " and org_id is NULL", channel_id=self.channel['id'],
                                       label=ks_tree_label)

        treeinfo_path = ['treeinfo', '.treeinfo']
        treeinfo_parser = None
        for path in treeinfo_path:
            log(1, "Trying " + path)
            treeinfo = plug.get_file(path, os.path.join(CFG.MOUNT_POINT, ks_path))
            if treeinfo:
                try:
                    treeinfo_parser = TreeInfoParser(treeinfo)
                    break
                except TreeInfoError:
                    pass

        if not treeinfo_parser:
            log(0, "Kickstartable tree not detected (no valid treeinfo file)")
            return

        # Make sure images are included
        to_download = []
        for repo_path in treeinfo_parser.get_images():
            local_path = os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)
            # TODO: better check
            if not os.path.exists(local_path):
                to_download.append(repo_path)

        if row:
            log(0, "Kickstartable tree %s already synced. Updating content..." % ks_tree_label)
            ks_id = row['id']
        else:
            row = rhnSQL.fetchone_dict("""
                select sequence_nextval('rhn_kstree_id_seq') as id from dual
                """)
            ks_id = row['id']

            rhnSQL.execute("""
                       insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id, kstree_type,
                                                         install_type, last_modified, created, modified)
                       values (:id, :org_id, :label, :base_path, :channel_id,
                                 ( select id from rhnKSTreeType where label = :ks_tree_type),
                                 ( select id from rhnKSInstallType where label = :ks_install_type),
                                 current_timestamp, current_timestamp, current_timestamp)""", id=ks_id,
                           org_id=self.channel['org_id'], label=ks_tree_label, base_path=db_path,
                           channel_id=self.channel['id'], ks_tree_type=self.ks_tree_type,
                           ks_install_type=self.ks_install_type)

            log(0, "Added new kickstartable tree %s. Downloading content..." % ks_tree_label)

        insert_h = rhnSQL.prepare("""
                insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size, last_modified, created,
                 modified) values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                 epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
        """)

        delete_h = rhnSQL.prepare("""
                delete from rhnKSTreeFile where kstree_id = :id and relative_filename = :path
        """)

        # Downloading/Updating content of KS Tree
        # start from root dir
        dirs_queue = ['']
        log(0, "Gathering all files in kickstart repository...")
        while len(dirs_queue) > 0:
            cur_dir_name = dirs_queue.pop(0)
            cur_dir_html = plug.get_file(cur_dir_name)
            if cur_dir_html is None:
                continue

            parser = KSDirParser(cur_dir_html)

            for ks_file in parser.get_content():
                repo_path = cur_dir_name + ks_file['name']
                # if this is a directory, just add its name to the queue (BFS-style)
                if ks_file['type'] == 'DIR':
                    dirs_queue.append(repo_path)
                    continue

                if repo_path not in to_download:
                    to_download.append(repo_path)

        if to_download:
            log(0, "Downloading %d files." % len(to_download))
            for item in to_download:
                for retry in range(3):
                    try:
                        log(1, "Retrieving %s" % item)
                        plug.get_file(item, os.path.join(CFG.MOUNT_POINT, ks_path))
                        st = os.stat(os.path.join(CFG.MOUNT_POINT, ks_path, item))
                        break
                    except OSError:  # os.stat fails if the file wasn't downloaded
                        if retry < 2:
                            log(2, "Retry download %s: attempt #%d" % (item, retry + 1))
                        else:
                            # all three attempts failed; re-raise instead of
                            # falling through with 'st' undefined
                            raise
                # update the database entry for the current file
                delete_h.execute(id=ks_id, path=item)
                insert_h.execute(id=ks_id, path=item,
                                 checksum=getFileChecksum('sha256', os.path.join(CFG.MOUNT_POINT, ks_path, item)),
                                 st_size=st.st_size, st_time=st.st_mtime)
        else:
            log(0, "Nothing to download.")

        rhnSQL.commit()
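
The bounded retry pattern in the download loop above generalizes to a small helper; this is a sketch under the assumption that an OSError from os.stat is the only failure signal worth retrying.

import time

def fetch_with_retries(fetch, attempts=3, delay=1):
    for attempt in range(attempts):
        try:
            return fetch()
        except OSError:
            if attempt == attempts - 1:
                raise                    # out of attempts: propagate
            time.sleep(delay)            # brief pause before retrying
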