def lookupChannel(self, name, username, password):
    """Look a channel up by label, falling back to lookup by numeric id.

    Returns the rhnChannel row (None values stripped, channel-family info
    merged in) as a dict, or '' when no channel matches.
    """
    log_debug(3)
    authobj = self._auth(username, password)
    # NOTE(review): return value is ignored here -- presumably
    # isChannelAdmin() raises on insufficient permission; confirm
    # (lookupChannelFamily checks the returned boolean instead).
    authobj.isChannelAdmin()
    row = rhnSQL.fetchone_dict("select * from rhnChannel where label = :label",
                               label=name)
    if row:
        row.update(self._insert_channel_family(row['id']))
        # Timestamps are stringified so the dict is XML-RPC serializable
        row['last_modified'] = str(row['last_modified'])
        row['modified'] = str(row['modified'])
        row['created'] = str(row['created'])
        return removeNone(row)
    # Look the channel up by id
    try:
        name = int(name)
    except ValueError:
        # Not numeric either -- nothing to look up
        return ''
    row = rhnSQL.fetchone_dict("select * from rhnChannel where id = :channel_id",
                               channel_id = name)
    if row:
        # NOTE(review): unlike the label branch, the timestamp columns are
        # not stringified here -- confirm whether that is intentional.
        row.update(self._insert_channel_family(row['id']))
        return removeNone(row)
    return ''
def management_create_channel(self, dict):
    """Create a new 'normal' configuration channel for the caller's org.

    Raises rhnFault 4010 when a channel with the same label already exists.
    Returns an empty dict on success.
    """
    log_debug(1)
    self._get_and_validate_session(dict)
    config_channel = dict.get('config_channel')
    # XXX Validate the namespace
    channel_name = dict.get('config_channel_name') or config_channel
    channel_description = dict.get('description') or config_channel
    existing = rhnSQL.fetchone_dict(self._query_lookup_config_channel,
                                    org_id=self.org_id,
                                    config_channel=config_channel)
    if existing:
        raise rhnFault(4010,
                       "Configuration channel %s already exists" % config_channel,
                       explain=0)
    # Delegate the actual insert to the stored function
    insert_call = rhnSQL.Function('rhn_config.insert_channel',
                                  rhnSQL.types.NUMBER())
    insert_call(self.org_id, 'normal', channel_name, config_channel,
                channel_description)
    rhnSQL.commit()
    return {}
def isAllowedSlave(hostname):
    """Return True when `hostname` is an enabled ISS slave, else False."""
    rhnSQL.initDB()
    enabled = rhnSQL.fetchone_dict(
        "select 1 from rhnISSSlave where slave = :hostname and enabled = 'Y'",
        hostname=idn_puny_to_unicode(hostname))
    if enabled:
        return True
    log_error('Server "%s" is not enabled for ISS.' % hostname)
    return False
def lookup_org_config_channel_by_name(self, config_channel):
    """Fetch this org's config-channel row by label.

    Raises rhnFault 4009 when the channel does not exist.
    """
    row = rhnSQL.fetchone_dict(self._query_org_config_channels,
                               config_channel=config_channel,
                               org_id=self.org_id)
    if row:
        return row
    raise rhnFault(4009,
                   "Configuration channel %s does not exist" % config_channel,
                   explain=0)
def getISSCurrentMaster():
    """Return the label of the current ISS master, or None when unset."""
    rhnSQL.initDB()
    row = rhnSQL.fetchone_dict(
        "select label from rhnISSMaster where is_current_master = 'Y'")
    return row['label'] if row else None
def management_remove_channel(self, dict):
    """Remove a configuration channel via the rhn_config.delete_channel
    stored procedure.

    Raises rhnFault 4009 when the channel does not exist and rhnFault 4005
    when the channel still contains files (ORA-02292 child-record found).
    """
    log_debug(1)
    self._get_and_validate_session(dict)
    config_channel = dict.get('config_channel')
    # XXX Validate the namespace
    row = rhnSQL.fetchone_dict(self._query_config_channel_by_label,
                               org_id=self.org_id, label=config_channel)
    if not row:
        raise rhnFault(4009, "Channel not found")
    delete_call = rhnSQL.Procedure('rhn_config.delete_channel')
    try:
        delete_call(row['id'])
    # NOTE: Python 2-only 'except E, e' syntax; the sibling version of this
    # method uses sys.exc_info() / raise_with_tb for py2/py3 compatibility.
    except rhnSQL.SQLError, e:
        errno = e.args[0]
        if errno == 2292:
            # Re-raise as rhnFault but keep the original traceback
            # (py2-only three-argument raise form).
            raise rhnFault(4005, "Cannot remove non-empty channel %s" % config_channel, explain=0), None, sys.exc_info()[2]
        raise
def management_remove_file(self, dict):
    """Delete a single config file (identified by path) from a channel.

    Raises rhnFault 4011 when the file is not present in the channel.
    Returns an empty dict on success.
    """
    log_debug(1)
    self._get_and_validate_session(dict)
    config_channel = dict.get('config_channel')
    # XXX Validate the namespace
    path = dict.get('path')
    row = rhnSQL.fetchone_dict(self._query_lookup_config_file_by_channel,
                               org_id=self.org_id,
                               config_channel=config_channel,
                               path=path)
    if not row:
        raise rhnFault(4011,
                       "File %s does not exist in channel %s" % (path, config_channel),
                       explain=0)
    # Deletion itself is handled by the stored procedure
    delete_call = rhnSQL.Procedure("rhn_config.delete_file")
    delete_call(row['id'])
    rhnSQL.commit()
    return {}
def management_disable_file(self, dict):
    """Mark a config file 'dead' (soft delete) instead of removing it.

    Raises rhnFault 4011 when the file is missing or already dead.
    Returns an empty dict on success.
    """
    log_debug(1)
    self._get_and_validate_session(dict)
    config_channel = dict.get('config_channel')
    # XXX Validate the namespace
    path = dict.get('path')
    # Resolve the id of the 'dead' file state once, up front
    states = rhnSQL.Table('rhnConfigFileState', 'label')
    state_id_dead = states['dead']['id']
    row = rhnSQL.fetchone_dict(self._query_lookup_config_file_by_channel,
                               config_channel=config_channel,
                               path=path)
    missing = not row or row['state_id'] == state_id_dead
    if missing:
        raise rhnFault(4011,
                       "File %s does not exist in channel %s" % (path, config_channel),
                       explain=0)
    rhnSQL.execute(self._query_update_file_state,
                   config_file_id=row['id'],
                   state_id=state_id_dead)
    rhnSQL.commit()
    return {}
def management_remove_channel(self, dict):
    """Remove a configuration channel via rhn_config.delete_channel.

    Raises rhnFault 4009 when the channel is unknown, rhnFault 4005 when
    the channel is non-empty (ORA-02292). Returns "" on success.
    """
    log_debug(1)
    self._get_and_validate_session(dict)
    config_channel = dict.get('config_channel')
    # XXX Validate the namespace
    row = rhnSQL.fetchone_dict(self._query_config_channel_by_label,
                               org_id=self.org_id,
                               label=config_channel)
    if not row:
        raise rhnFault(4009, "Channel not found")
    delete_channel = rhnSQL.Procedure('rhn_config.delete_channel')
    try:
        delete_channel(row['id'])
    except rhnSQL.SQLError:
        e = sys.exc_info()[1]
        # ORA-02292: child records found -- the channel still has files
        if e.args[0] == 2292:
            raise_with_tb(rhnFault(4005,
                                   "Cannot remove non-empty channel %s" % config_channel,
                                   explain=0),
                          sys.exc_info()[2])
        raise
    log_debug(5, "Removed:", config_channel)
    rhnSQL.commit()
    return ""
def sync(self):
    """Trigger a reposync.

    Iterates every configured repo URL for this channel and imports
    packages, comps groups, errata and (optionally) kickstart trees.
    Per-repo errors are logged and the loop continues with the next repo.
    """
    start_time = datetime.now()
    for (repo_id, url, repo_label, channel_family_id) in self.urls:
        print("")
        self.print_msg("Repo URL: %s" % url)
        plugin = None
        # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line
        if url.startswith("uln://"):
            self.repo_plugin = self.load_plugin("uln")
        # pylint: disable=W0703
        try:
            plugin = self.repo_plugin(url, self.channel_label)
            if repo_id is not None:
                # SSL material for the content source: the CA cert is
                # required (inner join), client cert/key are optional
                # (outer joins, may come back as None).
                keys = rhnSQL.fetchone_dict("""
                    select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                    from rhncontentssl
                    join rhncryptokey k1 on rhncontentssl.ssl_ca_cert_id = k1.id
                    left outer join rhncryptokey k2 on rhncontentssl.ssl_client_cert_id = k2.id
                    left outer join rhncryptokey k3 on rhncontentssl.ssl_client_key_id = k3.id
                    where rhncontentssl.content_source_id = :repo_id
                       or rhncontentssl.channel_family_id = :channel_family_id
                    """, repo_id=int(repo_id),
                    channel_family_id=int(channel_family_id))
                if keys and ('ca_cert' in keys):
                    plugin.set_ssl_options(keys['ca_cert'], keys['client_cert'], keys['client_key'])
            self.import_packages(plugin, repo_id, url)
            self.import_groups(plugin, url)
            if not self.no_errata:
                self.import_updates(plugin, url)
            # only for repos obtained from the DB
            if self.sync_kickstart and repo_label:
                try:
                    self.import_kickstart(plugin, url, repo_label)
                except:
                    # Keep the DB consistent before propagating
                    rhnSQL.rollback()
                    raise
        except Exception:
            # py2/py3-compatible way to fetch the active exception
            e = sys.exc_info()[1]
            self.error_msg("ERROR: %s" % e)
        if plugin is not None:
            plugin.clear_ssl_cache()
    if self.regen:
        # Queue repodata / errata-cache regeneration in taskomatic
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            [self.channel_label], [], "server.app.yumreposync")
        taskomatic.add_to_erratacache_queue(self.channel_label)
    self.update_date()
    rhnSQL.commit()
    total_time = datetime.now() - start_time
    self.print_msg("Sync completed.")
    # Drop the sub-second part of the timedelta for display
    self.print_msg("Total time: %s" % str(total_time).split('.')[0])
def lookupChannelArch(self, label, username, password):
    """Return the id of the channel architecture with `label`, or 0."""
    log_debug(3)
    self._auth(username, password)
    row = rhnSQL.fetchone_dict(
        "select id from rhnChannelArch where label = :label", label=label)
    return row['id'] if row else 0
def sync(self):
    """Trigger a reposync.

    Walks every (repo_id, url, repo_label) source for this channel and
    imports packages, groups, errata and (optionally) kickstart trees.
    Per-repo failures are logged and the remaining repos are processed.
    """
    start_time = datetime.now()
    for (repo_id, url, repo_label) in self.urls:
        print()
        self.print_msg("Repo URL: %s" % url)
        plugin = None
        # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line
        if url.startswith("uln://"):
            self.repo_plugin = self.load_plugin("uln")
        # pylint: disable=W0703
        try:
            plugin = self.repo_plugin(url, self.channel_label)
            if repo_id is not None:
                # SSL material for this content source: CA cert required,
                # client cert/key optional (outer joins).
                keys = rhnSQL.fetchone_dict("""
                    select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                    from rhncontentsourcessl
                    join rhncryptokey k1 on rhncontentsourcessl.ssl_ca_cert_id = k1.id
                    left outer join rhncryptokey k2 on rhncontentsourcessl.ssl_client_cert_id = k2.id
                    left outer join rhncryptokey k3 on rhncontentsourcessl.ssl_client_key_id = k3.id
                    where rhncontentsourcessl.content_source_id = :repo_id
                    """, repo_id=int(repo_id))
                if keys and ('ca_cert' in keys):
                    plugin.set_ssl_options(keys['ca_cert'], keys['client_cert'], keys['client_key'])
            self.import_packages(plugin, repo_id, url)
            self.import_groups(plugin, url)
            if not self.no_errata:
                self.import_updates(plugin, url)
            # only for repos obtained from the DB
            if self.sync_kickstart and repo_label:
                try:
                    self.import_kickstart(plugin, url, repo_label)
                except:
                    # Keep the DB consistent before propagating
                    rhnSQL.rollback()
                    raise
        except Exception:
            # py2/py3-compatible access to the active exception
            e = sys.exc_info()[1]
            self.error_msg("ERROR: %s" % e)
        if plugin is not None:
            plugin.clear_ssl_cache()
    if self.regen:
        # Queue repodata / errata-cache regeneration in taskomatic
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            [self.channel_label], [], "server.app.yumreposync")
        taskomatic.add_to_erratacache_queue(self.channel_label)
    self.update_date()
    rhnSQL.commit()
    total_time = datetime.now() - start_time
    self.print_msg("Sync completed.")
    # Drop sub-second precision for display
    self.print_msg("Total time: %s" % str(total_time).split('.')[0])
def auth_system(self):
    """Authorize the calling host as an enabled ISS slave.

    Raises rhnFault 2005 when ISS is disabled and 2004 when the host is
    not an enabled slave. Returns the remote hostname on success.
    """
    if CFG.DISABLE_ISS:
        raise rhnFault(2005, _('ISS is disabled on this server.'))
    slave_row = rhnSQL.fetchone_dict(
        "select 1 from rhnISSSlave where slave = :hostname and enabled = 'Y'",
        hostname=idn_puny_to_unicode(self.remote_hostname))
    if not slave_row:
        raise rhnFault(2004,
                       _('Server "%s" is not enabled for ISS.') % self.remote_hostname)
    return self.remote_hostname
def auth_system(self):
    """Authorize the calling host as an enabled ISS slave.

    Raises rhnFault 2005 when ISS is disabled on this satellite and 2004
    when the host is not enabled. Returns the remote hostname on success.
    """
    if CFG.DISABLE_ISS:
        raise rhnFault(2005, _('ISS is disabled on this satellite.'))
    slave_row = rhnSQL.fetchone_dict(
        "select 1 from rhnISSSlave where slave = :hostname and enabled = 'Y'",
        hostname=idn_puny_to_unicode(self.remote_hostname))
    if not slave_row:
        raise rhnFault(2004,
                       _('Server "%s" is not enabled for ISS.') % self.remote_hostname)
    return self.remote_hostname
def _get_file(self, config_channel, path, revision=None):
    """Fetch one config-file row; the latest revision when `revision` is None."""
    log_debug(2, config_channel, path)
    params = {"org_id": self.org_id,
              "config_channel": config_channel,
              "path": path}
    if revision is None:
        # Fetch the latest revision
        query = self._query_get_file_latest
    else:
        query = self._query_get_file_revision
        params["revision"] = revision
    log_debug(4, params)
    return rhnSQL.fetchone_dict(query, **params)
def lookupOrgId(self, org_id, username, password):
    """Resolve an org: first as a login name, then as a numeric org id.

    Returns the resolved org id, or '' when nothing matches.
    Raises rhnFault 42 when the value is neither a login nor numeric.
    """
    log_debug(3)
    self._auth(username, password)
    if not org_id:
        return ''
    # First interpret the value as a login name (case-insensitive)
    row = rhnSQL.fetchone_dict("""select org_id from web_contact where login_uc = UPPER(:org_id)""",
                               org_id=org_id)
    if row:
        return row['org_id']
    # Fall back to interpreting it as a numeric org id
    try:
        org_id = int(org_id)
    except ValueError:
        raise rhnFault(42, "Invalid org_id ",explain=0)
    row = rhnSQL.fetchone_dict("""select id from web_customer where id = :org_id""",
                               org_id=org_id)
    return row['id'] if row else ''
def sync(self):
    """Trigger a reposync.

    Iterates every (repo_id, url, repo_label) source of this channel and
    imports packages, groups, errata and (optionally) kickstart trees.
    Per-repo failures are logged and the remaining repos still run.
    """
    start_time = datetime.now()
    for (repo_id, url, repo_label) in self.urls:
        # Py3-compatible: was the bare py2 'print' statement
        print("")
        self.print_msg("Repo URL: %s" % url)
        plugin = None
        # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line
        if url.startswith("uln://"):
            self.repo_plugin = self.load_plugin("uln")
        # pylint: disable=W0703
        try:
            plugin = self.repo_plugin(url, self.channel_label)
            if repo_id is not None:
                # SSL material for this source: CA cert required (inner
                # join), client cert/key optional (outer joins).
                keys = rhnSQL.fetchone_dict("""
                    select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                    from rhncontentsourcessl
                    join rhncryptokey k1 on rhncontentsourcessl.ssl_ca_cert_id = k1.id
                    left outer join rhncryptokey k2 on rhncontentsourcessl.ssl_client_cert_id = k2.id
                    left outer join rhncryptokey k3 on rhncontentsourcessl.ssl_client_key_id = k3.id
                    where rhncontentsourcessl.content_source_id = :repo_id
                    """, repo_id=int(repo_id))
                # 'in' replaces the py2-only dict.has_key()
                if keys and 'ca_cert' in keys:
                    plugin.set_ssl_options(keys['ca_cert'], keys['client_cert'], keys['client_key'])
            self.import_packages(plugin, repo_id, url)
            self.import_groups(plugin, url)
            if not self.no_errata:
                self.import_updates(plugin, url)
            # only for repos obtained from the DB
            if self.sync_kickstart and repo_label:
                try:
                    self.import_kickstart(plugin, url, repo_label)
                except:
                    rhnSQL.rollback()
                    raise
        # 'as e' replaces the py2-only 'except Exception, e' syntax
        except Exception as e:
            self.error_msg("ERROR: %s" % e)
        if plugin is not None:
            plugin.clear_ssl_cache()
def sync(self):
    """Trigger a reposync.

    Iterates every (repo_id, url, repo_label) source of this channel and
    imports packages, groups, errata and (optionally) kickstart trees.
    Per-repo failures are logged; the remaining repos still run.
    """
    start_time = datetime.now()
    for (repo_id, url, repo_label) in self.urls:
        # Py3-compatible: was the bare py2 'print' statement
        print("")
        self.print_msg("Repo URL: %s" % url)
        plugin = None
        # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line
        if url.startswith("uln://"):
            self.repo_plugin = self.load_plugin("uln")
        # pylint: disable=W0703
        try:
            plugin = self.repo_plugin(url, self.channel_label)
            if repo_id is not None:
                # SSL material for this source: CA cert required (inner
                # join), client cert/key optional (outer joins).
                keys = rhnSQL.fetchone_dict(
                    """
                    select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                    from rhncontentsourcessl
                    join rhncryptokey k1 on rhncontentsourcessl.ssl_ca_cert_id = k1.id
                    left outer join rhncryptokey k2 on rhncontentsourcessl.ssl_client_cert_id = k2.id
                    left outer join rhncryptokey k3 on rhncontentsourcessl.ssl_client_key_id = k3.id
                    where rhncontentsourcessl.content_source_id = :repo_id
                    """,
                    repo_id=int(repo_id),
                )
                # 'in' replaces the py2-only dict.has_key()
                if keys and "ca_cert" in keys:
                    plugin.set_ssl_options(keys["ca_cert"], keys["client_cert"], keys["client_key"])
            self.import_packages(plugin, repo_id, url)
            self.import_groups(plugin, url)
            if not self.no_errata:
                self.import_updates(plugin, url)
            # only for repos obtained from the DB
            if self.sync_kickstart and repo_label:
                try:
                    self.import_kickstart(plugin, url, repo_label)
                except:
                    rhnSQL.rollback()
                    raise
        # 'as e' replaces the py2-only 'except Exception, e' syntax
        except Exception as e:
            self.error_msg("ERROR: %s" % e)
        if plugin is not None:
            plugin.clear_ssl_cache()
def _get_file(self, config_channel, path, revision=None):
    """Return the DB row for one config file.

    With `revision` unset the most recent revision is fetched; otherwise
    the specific revision is fetched.
    """
    log_debug(2, config_channel, path)
    bind_vars = dict(org_id=self.org_id,
                     config_channel=config_channel,
                     path=path)
    if revision is not None:
        bind_vars['revision'] = revision
        query = self._query_get_file_revision
    else:
        # Fetch the latest revision
        query = self._query_get_file_latest
    log_debug(4, bind_vars)
    return rhnSQL.fetchone_dict(query, **bind_vars)
def _insert_channel_family(self, channel_id):
    """Return channel-family info for `channel_id` as a two-key dict.

    A channel can currently be in at most one channel family; when the
    channel has none, both values are empty strings.
    """
    log_debug(3)
    row = rhnSQL.fetchone_dict("""
        select cfm.channel_family_id, cf.label channel_family
        from rhnChannelFamilyMembers cfm, rhnChannelFamily cf
        where cfm.channel_id = :channel_id
          and cfm.channel_family_id = cf.id
        """, channel_id=channel_id)
    if not row:
        return {'channel_family_id': '', 'channel_family': ''}
    return removeNone(row)
def lookupChannelFamily(self, name, username, password):
    """Return the rhnChannelFamily row for label `name`, or 0 if absent.

    Requires channel-admin permission; raises rhnFault 50 otherwise.
    """
    log_debug(3)
    authobj = self._auth(username, password)
    if not authobj.isChannelAdmin():
        raise rhnFault(50, "Invalid user permissions", explain=0)
    row = rhnSQL.fetchone_dict(
        "select * from rhnChannelFamily where label = :label", label=name)
    if not row:
        return 0
    family = removeNone(row)
    # Stringify timestamps so the result is XML-RPC serializable
    for column in ('modified', 'created'):
        family[column] = str(family[column])
    return family
def management_disable_file(self, dict):
    """Soft-delete a config file by flipping its state to 'dead'.

    Raises rhnFault 4011 when the file is absent or already dead.
    Returns an empty dict on success.
    """
    log_debug(1)
    self._get_and_validate_session(dict)
    config_channel = dict.get("config_channel")
    # XXX Validate the namespace
    path = dict.get("path")
    # Look up the id of the 'dead' state once
    dead_state_id = rhnSQL.Table("rhnConfigFileState", "label")["dead"]["id"]
    row = rhnSQL.fetchone_dict(self._query_lookup_config_file_by_channel,
                               config_channel=config_channel,
                               path=path)
    if row and row["state_id"] != dead_state_id:
        rhnSQL.execute(self._query_update_file_state,
                       config_file_id=row["id"],
                       state_id=dead_state_id)
        rhnSQL.commit()
        return {}
    raise rhnFault(4011,
                   "File %s does not exist in channel %s" % (path, config_channel),
                   explain=0)
def auth_system(self, req):
    """Authorize the requesting host as an enabled ISS slave.

    Returns a SQL fragment selecting the org ids the slave may see:
    all orgs when allow_all_orgs is 'Y', otherwise only the orgs mapped
    to the slave in rhnISSSlaveOrgs.
    """
    if CFG.DISABLE_ISS:
        raise rhnFault(2005, _('ISS is disabled on this satellite.'))
    remote_hostname = req.get_remote_host(apache.REMOTE_DOUBLE_REV)
    row = rhnSQL.fetchone_dict("""
        select id, allow_all_orgs
        from rhnISSSlave
        where slave = :hostname
          and enabled = 'Y'
        """, hostname=idn_puny_to_unicode(remote_hostname))
    if not row:
        raise rhnFault(2004,
                       _('Server "%s" is not enabled for ISS.') % remote_hostname)
    if row['allow_all_orgs'] == 'Y':
        return "select id from web_customer"
    return ("select rhnISSSlaveOrgs.org_id from rhnISSSlaveOrgs "
            "where slave_id = %d" % row['id'])
def sync(self):
    """Trigger a reposync.

    Iterates every (repo_id, url, repo_label) source of this channel and
    imports packages, groups, errata and (optionally) kickstart trees.
    Per-repo failures are logged; the remaining repos still run.
    """
    start_time = datetime.now()
    for (repo_id, url, repo_label) in self.urls:
        self.print_msg("Repo URL: %s" % url)
        plugin = None
        try:
            plugin = self.repo_plugin(url, self.channel_label)
            if repo_id is not None:
                # SSL material for this source: CA cert required (inner
                # join), client cert/key optional (outer joins).
                keys = rhnSQL.fetchone_dict(
                    """
                    select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                    from rhncontentsourcessl
                    join rhncryptokey k1 on rhncontentsourcessl.ssl_ca_cert_id = k1.id
                    left outer join rhncryptokey k2 on rhncontentsourcessl.ssl_client_cert_id = k2.id
                    left outer join rhncryptokey k3 on rhncontentsourcessl.ssl_client_key_id = k3.id
                    where rhncontentsourcessl.content_source_id = :repo_id
                    """,
                    repo_id=int(repo_id),
                )
                # 'in' replaces the py2-only dict.has_key()
                if keys and "ca_cert" in keys:
                    plugin.set_ssl_options(keys["ca_cert"], keys["client_cert"], keys["client_key"])
            self.import_packages(plugin, repo_id, url)
            self.import_groups(plugin, url)
            if not self.no_errata:
                self.import_updates(plugin, url)
            if self.sync_kickstart:
                try:
                    self.import_kickstart(plugin, url, repo_label)
                except:
                    rhnSQL.rollback()
                    raise
        # 'as e' replaces the py2-only 'except Exception, e' syntax
        except Exception as e:
            self.error_msg("ERROR: %s" % e)
        if plugin is not None:
            plugin.clear_ssl_cache()
def sync(self):
    """Trigger a reposync.

    Iterates every (repo_id, url, repo_label) source of this channel and
    imports packages, groups, errata and (optionally) kickstart trees.
    Per-repo failures are logged; the remaining repos still run.
    """
    start_time = datetime.now()
    for (repo_id, url, repo_label) in self.urls:
        self.print_msg("Repo URL: %s" % url)
        plugin = None
        try:
            plugin = self.repo_plugin(url, self.channel_label)
            if repo_id is not None:
                # SSL material for this source: CA cert required (inner
                # join), client cert/key optional (outer joins).
                keys = rhnSQL.fetchone_dict("""
                    select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                    from rhncontentsourcessl
                    join rhncryptokey k1 on rhncontentsourcessl.ssl_ca_cert_id = k1.id
                    left outer join rhncryptokey k2 on rhncontentsourcessl.ssl_client_cert_id = k2.id
                    left outer join rhncryptokey k3 on rhncontentsourcessl.ssl_client_key_id = k3.id
                    where rhncontentsourcessl.content_source_id = :repo_id
                    """, repo_id=int(repo_id))
                # 'in' replaces the py2-only dict.has_key()
                if keys and 'ca_cert' in keys:
                    plugin.set_ssl_options(keys['ca_cert'], keys['client_cert'], keys['client_key'])
            self.import_packages(plugin, repo_id, url)
            self.import_groups(plugin, url)
            if not self.no_errata:
                self.import_updates(plugin, url)
            if self.sync_kickstart:
                try:
                    self.import_kickstart(plugin, url, repo_label)
                except:
                    rhnSQL.rollback()
                    raise
        # 'as e' replaces the py2-only 'except Exception, e' syntax
        except Exception as e:
            self.error_msg("ERROR: %s" % e)
        if plugin is not None:
            plugin.clear_ssl_cache()
def management_remove_channel(self, dict):
    """Remove a configuration channel via the rhn_config.delete_channel
    stored procedure.

    Raises rhnFault 4009 when the channel does not exist and rhnFault 4005
    when the channel still contains files (ORA-02292 child-record found).
    """
    log_debug(1)
    self._get_and_validate_session(dict)
    config_channel = dict.get("config_channel")
    # XXX Validate the namespace
    row = rhnSQL.fetchone_dict(self._query_config_channel_by_label,
                               org_id=self.org_id, label=config_channel)
    if not row:
        raise rhnFault(4009, "Channel not found")
    delete_call = rhnSQL.Procedure("rhn_config.delete_channel")
    try:
        delete_call(row["id"])
    # NOTE: Python 2-only 'except E, e' syntax; the sibling version of this
    # method uses sys.exc_info() / raise_with_tb for py2/py3 compatibility.
    except rhnSQL.SQLError, e:
        errno = e.args[0]
        if errno == 2292:
            # Re-raise as rhnFault but keep the original traceback
            # (py2-only three-argument raise form).
            raise rhnFault(
                4005, "Cannot remove non-empty channel %s" % config_channel, explain=0
            ), None, sys.exc_info()[2]
        raise
def import_kickstart(self, plug, url, repo_label):
    """Detect and import a kickstartable tree from the repo at `url`.

    A tree is detected by the presence of images/pxeboot/. The tree and
    every non-RPM file reachable from the repo's HTML directory listings
    are recorded in rhnKickstartableTree / rhnKSTreeFile and mirrored
    under CFG.MOUNT_POINT.
    """
    # Derive a DB-safe tree label from the repo label
    ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '', repo_label.replace(' ', '_'))
    if len(ks_tree_label) < 4:
        ks_tree_label += "_repo"
    pxeboot_path = 'images/pxeboot/'
    pxeboot = plug.get_file(pxeboot_path)
    if pxeboot is None:
        # Not a kickstartable tree -- normalize the URL for the message only
        if not re.search(r'/$', url):
            url = url + '/'
        self.print_msg("Kickstartable tree not detected (no %s%s)" % (url, pxeboot_path))
        return
    # Skip when this tree was already imported for this org/channel
    if rhnSQL.fetchone_dict("""
            select id
            from rhnKickstartableTree
            where org_id = :org_id
              and channel_id = :channel_id
              and label = :label
            """, org_id=self.channel['org_id'],
            channel_id=self.channel['id'],
            label=ks_tree_label):
        print("Kickstartable tree %s already synced." % ks_tree_label)
        return
    # Allocate a new tree id from the sequence
    row = rhnSQL.fetchone_dict("""
        select sequence_nextval('rhn_kstree_id_seq') as id from dual
        """)
    ks_id = row['id']
    ks_path = 'rhn/kickstart/%s/%s' % (self.channel['org_id'], ks_tree_label)
    row = rhnSQL.execute("""
        insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id,
                kstree_type, install_type, last_modified, created, modified)
        values (:id, :org_id, :label, :base_path, :channel_id,
                ( select id from rhnKSTreeType where label = 'externally-managed'),
                ( select id from rhnKSInstallType where label = 'generic_rpm'),
                current_timestamp, current_timestamp, current_timestamp)
        """, id=ks_id, org_id=self.channel['org_id'], label=ks_tree_label,
        base_path=os.path.join(CFG.MOUNT_POINT, ks_path),
        channel_id=self.channel['id'])
    insert_h = rhnSQL.prepare("""
        insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id,
                file_size, last_modified, created, modified)
        values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
        """)
    # Breadth-first crawl of the repo's HTML directory listings
    dirs = ['']
    while len(dirs) > 0:
        d = dirs.pop(0)
        v = None
        if d == pxeboot_path:
            # Already fetched during detection; reuse it
            v = pxeboot
        else:
            v = plug.get_file(d)
        if v is None:
            continue
        # Extract href targets from the directory listing
        for s in (m.group(1) for m in re.finditer(r'(?i)<a href="(.+?)"', v)):
            # Skip absolute paths, query links, parent refs, foreign
            # schemes and RPMs (packages are imported separately)
            if (re.match(r'/', s) or re.search(r'\?', s)
                    or re.search(r'\.\.', s) or re.match(r'[a-zA-Z]+:', s)
                    or re.search(r'\.rpm$', s)):
                continue
            if re.search(r'/$', s):
                # Subdirectory -- enqueue for a later pass
                dirs.append(d + s)
                continue
            local_path = os.path.join(CFG.MOUNT_POINT, ks_path, d, s)
            if os.path.exists(local_path):
                print("File %s%s already present locally" % (d, s))
            else:
                print("Retrieving %s" % d + s)
                plug.get_file(d + s, os.path.join(CFG.MOUNT_POINT, ks_path))
            st = os.stat(local_path)
            insert_h.execute(id=ks_id, path=d + s,
                             checksum=getFileChecksum('sha256', local_path),
                             st_size=st.st_size, st_time=st.st_mtime)
    rhnSQL.commit()
def import_kickstart(self, plug, repo_label):
    """Detect and import a kickstartable tree using its treeinfo file.

    Detection is via treeinfo/.treeinfo; files are discovered by a
    breadth-first crawl of the repo's directory listings and fetched in
    parallel with ThreadedDownloader. DB rows go to rhnKickstartableTree
    and rhnKSTreeFile; files are mirrored under CFG.MOUNT_POINT.
    """
    ks_path = 'rhn/kickstart/'
    # Derive a DB-safe tree label from the repo label
    ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '',
                           repo_label.replace(' ', '_'))
    if len(ks_tree_label) < 4:
        ks_tree_label += "_repo"
    # construct ks_path and check we already have this KS tree synced
    id_request = """
        select id
        from rhnKickstartableTree
        where channel_id = :channel_id and label = :label
        """
    if self.org_id:
        ks_path += str(self.org_id) + '/' + ks_tree_label
        # Trees synced from external repositories are expected to have full path it database
        db_path = os.path.join(CFG.MOUNT_POINT, ks_path)
        row = rhnSQL.fetchone_dict(id_request + " and org_id = :org_id",
                                   channel_id=self.channel['id'],
                                   label=ks_tree_label, org_id=self.org_id)
    else:
        ks_path += ks_tree_label
        db_path = ks_path
        row = rhnSQL.fetchone_dict(id_request + " and org_id is NULL",
                                   channel_id=self.channel['id'],
                                   label=ks_tree_label)
    # Try both common spellings of the treeinfo file
    treeinfo_path = ['treeinfo', '.treeinfo']
    treeinfo_parser = None
    for path in treeinfo_path:
        log(1, "Trying " + path)
        treeinfo = plug.get_file(
            path, os.path.join(plug.repo.basecachedir, plug.name))
        if treeinfo:
            try:
                treeinfo_parser = TreeInfoParser(treeinfo)
                break
            except TreeInfoError:
                # Malformed candidate -- try the next spelling
                pass
    if not treeinfo_parser:
        log(0, "Kickstartable tree not detected (no valid treeinfo file)")
        return
    # Derive the install type from the treeinfo family if not forced
    if self.ks_install_type is None:
        family = treeinfo_parser.get_family()
        if family == 'Fedora':
            self.ks_install_type = 'fedora18'
        elif family == 'CentOS':
            self.ks_install_type = 'rhel_' + treeinfo_parser.get_major_version()
        else:
            self.ks_install_type = 'generic_rpm'
    fileutils.createPath(os.path.join(CFG.MOUNT_POINT, ks_path))
    # Make sure images are included
    to_download = set()
    for repo_path in treeinfo_parser.get_images():
        local_path = os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)
        # TODO: better check
        if not os.path.exists(local_path) or self.force_kickstart:
            to_download.add(repo_path)
    if row:
        log(0, "Kickstartable tree %s already synced. Updating content..."
            % ks_tree_label)
        ks_id = row['id']
    else:
        # New tree: allocate an id and register the tree row
        row = rhnSQL.fetchone_dict("""
            select sequence_nextval('rhn_kstree_id_seq') as id from dual
            """)
        ks_id = row['id']
        rhnSQL.execute("""
            insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id,
                    kstree_type, install_type, last_modified, created, modified)
            values (:id, :org_id, :label, :base_path, :channel_id,
                    ( select id from rhnKSTreeType where label = :ks_tree_type),
                    ( select id from rhnKSInstallType where label = :ks_install_type),
                    current_timestamp, current_timestamp, current_timestamp)""",
                       id=ks_id, org_id=self.org_id, label=ks_tree_label,
                       base_path=db_path, channel_id=self.channel['id'],
                       ks_tree_type=self.ks_tree_type,
                       ks_install_type=self.ks_install_type)
        log(0, "Added new kickstartable tree %s. Downloading content..."
            % ks_tree_label)
    insert_h = rhnSQL.prepare("""
        insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id,
                file_size, last_modified, created, modified)
        values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
        """)
    delete_h = rhnSQL.prepare("""
        delete from rhnKSTreeFile where kstree_id = :id and relative_filename = :path
        """)
    # Downloading/Updating content of KS Tree
    # start from root dir
    is_root = True
    dirs_queue = ['']
    log(0, "Gathering all files in kickstart repository...")
    while len(dirs_queue) > 0:
        cur_dir_name = dirs_queue.pop(0)
        cur_dir_html = plug.get_file(cur_dir_name)
        if cur_dir_html is None:
            continue
        blacklist = None
        if is_root:
            # Skip the package directory -- packages are synced separately
            blacklist = [treeinfo_parser.get_package_dir() + '/']
            is_root = False
        parser = KSDirParser(cur_dir_html, blacklist)
        for ks_file in parser.get_content():
            repo_path = cur_dir_name + ks_file['name']
            # if this is a directory, just add a name into queue (like BFS algorithm)
            if ks_file['type'] == 'DIR':
                dirs_queue.append(repo_path)
                continue
            if not os.path.exists(
                    os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)) or self.force_kickstart:
                to_download.add(repo_path)
    if to_download:
        log(0, "Downloading %d kickstart files." % len(to_download))
        progress_bar = ProgressBarLogger("Downloading kickstarts:",
                                         len(to_download))
        downloader = ThreadedDownloader(force=self.force_kickstart)
        for item in to_download:
            params = {}
            plug.set_download_parameters(
                params, item, os.path.join(CFG.MOUNT_POINT, ks_path, item))
            downloader.add(params)
        downloader.set_log_obj(progress_bar)
        downloader.run()
        log2disk(0, "Download finished.")
        for item in to_download:
            st = os.stat(os.path.join(CFG.MOUNT_POINT, ks_path, item))
            # update entity about current file in a database
            delete_h.execute(id=ks_id, path=item)
            insert_h.execute(id=ks_id, path=item,
                             checksum=getFileChecksum(
                                 'sha256',
                                 os.path.join(CFG.MOUNT_POINT, ks_path, item)),
                             st_size=st.st_size,
                             st_time=st.st_mtime)
    else:
        log(0, "No new kickstart files to download.")
    # set permissions recursively
    rhnSQL.commit()
def import_kickstart(self, plug, repo_label):
    """Detect and import a kickstartable tree using its treeinfo file.

    Detection is via treeinfo/.treeinfo. Discovered files are fetched
    (with up to 3 attempts each) and recorded in rhnKickstartableTree /
    rhnKSTreeFile; content is mirrored under CFG.MOUNT_POINT.

    Bug fix: the per-file retry loop used `if retry < 3:` with
    `for retry in range(3)`, which is always true -- the `raise` branch
    was unreachable, so after three failed downloads `st` was referenced
    unbound (NameError) instead of surfacing the OSError. The bound is
    now `retry < 2` so the last failure re-raises.
    """
    ks_path = 'rhn/kickstart/'
    # Derive a DB-safe tree label from the repo label
    ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '',
                           repo_label.replace(' ', '_'))
    if len(ks_tree_label) < 4:
        ks_tree_label += "_repo"
    # construct ks_path and check we already have this KS tree synced
    id_request = """
        select id
        from rhnKickstartableTree
        where channel_id = :channel_id and label = :label
        """
    if 'org_id' in self.channel and self.channel['org_id']:
        ks_path += str(self.channel['org_id']) + '/' + ks_tree_label
        # Trees synced from external repositories are expected to have full path it database
        db_path = os.path.join(CFG.MOUNT_POINT, ks_path)
        row = rhnSQL.fetchone_dict(id_request + " and org_id = :org_id",
                                   channel_id=self.channel['id'],
                                   label=ks_tree_label,
                                   org_id=self.channel['org_id'])
    else:
        ks_path += ks_tree_label
        db_path = ks_path
        row = rhnSQL.fetchone_dict(id_request + " and org_id is NULL",
                                   channel_id=self.channel['id'],
                                   label=ks_tree_label)
    # Try both common spellings of the treeinfo file
    treeinfo_path = ['treeinfo', '.treeinfo']
    treeinfo_parser = None
    for path in treeinfo_path:
        log(1, "Trying " + path)
        treeinfo = plug.get_file(path, os.path.join(CFG.MOUNT_POINT, ks_path))
        if treeinfo:
            try:
                treeinfo_parser = TreeInfoParser(treeinfo)
                break
            except TreeInfoError:
                # Malformed candidate -- try the next spelling
                pass
    if not treeinfo_parser:
        log(0, "Kickstartable tree not detected (no valid treeinfo file)")
        return
    # Make sure images are included
    to_download = []
    for repo_path in treeinfo_parser.get_images():
        local_path = os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)
        # TODO: better check
        if not os.path.exists(local_path):
            to_download.append(repo_path)
    if row:
        log(0, "Kickstartable tree %s already synced. Updating content..."
            % ks_tree_label)
        ks_id = row['id']
    else:
        # New tree: allocate an id and register the tree row
        row = rhnSQL.fetchone_dict("""
            select sequence_nextval('rhn_kstree_id_seq') as id from dual
            """)
        ks_id = row['id']
        rhnSQL.execute("""
            insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id,
                    kstree_type, install_type, last_modified, created, modified)
            values (:id, :org_id, :label, :base_path, :channel_id,
                    ( select id from rhnKSTreeType where label = :ks_tree_type),
                    ( select id from rhnKSInstallType where label = :ks_install_type),
                    current_timestamp, current_timestamp, current_timestamp)""",
                       id=ks_id, org_id=self.channel['org_id'],
                       label=ks_tree_label, base_path=db_path,
                       channel_id=self.channel['id'],
                       ks_tree_type=self.ks_tree_type,
                       ks_install_type=self.ks_install_type)
        log(0, "Added new kickstartable tree %s. Downloading content..."
            % ks_tree_label)
    insert_h = rhnSQL.prepare("""
        insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id,
                file_size, last_modified, created, modified)
        values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
        """)
    delete_h = rhnSQL.prepare("""
        delete from rhnKSTreeFile where kstree_id = :id and relative_filename = :path
        """)
    # Downloading/Updating content of KS Tree
    # start from root dir
    dirs_queue = ['']
    log(0, "Gathering all files in kickstart repository...")
    while len(dirs_queue) > 0:
        cur_dir_name = dirs_queue.pop(0)
        cur_dir_html = plug.get_file(cur_dir_name)
        if cur_dir_html is None:
            continue
        parser = KSDirParser(cur_dir_html)
        for ks_file in parser.get_content():
            repo_path = cur_dir_name + ks_file['name']
            # if this is a directory, just add a name into queue (like BFS algorithm)
            if ks_file['type'] == 'DIR':
                dirs_queue.append(repo_path)
                continue
            if repo_path not in to_download:
                to_download.append(repo_path)
    if to_download:
        log(0, "Downloading %d files." % len(to_download))
        for item in to_download:
            for retry in range(3):
                try:
                    log(1, "Retrieving %s" % item)
                    plug.get_file(item, os.path.join(CFG.MOUNT_POINT, ks_path))
                    st = os.stat(os.path.join(CFG.MOUNT_POINT, ks_path, item))
                    break
                except OSError:  # os.stat if the file wasn't downloaded
                    # FIX: was `retry < 3` (always true for range(3)),
                    # making the raise unreachable and leaving `st`
                    # unbound after the final failed attempt.
                    if retry < 2:
                        log(2, "Retry download %s: attempt #%d"
                            % (item, retry + 1))
                    else:
                        raise
            # update entity about current file in a database
            delete_h.execute(id=ks_id, path=item)
            insert_h.execute(id=ks_id, path=item,
                             checksum=getFileChecksum(
                                 'sha256',
                                 os.path.join(CFG.MOUNT_POINT, ks_path, item)),
                             st_size=st.st_size,
                             st_time=st.st_mtime)
    else:
        log(0, "Nothing to download.")
    rhnSQL.commit()
def sync(self, update_repodata=False):
    """Trigger a reposync.

    Iterates every configured source of this channel and imports packages
    (unless --no-packages), groups, errata and kickstart trees. When
    `update_repodata` is set the plugin's metadata cache is cleared first.
    Returns the elapsed time as a timedelta.
    """
    start_time = datetime.now()
    for (repo_id, url, repo_label, channel_family_id) in self.urls:
        log(0, "Repo URL: %s" % url)
        plugin = None
        # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line
        if url.startswith("uln://"):
            self.repo_plugin = self.load_plugin("uln")
        # pylint: disable=W0703
        try:
            # use modified relative_url as name of repo plugin, because
            # it used as name of cache directory as well
            relative_url = '_'.join(url.split('://')[1].split('/')[1:])
            plugin_name = relative_url.replace("?", "_").replace("&", "_").replace("=", "_")
            plugin = self.repo_plugin(url, plugin_name)
            if update_repodata:
                plugin.clear_cache()
            if repo_id is not None:
                # SSL material: CA cert required (inner join), client
                # cert/key optional (outer joins).
                keys = rhnSQL.fetchone_dict("""
                    select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                    from rhncontentssl
                    join rhncryptokey k1 on rhncontentssl.ssl_ca_cert_id = k1.id
                    left outer join rhncryptokey k2 on rhncontentssl.ssl_client_cert_id = k2.id
                    left outer join rhncryptokey k3 on rhncontentssl.ssl_client_key_id = k3.id
                    where rhncontentssl.content_source_id = :repo_id
                       or rhncontentssl.channel_family_id = :channel_family_id
                    """, repo_id=int(repo_id),
                    channel_family_id=int(channel_family_id))
                if keys and ('ca_cert' in keys):
                    plugin.set_ssl_options(keys['ca_cert'], keys['client_cert'], keys['client_key'])
            if not self.no_packages:
                self.import_packages(plugin, repo_id, url)
                self.import_groups(plugin, url)
            if not self.no_errata:
                self.import_updates(plugin, url)
            # only for repos obtained from the DB
            if self.sync_kickstart and repo_label:
                try:
                    self.import_kickstart(plugin, repo_label)
                except:
                    # Keep the DB consistent before propagating
                    rhnSQL.rollback()
                    raise
        except Exception:
            # py2/py3-compatible access to the active exception
            e = sys.exc_info()[1]
            log2stderr(0, "ERROR: %s" % e)
        if plugin is not None:
            plugin.clear_ssl_cache()
    if self.regen:
        # Queue repodata / errata-cache regeneration in taskomatic
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            [self.channel_label], [], "server.app.yumreposync")
        taskomatic.add_to_erratacache_queue(self.channel_label)
    self.update_date()
    rhnSQL.commit()
    elapsed_time = datetime.now() - start_time
    # Drop sub-second precision for display
    log(0, "Sync of channel completed in %s." % str(elapsed_time).split('.')[0])
    return elapsed_time
def sync(self, update_repodata=True):
    """Trigger a reposync of every repository attached to this channel.

    Iterates ``self.urls`` (tuples of repo id, URL and label), instantiates a
    repo plugin per URL and imports packages, groups, errata and — when
    enabled — kickstart trees.  File-system permissions under
    ``CFG.MOUNT_POINT/rhn`` are normalized to the ``apache`` group afterwards.

    :param update_repodata: when True, clear the plugin's metadata cache
                            before syncing so repodata is re-fetched.
    :return: ``(elapsed_time, sync_error)`` where ``sync_error`` is 0 on
             success, -1 on a global failure (no URLs / repo-level exception)
             or the count of packages that failed to sync.
    """
    failed_packages = 0
    sync_error = 0
    if not self.urls:
        sync_error = -1
    start_time = datetime.now()
    for (repo_id, url, repo_label) in self.urls:
        log(0, "Repo URL: %s" % url)
        plugin = None
        # If the repository uses a uln:// URL, switch to the ULN plugin,
        # overriding the command-line choice.
        if url.startswith("uln://"):
            self.repo_plugin = self.load_plugin("uln")
        # pylint: disable=W0703
        try:
            if repo_label:
                repo_name = repo_label
            else:
                # use modified relative_url as name of repo plugin, because
                # it is used as name of the cache directory as well
                relative_url = '_'.join(url.split('://')[1].split('/')[1:])
                repo_name = relative_url.replace("?", "_").replace("&", "_").replace("=", "_")
            plugin = self.repo_plugin(url, repo_name,
                                      org=str(self.channel['org_id'] or ''),
                                      channel_label=self.channel_label)
            if update_repodata:
                plugin.clear_cache()
            if repo_id is not None:
                keys = rhnSQL.fetchone_dict("""
                        select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                        from rhncontentsource cs
                        join rhncryptokey k1
                        on cs.ssl_ca_cert_id = k1.id
                        left outer join rhncryptokey k2
                        on cs.ssl_client_cert_id = k2.id
                        left outer join rhncryptokey k3
                        on cs.ssl_client_key_id = k3.id
                        where cs.id = :repo_id
                        """, repo_id=int(repo_id))
                if keys and ('ca_cert' in keys):
                    plugin.set_ssl_options(keys['ca_cert'], keys['client_cert'], keys['client_key'])
            if not self.no_packages:
                ret = self.import_packages(plugin, repo_id, url)
                failed_packages += ret
                self.import_groups(plugin, url)
            if not self.no_errata:
                self.import_updates(plugin, url)
            # only for repos obtained from the DB (repo_label is None otherwise)
            if self.sync_kickstart and repo_label:
                try:
                    self.import_kickstart(plugin, repo_label)
                except:
                    # roll back partial kickstart-tree rows before re-raising
                    rhnSQL.rollback()
                    raise
        except Exception:
            e = sys.exc_info()[1]
            log2(0, 0, "ERROR: %s" % e, stream=sys.stderr)
            log2disk(0, "ERROR: %s" % e)
            # FIX: was `sync_error == -1` — a no-op comparison (hidden behind
            # `# pylint: disable=W0104`), so repo-level failures were never
            # reported to the caller.  Assign, don't compare.
            sync_error = -1
        if plugin is not None:
            plugin.clear_ssl_cache()
    # queue repodata/errata-cache regeneration once, after all repos synced
    if self.regen:
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            [self.channel_label], [], "server.app.yumreposync")
        taskomatic.add_to_erratacache_queue(self.channel_label)
    self.update_date()
    rhnSQL.commit()
    # update permissions
    fileutils.createPath(os.path.join(CFG.MOUNT_POINT, 'rhn'))  # if the directory exists update ownership only
    for root, dirs, files in os.walk(os.path.join(CFG.MOUNT_POINT, 'rhn')):
        for d in dirs:
            fileutils.setPermsPath(os.path.join(root, d), group='apache')
        for f in files:
            fileutils.setPermsPath(os.path.join(root, f), group='apache')
    elapsed_time = datetime.now() - start_time
    log(0, "Sync of channel completed in %s." % str(elapsed_time).split('.')[0])
    # if there are no global problems, but some packages weren't synced
    if sync_error == 0 and failed_packages > 0:
        sync_error = failed_packages
    return elapsed_time, sync_error
def import_kickstart(self, plug, repo_label):
    """Sync a kickstartable tree (detected via a treeinfo file) for this channel.

    Derives a sanitized tree label from *repo_label*, locates or creates the
    matching ``rhnKickstartableTree`` row, walks the remote repository to
    collect files, downloads anything missing locally and records each file
    in ``rhnKSTreeFile``.  Returns early (after logging) when no valid
    treeinfo file is found.

    :param plug: repo plugin instance used to fetch remote files/directories
    :param repo_label: label of the source repository
    """
    ks_path = 'rhn/kickstart/'
    # sanitize the label: spaces -> underscores, drop disallowed characters
    ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '', repo_label.replace(' ', '_'))
    if len(ks_tree_label) < 4:
        ks_tree_label += "_repo"
    # construct ks_path and check we already have this KS tree synced
    id_request = """
            select id
            from rhnKickstartableTree
            where channel_id = :channel_id and label = :label
            """
    if 'org_id' in self.channel and self.channel['org_id']:
        ks_path += str(self.channel['org_id']) + '/' + ks_tree_label
        # Trees synced from external repositories are expected to have full path it database
        db_path = os.path.join(CFG.MOUNT_POINT, ks_path)
        row = rhnSQL.fetchone_dict(id_request + " and org_id = :org_id", channel_id=self.channel['id'],
                                   label=ks_tree_label, org_id=self.channel['org_id'])
    else:
        # org-less (vendor) trees keep a relative base path in the database
        ks_path += ks_tree_label
        db_path = ks_path
        row = rhnSQL.fetchone_dict(id_request + " and org_id is NULL", channel_id=self.channel['id'],
                                   label=ks_tree_label)
    # a tree is recognized by either 'treeinfo' or '.treeinfo' at its root
    treeinfo_path = ['treeinfo', '.treeinfo']
    treeinfo_parser = None
    for path in treeinfo_path:
        log(1, "Trying " + path)
        treeinfo = plug.get_file(path, os.path.join(CFG.MOUNT_POINT, ks_path))
        if treeinfo:
            try:
                treeinfo_parser = TreeInfoParser(treeinfo)
                break
            except TreeInfoError:
                pass
    if not treeinfo_parser:
        log(0, "Kickstartable tree not detected (no valid treeinfo file)")
        return
    # derive the install type from the treeinfo family unless forced by caller
    if self.ks_install_type is None:
        family = treeinfo_parser.get_family()
        if family == 'Fedora':
            self.ks_install_type = 'fedora18'
        elif family == 'CentOS':
            self.ks_install_type = 'rhel_' + treeinfo_parser.get_major_version()
        else:
            self.ks_install_type = 'generic_rpm'
    # Make sure images are included
    to_download = []
    for repo_path in treeinfo_parser.get_images():
        local_path = os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)
        # TODO: better check
        if not os.path.exists(local_path):
            to_download.append(repo_path)
    if row:
        log(0, "Kickstartable tree %s already synced. Updating content..."
            % ks_tree_label)
        ks_id = row['id']
    else:
        # allocate a new tree id and register the tree
        row = rhnSQL.fetchone_dict("""
            select sequence_nextval('rhn_kstree_id_seq') as id from dual
            """)
        ks_id = row['id']
        rhnSQL.execute("""
               insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id,
                                                 kstree_type, install_type, last_modified, created, modified)
               values (:id, :org_id, :label, :base_path, :channel_id,
                       ( select id from rhnKSTreeType where label = :ks_tree_type),
                       ( select id from rhnKSInstallType where label = :ks_install_type),
                       current_timestamp, current_timestamp, current_timestamp)""",
                       id=ks_id, org_id=self.channel['org_id'], label=ks_tree_label,
                       base_path=db_path, channel_id=self.channel['id'],
                       ks_tree_type=self.ks_tree_type, ks_install_type=self.ks_install_type)
        log(0, "Added new kickstartable tree %s. Downloading content..."
            % ks_tree_label)
    insert_h = rhnSQL.prepare("""
            insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size,
                                       last_modified, created, modified)
            values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                    epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
            """)
    delete_h = rhnSQL.prepare("""
            delete from rhnKSTreeFile
            where kstree_id = :id and relative_filename = :path
            """)
    # Downloading/Updating content of KS Tree
    # start from root dir
    dirs_queue = ['']
    log(0, "Gathering all files in kickstart repository...")
    while len(dirs_queue) > 0:
        cur_dir_name = dirs_queue.pop(0)
        cur_dir_html = plug.get_file(cur_dir_name)
        if cur_dir_html is None:
            continue
        parser = KSDirParser(cur_dir_html)
        for ks_file in parser.get_content():
            repo_path = cur_dir_name + ks_file['name']
            # if this is a directory, just add a name into queue (like BFS algorithm)
            if ks_file['type'] == 'DIR':
                dirs_queue.append(repo_path)
                continue
            if repo_path not in to_download:
                to_download.append(repo_path)
    if to_download:
        log(0, "Downloading %d files."
            % len(to_download))
        for item in to_download:
            # NOTE(review): `retry < 3` is always true for retry in range(3),
            # so the `raise` branch is unreachable and a third consecutive
            # failure leaves `st` unbound (NameError below) — confirm the
            # intended retry semantics.
            for retry in range(3):
                try:
                    log(1, "Retrieving %s" % item)
                    plug.get_file(item, os.path.join(CFG.MOUNT_POINT, ks_path))
                    st = os.stat(os.path.join(CFG.MOUNT_POINT, ks_path, item))
                    break
                except OSError:  # os.stat if the file wasn't downloaded
                    if retry < 3:
                        log(2, "Retry download %s: attempt #%d" % (item, retry + 1))
                    else:
                        raise
            # update entity about current file in a database
            delete_h.execute(id=ks_id, path=item)
            insert_h.execute(id=ks_id, path=item,
                             checksum=getFileChecksum('sha256', os.path.join(CFG.MOUNT_POINT, ks_path, item)),
                             st_size=st.st_size, st_time=st.st_mtime)
    else:
        log(0, "Nothing to download.")
    rhnSQL.commit()
def import_kickstart(self, plug, url, repo_label):
    """Sync a kickstartable tree (detected via images/pxeboot/) for this channel.

    Locates or creates the matching ``rhnKickstartableTree`` row, walks the
    remote directory listing breadth-first, downloads non-RPM files that are
    missing or stale locally and records each file in ``rhnKSTreeFile``.
    Returns early (after logging) when no pxeboot directory is present.

    :param plug: repo plugin instance used to fetch remote files/directories
    :param url: repository URL (used only for the not-detected message)
    :param repo_label: label of the source repository
    """
    pxeboot_path = 'images/pxeboot/'
    pxeboot = plug.get_file(pxeboot_path)
    if pxeboot is None:
        if not re.search(r'/$', url):
            url += '/'
        self.print_msg("Kickstartable tree not detected (no %s%s)" % (url, pxeboot_path))
        return
    ks_path = 'rhn/kickstart/'
    # sanitize the label: spaces -> underscores, drop disallowed characters
    ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '', repo_label.replace(' ', '_'))
    if len(ks_tree_label) < 4:
        ks_tree_label += "_repo"
    id_request = """
            select id
            from rhnKickstartableTree
            where channel_id = :channel_id and label = :label
            """
    if 'org_id' in self.channel and self.channel['org_id']:
        # FIX: was `str(org_id) + '/' + CFG.MOUNT_POINT + ks_tree_label`,
        # which spliced the absolute mount point into the middle of this
        # relative path (e.g. 'rhn/kickstart/1//var/satellite<label>') and
        # broke every os.path.join(CFG.MOUNT_POINT, ks_path, ...) below.
        ks_path += str(self.channel['org_id']) + '/' + ks_tree_label
        row = rhnSQL.fetchone_dict(id_request + " and org_id = :org_id", channel_id=self.channel['id'],
                                   label=ks_tree_label, org_id=self.channel['org_id'])
    else:
        ks_path += ks_tree_label
        row = rhnSQL.fetchone_dict(id_request + " and org_id is NULL", channel_id=self.channel['id'],
                                   label=ks_tree_label)
    if row:
        print("Kickstartable tree %s already synced with id = %d. Updating content..."
              % (ks_tree_label, row['id']))
        ks_id = row['id']
    else:
        # allocate a new tree id and register the tree
        row = rhnSQL.fetchone_dict("""
            select sequence_nextval('rhn_kstree_id_seq') as id from dual
            """)
        ks_id = row['id']
        rhnSQL.execute("""
               insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id,
                                                 kstree_type, install_type, last_modified, created, modified)
               values (:id, :org_id, :label, :base_path, :channel_id,
                       ( select id from rhnKSTreeType where label = :ks_tree_type),
                       ( select id from rhnKSInstallType where label = :ks_install_type),
                       current_timestamp, current_timestamp, current_timestamp)""",
                       id=ks_id, org_id=self.channel['org_id'], label=ks_tree_label,
                       base_path=ks_path, channel_id=self.channel['id'],
                       ks_tree_type=self.ks_tree_type, ks_install_type=self.ks_install_type)
        print("Added new kickstartable tree %s with id = %d. Downloading content..."
              % (ks_tree_label, row['id']))
    insert_h = rhnSQL.prepare("""
            insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size,
                                       last_modified, created, modified)
            values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                    epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
            """)
    delete_h = rhnSQL.prepare("""
            delete from rhnKSTreeFile
            where kstree_id = :id and relative_filename = :path
            """)
    # Downloading/Updating content of KS Tree
    # start from root dir
    dirs_queue = ['']
    while len(dirs_queue) > 0:
        cur_dir_name = dirs_queue.pop(0)
        if cur_dir_name == pxeboot_path:
            # the pxeboot listing was already fetched for detection — reuse it
            cur_dir_html = pxeboot
        else:
            cur_dir_html = plug.get_file(cur_dir_name)
        if cur_dir_html is None:
            continue
        parser = KSDirParser()
        parser.feed(cur_dir_html.split('<HR>')[1])
        for ks_file in parser.get_content():
            # do not download rpms, they are already downloaded by self.import_packages()
            if re.search(r'\.rpm$', ks_file['name']) or re.search(r'\.\.', ks_file['name']):
                continue
            # if this is a directory, just add a name into queue (like BFS algorithm)
            if ks_file['type'] == 'DIR':
                dirs_queue.append(cur_dir_name + ks_file['name'])
                continue
            local_path = os.path.join(CFG.MOUNT_POINT, ks_path, cur_dir_name, ks_file['name'])
            need_download = True
            if os.path.exists(local_path):
                t = os.path.getmtime(local_path)
                if ks_file['datetime'] == datetime.utcfromtimestamp(t).strftime('%d-%b-%Y %H:%M'):
                    # local copy matches the remote timestamp — keep it
                    print("File %s%s already present locally" % (cur_dir_name, ks_file['name']))
                    need_download = False
                    st = os.stat(local_path)
                else:
                    # stale copy — remove and re-download
                    os.unlink(os.path.join(CFG.MOUNT_POINT, ks_path, cur_dir_name + ks_file['name']))
            if need_download:
                for retry in range(3):
                    try:
                        print("Retrieving %s" % cur_dir_name + ks_file['name'])
                        plug.get_file(cur_dir_name + ks_file['name'], os.path.join(CFG.MOUNT_POINT, ks_path))
                        st = os.stat(local_path)
                        break
                    except OSError:  # os.stat if the file wasn't downloaded
                        # FIX: was `if retry < 3`, which is always true inside
                        # range(3) — the final failed attempt never re-raised
                        # and left `st` unbound, causing a NameError below.
                        if retry < 2:
                            print("Retry download %s: attempt #%d" % (cur_dir_name +
                                                                      ks_file['name'], retry + 1))
                        else:
                            raise
            # update entity about current file in a database
            delete_h.execute(id=ks_id, path=(cur_dir_name + ks_file['name']))
            insert_h.execute(id=ks_id, path=(cur_dir_name + ks_file['name']),
                             checksum=getFileChecksum('sha256', local_path),
                             st_size=st.st_size, st_time=st.st_mtime)
    rhnSQL.commit()
def hasISSMaster():
    """Return True when an ISS master is currently configured in the DB."""
    rhnSQL.initDB()
    row = rhnSQL.fetchone_dict(
        "select 1 from rhnISSMaster where is_current_master = 'Y'")
    return bool(row)
def hasISSSlaves():
    """Return True when at least one enabled ISS slave is registered."""
    rhnSQL.initDB()
    slave = rhnSQL.fetchone_dict("select 1 from rhnISSSlave where enabled = 'Y'")
    return bool(slave)
def hasISSMaster():
    """Return True when some rhnISSMaster row is flagged as the current master."""
    rhnSQL.initDB()
    query = "select 1 from rhnISSMaster where is_current_master = 'Y'"
    if not rhnSQL.fetchone_dict(query):
        return False
    return True
def import_kickstart(self, plug, url, repo_label):
    """Sync a kickstartable tree for this channel (legacy, Python-2 variant).

    Detects a tree via ``images/pxeboot/``, registers a new
    ``rhnKickstartableTree`` row (skipping entirely if the tree is already
    synced), then scrapes the remote directory listing for hrefs, downloads
    missing non-RPM files and records each in ``rhnKSTreeFile``.

    :param plug: repo plugin instance used to fetch remote files/directories
    :param url: repository URL (used only for the error message)
    :param repo_label: label of the source repository
    """
    # sanitize the label: spaces -> underscores, drop disallowed characters
    ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '', repo_label.replace(' ', '_'))
    if len(ks_tree_label) < 4:
        ks_tree_label += "_repo"
    pxeboot_path = 'images/pxeboot/'
    pxeboot = plug.get_file(pxeboot_path)
    if pxeboot is None:
        if not re.search(r'/$', url):
            url = url + '/'
        self.error_msg("ERROR: kickstartable tree not detected (no %s%s)" % (url, pxeboot_path))
        return
    # already synced trees are never updated by this variant
    if rhnSQL.fetchone_dict("""
            select id
            from rhnKickstartableTree
            where org_id = :org_id and channel_id = :channel_id and label = :label
            """, org_id=self.channel['org_id'], channel_id=self.channel['id'],
            label=ks_tree_label):
        print "Kickstartable tree %s already synced." % ks_tree_label
        return
    # allocate a new tree id and register the tree
    row = rhnSQL.fetchone_dict("""
        select sequence_nextval('rhn_kstree_id_seq') as id from dual
        """)
    ks_id = row['id']
    ks_path = 'rhn/kickstart/%s/%s' % (self.channel['org_id'], ks_tree_label)
    row = rhnSQL.execute("""
           insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id,
                                             kstree_type, install_type, last_modified, created, modified)
           values (:id, :org_id, :label, :base_path, :channel_id,
                   ( select id from rhnKSTreeType where label = 'externally-managed'),
                   ( select id from rhnKSInstallType where label = 'generic_rpm'),
                   current_timestamp, current_timestamp, current_timestamp)
           """, id=ks_id, org_id=self.channel['org_id'], label=ks_tree_label,
           base_path=os.path.join(CFG.MOUNT_POINT, ks_path), channel_id=self.channel['id'])
    insert_h = rhnSQL.prepare("""
            insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size,
                                       last_modified, created, modified)
            values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                    epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
            """)
    # breadth-first walk of the remote directory listing, starting at the root
    dirs = ['']
    while len(dirs) > 0:
        d = dirs.pop(0)
        v = None
        if d == pxeboot_path:
            # the pxeboot listing was already fetched for detection — reuse it
            v = pxeboot
        else:
            v = plug.get_file(d)
        if v is None:
            continue
        # scrape hrefs out of the HTML directory index
        for s in (m.group(1) for m in re.finditer(r'(?i)<a href="(.+?)"', v)):
            # skip absolute links, query links, parent refs, other schemes
            # and RPMs (RPMs are handled by the package import)
            if (re.match(r'/', s) or
                    re.search(r'\?', s) or
                    re.search(r'\.\.', s) or
                    re.match(r'[a-zA-Z]+:', s) or
                    re.search(r'\.rpm$', s)):
                continue
            if re.search(r'/$', s):
                dirs.append(d + s)
                continue
            local_path = os.path.join(CFG.MOUNT_POINT, ks_path, d, s)
            if os.path.exists(local_path):
                print "File %s%s already present locally" % (d, s)
            else:
                print "Retrieving %s" % d + s
                plug.get_file(d + s, os.path.join(CFG.MOUNT_POINT, ks_path))
            # record the (downloaded or pre-existing) file in the database
            st = os.stat(local_path)
            insert_h.execute(id=ks_id, path=d + s,
                             checksum=getFileChecksum('sha256', local_path),
                             st_size=st.st_size, st_time=st.st_mtime)
    rhnSQL.commit()
def sync(self, update_repodata=False):
    """Synchronize every repository attached to this channel.

    For each configured URL a repo plugin is created and packages, groups,
    errata and (optionally) kickstart trees are imported.  The transaction is
    committed once at the end and the elapsed wall-clock time is returned.

    :param update_repodata: when True, drop the plugin's metadata cache first
    :return: elapsed sync duration as a ``timedelta``
    """
    started = datetime.now()
    for repo_id, url, repo_label in self.urls:
        log(0, "Repo URL: %s" % url)
        plugin = None
        # uln:// repositories always force the ULN plugin, regardless of the
        # plugin selected on the command line.
        if url.startswith("uln://"):
            self.repo_plugin = self.load_plugin("uln")
        # pylint: disable=W0703
        try:
            # The mangled relative URL doubles as the plugin name and hence
            # as the name of its on-disk cache directory.
            rel = '_'.join(url.split('://')[1].split('/')[1:])
            cache_name = rel.replace("?", "_").replace("&", "_").replace("=", "_")
            plugin = self.repo_plugin(url, cache_name)
            if update_repodata:
                plugin.clear_cache()
            if repo_id is not None:
                # Pull any SSL material attached to this content source.
                keys = rhnSQL.fetchone_dict("""
                        select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key
                        from rhncontentsource cs
                        join rhncryptokey k1
                        on cs.ssl_ca_cert_id = k1.id
                        left outer join rhncryptokey k2
                        on cs.ssl_client_cert_id = k2.id
                        left outer join rhncryptokey k3
                        on cs.ssl_client_key_id = k3.id
                        where cs.id = :repo_id
                        """, repo_id=int(repo_id))
                if keys and 'ca_cert' in keys:
                    plugin.set_ssl_options(keys['ca_cert'],
                                           keys['client_cert'],
                                           keys['client_key'])
            if not self.no_packages:
                self.import_packages(plugin, repo_id, url)
                self.import_groups(plugin, url)
            if not self.no_errata:
                self.import_updates(plugin, url)
            # Kickstart trees exist only for repos that came from the DB
            # (those are the ones carrying a label).
            if self.sync_kickstart and repo_label:
                try:
                    self.import_kickstart(plugin, repo_label)
                except:
                    # Undo any partial tree rows before propagating.
                    rhnSQL.rollback()
                    raise
        except Exception:
            # Best-effort: report the failure and move on to the next repo.
            log2stderr(0, "ERROR: %s" % sys.exc_info()[1])
        if plugin is not None:
            plugin.clear_ssl_cache()
    # Regeneration tasks are queued once, after every repo has been handled.
    if self.regen:
        taskomatic.add_to_repodata_queue_for_channel_package_subscription(
            [self.channel_label], [], "server.app.yumreposync")
        taskomatic.add_to_erratacache_queue(self.channel_label)
    self.update_date()
    rhnSQL.commit()
    elapsed_time = datetime.now() - started
    log(0, "Sync of channel completed in %s." % str(elapsed_time).split('.')[0])
    return elapsed_time