def are_we_overwriting(flist): """ Check for whether we are overwriting the program or creating it new """ already = [] for filename in flist: if os.path.exists(filename): already.append(filename) if len(already) < 1: return elif len(already) < 2: report = '%s already exists' % already[0] elif len(already) < 3: report = '%s and %s already exist' % (already[0], already[1]) else: report = ', '.join(already) sep = ', and ' for item in already: report = sep + item + report sep = ', ' report = report[2:] answer = raw_input('%s. Are you sure? > ' % report) if not re.search(r'^\s*[Yy]', answer): sys.exit(1) U.safe_unlink(already)
def check_project(self, project, try_again=True):
    """ Checks if the current checkout of a project is up-to-date, and
        queues tasks if necessary. """
    project_dir = os.path.join(self.dest_dir, project)
    util.safe_mkdir_p(project_dir)

    filename = os.path.join(project_dir, "_status")

    try:
        url = osc_copy.makeurl(self.conf.apiurl, ["status", "project", project])
        length = self._download_url_to_file(url, filename)

        if length == 0:
            # metadata files should never be empty
            if try_again:
                util.safe_unlink(filename)
                return self.check_project(project, False)

    except (urllib2.HTTPError, urllib2.URLError, socket.error), e:
        util.safe_unlink(filename)

        if type(e) == urllib2.HTTPError:
            if e.code == 404:
                print >> sys.stderr, "Project %s doesn't exist." % (project,)
            elif e.code == 400:
                # the status page doesn't always work :/
                self.queue_checkout_project(project, primary=False,
                                            force_simple_checkout=True,
                                            no_config=True)
        elif try_again:
            self.check_project(project, False)
        else:
            print >> sys.stderr, "Cannot get status of %s: %s" % (project, e)

        return
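# The checkout/check methods below all delegate to self._download_url_to_file,
# whose body is not included in these snippets. A plausible stand-in, assuming
# it streams the URL into the file and returns the number of bytes written
# (Python 3 urllib; the real method presumably reuses the class's connection
# setup):
import os
import shutil
import urllib.request


def download_url_to_file(url, filename):
    """Stream `url` into `filename` and return how many bytes were written."""
    with urllib.request.urlopen(url) as response:
        with open(filename, 'wb') as fout:
            shutil.copyfileobj(response, fout)
    return os.path.getsize(filename)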
def checkout_project_pkgmeta(self, project, try_again=True):
    """ Checks out the packages metadata of all packages in a project. """
    project_dir = os.path.join(self.dest_dir, project)
    util.safe_mkdir_p(project_dir)

    filename = os.path.join(project_dir, "_pkgmeta")
    tmpfilename = filename + ".new"

    try:
        url = osc_copy.makeurl(
            self.conf.apiurl, ["search", "package"],
            ["match=%s" % urllib.quote("@project='%s'" % project)])
        length = self._download_url_to_file(url, tmpfilename)

        if length == 0:
            # metadata files should never be empty
            if try_again:
                util.safe_unlink(tmpfilename)
                return self.checkout_project_pkgmeta(project, False)

        os.rename(tmpfilename, filename)

    except (urllib2.HTTPError, urllib2.URLError, socket.error), e:
        util.safe_unlink(tmpfilename)

        if type(e) == urllib2.HTTPError and e.code == 404:
            print >> sys.stderr, "Project %s doesn't exist." % (project,)
        elif try_again:
            self.checkout_project_pkgmeta(project, False)
        else:
            print >> sys.stderr, "Cannot get packages metadata of %s: %s" % (project, e)

        return
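# For reference, the search query above URL-encodes an xpath-style match
# expression. With a hypothetical apiurl and project name, the request URL
# comes out roughly like this (Python 3 urllib.parse shown for illustration):
import urllib.parse

apiurl = 'https://api.opensuse.org'   # hypothetical
project = 'GNOME:Factory'             # hypothetical
match = urllib.parse.quote("@project='%s'" % project)
print('%s/search/package?match=%s' % (apiurl, match))
# https://api.opensuse.org/search/package?match=%40project%3D%27GNOME%3AFactory%27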
def _get_packages_in_project(self, project, try_again=True):
    project_dir = os.path.join(self.dest_dir, project)
    util.safe_mkdir_p(project_dir)

    filename = os.path.join(project_dir, '_pkglist')

    try:
        url = osc_copy.makeurl(self.conf.apiurl, ['public', 'source', project])
        length = self._download_url_to_file(url, filename)

        if length == 0:
            # metadata files should never be empty
            if try_again:
                util.safe_unlink(filename)
                return self._get_packages_in_project(project, False)

    except (urllib2.HTTPError, urllib2.URLError, socket.error), e:
        util.safe_unlink(filename)

        if type(e) == urllib2.HTTPError and e.code == 404:
            return (None, 'Project %s doesn\'t exist.' % (project,))
        elif try_again:
            return self._get_packages_in_project(project, False)
        else:
            return (None, str(e))
def _get_file(self, project, package, filename, size, revision=None, try_again=True):
    """ Download a file of a package. """
    package_dir = os.path.join(self.dest_dir, project, package)

    destfile = os.path.join(package_dir, filename)
    tmpdestfile = destfile + '.new'

    try:
        query = None
        if revision:
            query = {'rev': revision}
        url = osc_copy.makeurl(
            self.conf.apiurl,
            ['public', 'source', project, package,
             urllib.request.pathname2url(filename)],
            query=query)
        length = self._download_url_to_file(url, tmpdestfile)

        if length != size:
            if try_again:
                util.safe_unlink(tmpdestfile)
                return self._get_file(project, package, filename, size,
                                      revision, False)

        os.rename(tmpdestfile, destfile)

    except (urllib.error.HTTPError, urllib.error.URLError, socket.error) as e:
        util.safe_unlink(tmpdestfile)

        if type(e) == urllib.error.HTTPError and e.code == 404:
            print('File %s in package %s of project %s doesn\'t exist.'
                  % (filename, package, project), file=sys.stderr)
        elif try_again:
            self._get_file(project, package, filename, size, revision, False)
        else:
            print('Cannot get file %s for %s from %s: %s (queueing for next run)'
                  % (filename, package, project, e), file=sys.stderr)
            self.error_queue.put((project, package))

        return
def _get_files_metadata(self, project, package, save_basename, revision=None, try_again=True):
    """ Download the file list of a package. """
    package_dir = os.path.join(self.dest_dir, project, package)

    filename = os.path.join(package_dir, save_basename)
    tmpfilename = filename + '.new'

    # download files metadata
    try:
        query = None
        if revision:
            query = {'rev': revision}
        url = osc_copy.makeurl(self.conf.apiurl,
                               ['public', 'source', project, package],
                               query=query)
        length = self._download_url_to_file(url, tmpfilename)

        if length == 0:
            # metadata files should never be empty
            if try_again:
                util.safe_unlink(tmpfilename)
                return self._get_files_metadata(project, package,
                                                save_basename, revision, False)

        os.rename(tmpfilename, filename)

    except (urllib2.HTTPError, urllib2.URLError, socket.error), e:
        util.safe_unlink(tmpfilename)

        if type(e) == urllib2.HTTPError and e.code == 404:
            print >> sys.stderr, 'Package %s doesn\'t exist in %s.' % (package, project)
        elif try_again:
            return self._get_files_metadata(project, package, save_basename,
                                            revision, False)
        elif revision:
            print >> sys.stderr, 'Cannot download file list of %s from %s with specified revision: %s' % (package, project, e)
        else:
            print >> sys.stderr, 'Cannot download file list of %s from %s: %s (queueing for next run)' % (package, project, e)
            self.error_queue.put((project, package))

        return None
def checkout_package_meta(self, project, package, try_again=True):
    """ Checks out the metadata of a package.

        If we're interested in devel projects of this project, and the
        devel package is not in a checked out devel project, then we queue
        a checkout of this devel project. """
    package_dir = os.path.join(self.dest_dir, project, package)
    util.safe_mkdir_p(package_dir)

    filename = os.path.join(package_dir, '_meta')
    tmpfilename = filename + '.new'

    try:
        url = osc_copy.makeurl(self.conf.apiurl,
                               ['public', 'source', project, package, '_meta'])
        length = self._download_url_to_file(url, tmpfilename)

        if length == 0:
            # metadata files should never be empty
            if try_again:
                util.safe_unlink(tmpfilename)
                return self.checkout_package_meta(project, package, False)

        os.rename(tmpfilename, filename)

    except (urllib2.HTTPError, urllib2.URLError, socket.error), e:
        util.safe_unlink(tmpfilename)

        if type(e) == urllib2.HTTPError and e.code == 404:
            print >> sys.stderr, 'Package %s of project %s doesn\'t exist.' % (package, project)
        elif try_again:
            self.checkout_package_meta(project, package, False)
        else:
            print >> sys.stderr, 'Cannot get metadata of package %s in %s: %s (queueing for next run)' % (package, project, e)
            self.error_queue.put((project, package))

        return
def _get_file(self, project, package, filename, size, revision=None, try_again=True):
    """ Download a file of a package. """
    package_dir = os.path.join(self.dest_dir, project, package)

    destfile = os.path.join(package_dir, filename)
    tmpdestfile = destfile + ".new"

    try:
        query = None
        if revision:
            query = {"rev": revision}
        url = osc_copy.makeurl(
            self.conf.apiurl,
            ["public", "source", project, package, urllib.pathname2url(filename)],
            query=query)
        length = self._download_url_to_file(url, tmpdestfile)

        if length != size:
            if try_again:
                util.safe_unlink(tmpdestfile)
                return self._get_file(project, package, filename, size,
                                      revision, False)

        os.rename(tmpdestfile, destfile)

    except (urllib2.HTTPError, urllib2.URLError, socket.error), e:
        util.safe_unlink(tmpdestfile)

        if type(e) == urllib2.HTTPError and e.code == 404:
            print >> sys.stderr, "File %s in package %s of project %s doesn't exist." % (filename, package, project)
        elif try_again:
            self._get_file(project, package, filename, size, revision, False)
        else:
            print >> sys.stderr, "Cannot get file %s for %s from %s: %s (queueing for next run)" % (filename, package, project, e)
            self.error_queue.put((project, package))

        return
def _get_packages_in_project(self, project, try_again=True):
    project_dir = os.path.join(self.dest_dir, project)
    util.safe_mkdir_p(project_dir)

    filename = os.path.join(project_dir, '_pkglist')

    try:
        url = osc_copy.makeurl(self.conf.apiurl, ['public', 'source', project])
        length = self._download_url_to_file(url, filename)

        if length == 0:
            # metadata files should never be empty
            if try_again:
                util.safe_unlink(filename)
                return self._get_packages_in_project(project, False)

    except (urllib.error.HTTPError, urllib.error.URLError, socket.error) as e:
        util.safe_unlink(filename)

        if type(e) == urllib.error.HTTPError and e.code == 404:
            return (None, 'Project %s doesn\'t exist.' % (project,))
        elif try_again:
            return self._get_packages_in_project(project, False)
        else:
            return (None, str(e))

    try:
        root = ET.parse(filename).getroot()
    except SyntaxError as e:
        util.safe_unlink(filename)

        if try_again:
            return self._get_packages_in_project(project, False)
        else:
            return (None, 'Cannot parse list of packages in %s: %s' % (project, e))

    packages = [node.get('name') for node in root.findall('entry')]
    util.safe_unlink(filename)

    return (packages, None)
def check_project(self, project, try_again=True):
    """ Checks if the current checkout of a project is up-to-date, and
        queues tasks if necessary. """
    project_dir = os.path.join(self.dest_dir, project)
    util.safe_mkdir_p(project_dir)

    filename = os.path.join(project_dir, '_status')

    try:
        url = osc_copy.makeurl(self.conf.apiurl, ['status', 'project', project])
        length = self._download_url_to_file(url, filename)

        if length == 0:
            # metadata files should never be empty
            if try_again:
                util.safe_unlink(filename)
                return self.check_project(project, False)

    except (urllib.error.HTTPError, urllib.error.URLError, socket.error) as e:
        util.safe_unlink(filename)

        if type(e) == urllib.error.HTTPError:
            if e.code == 404:
                print('Project %s doesn\'t exist.' % (project,), file=sys.stderr)
            elif e.code == 400:
                # the status page doesn't always work :/
                self.queue_checkout_project(project, primary=False,
                                            force_simple_checkout=True,
                                            no_config=True)
        elif try_again:
            self.check_project(project, False)
        else:
            print('Cannot get status of %s: %s' % (project, e), file=sys.stderr)

        return

    try:
        packages_node = ET.parse(filename).getroot()
    except SyntaxError as e:
        util.safe_unlink(filename)

        if try_again:
            return self.check_project(project, False)
        else:
            print('Cannot parse status of %s: %s' % (project, e), file=sys.stderr)
            return

    # We will have to remove all subdirectories that just don't belong to
    # this project anymore.
    subdirs_to_remove = [file for file in os.listdir(project_dir)
                         if os.path.isdir(os.path.join(project_dir, file))]

    # Here's what we check to know if a package needs to be checked out again:
    #  - if there's no subdir
    #  - if it's a link:
    #    - check that the md5 from the status is the xsrcmd5 from the file
    #      list
    #    - check that we have _files-expanded and that all spec files are
    #      checked out
    #  - if it's not a link: check that the md5 from the status is the
    #    srcmd5 from the file list
    for node in packages_node.findall('package'):
        name = node.get('name')
        srcmd5 = node.get('srcmd5')
        is_link = len(node.findall('link')) > 0

        try:
            subdirs_to_remove.remove(name)
        except ValueError:
            pass

        files = os.path.join(project_dir, name, '_files')
        if not os.path.exists(files):
            self.queue_checkout_package(project, name, primary=False)
            continue

        try:
            files_root = ET.parse(files).getroot()
        except SyntaxError:
            self.queue_checkout_package(project, name, primary=False)
            continue

        if is_link:
            previous_srcmd5 = files_root.get('xsrcmd5')
        else:
            previous_srcmd5 = files_root.get('srcmd5')

        if srcmd5 != previous_srcmd5:
            self.queue_checkout_package(project, name, primary=False)

        # make sure we have all spec files
        if is_link:
            # for links, we open the list of files when expanded
            files = os.path.join(project_dir, name, '_files-expanded')
            if not os.path.exists(files):
                self.queue_checkout_package(project, name, primary=False)
                continue

            try:
                files_root = ET.parse(files).getroot()
            except SyntaxError:
                self.queue_checkout_package(project, name, primary=False)
                continue

        cont = False

        for entry in files_root.findall('entry'):
            # don't reuse `filename` here: it still points at the _status
            # file that gets removed at the end of this method
            entry_name = entry.get('name')
            if entry_name.endswith('.spec'):
                specfile = os.path.join(project_dir, name, entry_name)
                if not os.path.exists(specfile):
                    self.queue_checkout_package(project, name, primary=False)
                    cont = True
                    break

        if cont:
            continue

    # Remove useless subdirectories
    for subdir in subdirs_to_remove:
        shutil.rmtree(os.path.join(project_dir, subdir))

    util.safe_unlink(filename)
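# For reference, check_project assumes a _status payload shaped roughly like
# the hypothetical sample below (the real OBS response carries more
# attributes); srcmd5 is what gets compared against the cached _files document:
import xml.etree.ElementTree as ET

SAMPLE_STATUS = """\
<project name="openSUSE:Factory">
  <package name="gedit" srcmd5="d41d8cd98f00b204e9800998ecf8427e">
    <link project="GNOME:Factory"/>
  </package>
</project>"""

for node in ET.fromstring(SAMPLE_STATUS).findall('package'):
    is_link = len(node.findall('link')) > 0
    print(node.get('name'), node.get('srcmd5'), is_link)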
def checkout_package_meta(self, project, package, try_again=True):
    """ Checks out the metadata of a package.

        If we're interested in devel projects of this project, and the
        devel package is not in a checked out devel project, then we queue
        a checkout of this devel project. """
    package_dir = os.path.join(self.dest_dir, project, package)
    util.safe_mkdir_p(package_dir)

    filename = os.path.join(package_dir, '_meta')
    tmpfilename = filename + '.new'

    try:
        url = osc_copy.makeurl(self.conf.apiurl,
                               ['public', 'source', project, package, '_meta'])
        length = self._download_url_to_file(url, tmpfilename)

        if length == 0:
            # metadata files should never be empty
            if try_again:
                util.safe_unlink(tmpfilename)
                return self.checkout_package_meta(project, package, False)

        os.rename(tmpfilename, filename)

    except (urllib.error.HTTPError, urllib.error.URLError, socket.error) as e:
        util.safe_unlink(tmpfilename)

        if type(e) == urllib.error.HTTPError and e.code == 404:
            print('Package %s of project %s doesn\'t exist.' % (package, project),
                  file=sys.stderr)
        elif try_again:
            self.checkout_package_meta(project, package, False)
        else:
            print('Cannot get metadata of package %s in %s: %s (queueing for next run)'
                  % (package, project, e), file=sys.stderr)
            self.error_queue.put((project, package))

        return

    # Are we interested in devel projects of this project, and if yes,
    # should we check out the devel project if needed?
    if project not in self.conf.projects:
        return
    if not self.conf.projects[project].checkout_devel_projects:
        return

    try:
        package_node = ET.parse(filename).getroot()
    except SyntaxError:
        return

    devel_node = package_node.find('devel')
    if devel_node is None:
        return

    devel_project = devel_node.get('project')

    project_dir = os.path.join(self.dest_dir, devel_project)
    if not os.path.exists(project_dir):
        self.queue_checkout_project(devel_project, parent=project, primary=False)
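# The devel-project check above expects a _meta document shaped roughly like
# this hypothetical sample (the real payload has more fields):
import xml.etree.ElementTree as ET

SAMPLE_META = """\
<package name="gedit" project="openSUSE:Factory">
  <title>gedit</title>
  <devel project="GNOME:Factory" package="gedit"/>
</package>"""

package_node = ET.fromstring(SAMPLE_META)
devel_node = package_node.find('devel')
if devel_node is not None:
    print(devel_node.get('project'))  # GNOME:Factory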
# (excerpt: error handling and status parsing from a Python 2 variant of
# check_project)
            if e.code == 404:
                print >> sys.stderr, "Project %s doesn't exist." % (project,)
            elif e.code == 400:
                # the status page doesn't always work :/
                self.queue_checkout_project(project, primary=False,
                                            force_simple_checkout=True,
                                            no_config=True)
        elif try_again:
            self.check_project(project, False)
        else:
            print >> sys.stderr, "Cannot get status of %s: %s" % (project, e)

        return

    try:
        packages_node = ET.parse(filename).getroot()
    except SyntaxError, e:
        util.safe_unlink(filename)

        if try_again:
            return self.check_project(project, False)
        else:
            print >> sys.stderr, "Cannot parse status of %s: %s" % (project, e)
            return

    # We will have to remove all subdirectories that just don't belong to
    # this project anymore.
    subdirs_to_remove = [file for file in os.listdir(project_dir)
                         if os.path.isdir(os.path.join(project_dir, file))]

    # Here's what we check to know if a package needs to be checked out again:
    #  - if there's no subdir
    #  - if it's a link: