def check_staging_accept(self, project, target):
    """Apply an approved staging packagelist change to *target*.

    Looks up the marker comment on *project*; when an approver is
    present, the parsed command sections are applied to a fresh
    checkout of 000package-groups in *target* and the result is
    committed.
    """
    all_comments = self.comment.get_comments(project_name=project)
    marker_comment, _ = self.comment.comment_find(all_comments, MARKER)
    approver = self.is_approved(marker_comment, all_comments)
    if not approver:
        # Nobody has signed off yet -> nothing to apply.
        return
    sections = self.parse_sections(marker_comment['comment'])
    with tempfile.TemporaryDirectory() as workdir:
        checkout_package(self.apiurl, target, '000package-groups',
                         expand_link=True, outdir=workdir)
        self.apply_commands(workdir + '/summary-staging.txt', sections)
        self.apply_changes(workdir + '/package-groups.changes', sections, approver)
        pkg = Package(workdir)
        pkg.commit(msg='Approved packagelist changes', skip_local_service_run=True)
def checkOut(self, logView):
    """Check the imaging package out into ``self.bsDir``.

    Everything osc prints to stdout is captured and forwarded to the
    supplied log view instead of the console.

    @param logView: logView to print messages
    @type logView: Instance of logView
    @return: True on success, False if the osc checkout failed
    """
    # Acquire stdout so osc's output ends up in the log view.
    zen = StringIO.StringIO()
    sys.stdout = zen
    success = True
    try:
        # Remove the target dir - historical belt-and-braces double
        # cleanup (rm_rf() plus rmtree()) is kept deliberately.
        self.rm_rf(self.bsDir)
        shutil.rmtree(self.bsDir, ignore_errors=True)
        # Check the package out.
        try:
            core.checkout_package(self.configDict["api_url"], self.project,
                                  self.pkgname, prj_dir=self.bsDir)
        except Exception:
            # Error during 'osc checkout': report via popup.  FIX: was a
            # bare `except:`, which also swallowed KeyboardInterrupt and
            # SystemExit.
            e = str(sys.exc_info())
            msg = str("Fehler während 'osc checkout'!\n%s\n".encode("utf-8") % (e,))
            WARNING_URL_GUI = GUI_warning_popup.GUI_warning_popup(self.factory, self.myParent, msg)
            WARNING_URL_GUI.handleEvent()
            success = False
    finally:
        # FIX: restore stdout and flush the captured output in a single
        # finally block - previously duplicated in both exit paths, and
        # stdout stayed hijacked if the cleanup above raised.
        sys.stdout = sys.__stdout__
        line = zen.getvalue()
        logView.appendLines(line.encode("utf-8"))
    return success
def update_and_solve_target(self, api, target_project, target_config, main_repo,
                            project, scope, force, no_checkout,
                            only_release_packages, stop_after_solve):
    """Refresh the pkglistgen packages of *project* and solve its package lists.

    Checks out the group/product/release packages into a per-host cache,
    runs the solver, regenerates product and release files and commits
    the results.  Returns True on a completed run, None when the run is
    skipped (revived package, build in progress, or stop_after_solve).
    """
    self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
    ignore_repos = set(target_config.get('pkglistgen-ignore_repos', '').split(' '))
    self.repos = [
        r for r in self.expand_repos(project, main_repo)
        if r[0] != project or r[1] not in ignore_repos
    ]
    print('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

    group = target_config.get('pkglistgen-group', '000package-groups')
    product = target_config.get('pkglistgen-product', '000product')
    release = target_config.get('pkglistgen-release', '000release-packages')
    oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

    url = api.makeurl(['source', project])
    packages = ET.parse(http_GET(url)).getroot()
    if packages.find('entry[@name="{}"]'.format(product)) is None:
        if not self.dry_run:
            undelete_package(api.apiurl, project, product, 'revive')
        # TODO disable build.
        print('{} undeleted, skip dvd until next cycle'.format(product))
        return
    elif not force:
        root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                   repository=[main_repo], multibuild=True))
        if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
            print('{}/{} build in progress'.format(project, product))
            return

    drop_list = api.item_exists(project, oldrepos)
    checkout_list = [group, product, release]
    if drop_list and not only_release_packages:
        checkout_list.append(oldrepos)

    if packages.find('entry[@name="{}"]'.format(release)) is None:
        if not self.dry_run:
            undelete_package(api.apiurl, project, release, 'revive')
        print('{} undeleted, skip dvd until next cycle'.format(release))
        return

    # Cache dir specific to hostname and project.
    host = urlparse(api.apiurl).hostname
    cache_dir = CacheManager.directory('pkglistgen', host, project)

    if not no_checkout:
        if os.path.exists(cache_dir):
            shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)

    group_dir = os.path.join(cache_dir, group)
    product_dir = os.path.join(cache_dir, product)
    release_dir = os.path.join(cache_dir, release)
    oldrepos_dir = os.path.join(cache_dir, oldrepos)

    for package in checkout_list:
        if no_checkout:
            print('Skipping checkout of {}/{}'.format(project, package))
            continue
        checkout_package(api.apiurl, project, package, expand_link=True, prj_dir=cache_dir)

    file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
    if not only_release_packages:
        file_utils.unlink_all_except(product_dir)
    file_utils.copy_directory_contents(group_dir, product_dir,
                                       ['supportstatus.txt', 'groups.yml', 'reference-unsorted.yml',
                                        'reference-summary.yml', 'package-groups.changes'])
    file_utils.change_extension(product_dir, '.spec.in', '.spec')
    file_utils.change_extension(product_dir, '.product.in', '.product')

    self.input_dir = group_dir
    self.output_dir = product_dir

    print('-> do_update')
    # make sure we only calculate existing architectures
    self.filter_architectures(target_archs(api.apiurl, project, main_repo))
    self.update_repos(self.filtered_architectures)

    if only_release_packages:
        self.load_all_groups()
        self.write_group_stubs()
    else:
        summary = self.solve_project(
            ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
            ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
            locale=target_config.get('pkglistgen-locale'),
            locales_from=target_config.get('pkglistgen-locales-from'))

    if stop_after_solve:
        return

    if drop_list:
        weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
        # FIX: use a context manager so the handle is flushed and closed
        # before the file is committed (was a bare open() that leaked).
        with open(weakremovers_file, 'w') as output:
            self.create_weakremovers(project, target_config, oldrepos_dir, output=output)

    delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
    file_utils.unlink_list(product_dir, delete_products)

    print('-> product service')
    product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
    if not product_version:
        # for stagings the product version doesn't matter (I hope)
        product_version = '1'
    for product_file in glob.glob(os.path.join(product_dir, '*.product')):
        self.replace_product_version(product_file, product_version)
        print(subprocess.check_output([PRODUCT_SERVICE, product_file, product_dir, project],
                                      encoding='utf-8'))

    for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
        delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
        file_utils.unlink_list(product_dir, delete_kiwis)
    if scope == 'staging':
        self.strip_medium_from_staging(product_dir)

    spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
    file_utils.move_list(spec_files, release_dir)
    inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
    # filter special inc file
    # FIX: keep everything EXCEPT weakremovers.inc here ('not' was
    # missing, which kept ONLY weakremovers.inc and made the guarded
    # move below dead code); weakremovers.inc gets the conditional
    # treatment that follows.
    inc_files = filter(lambda path: not path.endswith('weakremovers.inc'), inc_files)
    file_utils.move_list(inc_files, release_dir)
    # do not overwrite weakremovers.inc if it exists
    # we will commit there afterwards if needed
    if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
            not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
        file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)
    file_utils.multibuild_from_glob(release_dir, '*.spec')
    self.build_stub(release_dir, 'spec')
    self.commit_package(release_dir)

    if only_release_packages:
        return

    file_utils.multibuild_from_glob(product_dir, '*.kiwi')
    self.build_stub(product_dir, 'kiwi')
    self.commit_package(product_dir)

    # Compare generated summaries against the reference files and log
    # any differences (diff exits non-zero on differences, hence the
    # CalledProcessError handling).
    error_output = b''
    reference_summary = os.path.join(group_dir, 'reference-summary.yml')
    if os.path.isfile(reference_summary):
        summary_file = os.path.join(product_dir, 'summary.yml')
        with open(summary_file, 'w') as f:
            f.write('# Summary of packages in groups')
            for group in sorted(summary):
                # the unsorted group should appear filtered by
                # unneeded.yml - so we need the content of unsorted.yml
                # not unsorted.group (this grew a little unnaturally)
                if group == 'unsorted':
                    continue
                f.write('\n' + group + ':\n')
                for package in sorted(summary[group]):
                    f.write(' - ' + package + '\n')

        try:
            error_output += subprocess.check_output(['diff', '-u', reference_summary, summary_file])
        except subprocess.CalledProcessError as e:
            error_output += e.output

        reference_unsorted = os.path.join(group_dir, 'reference-unsorted.yml')
        unsorted_file = os.path.join(product_dir, 'unsorted.yml')
        try:
            error_output += subprocess.check_output(['diff', '-u', reference_unsorted, unsorted_file])
        except subprocess.CalledProcessError as e:
            error_output += e.output

    if len(error_output) > 0:
        self.logger.error('Difference in yml:\n' + error_output.decode('utf-8'))

    return True
p.update(rev) except KeyboardInterrupt, e: raise e except: logger.warning("Couldn't update %s.", package, exc_info=True) logger.info('Deleting %s and starting again...', pkgDir) tree.rmtree(pkgDir) else: # no exception from update() => no need to redo the checkout continue else: logger.debug("checking out %s into %s (.osc not found)", package, pkgDir) # If .../.osc didn't exist, or if it existed but we decided it wasn't # recoverable, check the package out. Expand links like osc checkout. osccore.checkout_package(apiurl, project, package.obsName, prj_dir=prjDir, expand_link=True) # Perform a sanity check self._validateCheckout(dist, component, package) def package(self, dist, component, name): try: for s in self.getSources(dist, component): if s['Package'] == name: return OBSPackage(self, dist, component, name) raise error.PackageNotFound(name, dist, component) except KeyError: raise error.PackageNotFound(name, dist, component) def getPackageFiles(self, dist, component, obsPkg): return osccore.meta_get_filelist(self.config("obs", "url"),
def update_and_solve_target(self, api, target_project, target_config, main_repo,
                            project, scope, force, no_checkout,
                            only_release_packages, stop_after_solve):
    """Refresh the pkglistgen packages of *project* and solve its package lists.

    Checks out the group/product/release packages into a per-host cache,
    runs the solver, regenerates product and release files and commits
    the results.  When a reference summary exists, the package diff is
    handed to the comment handler and its result returned; otherwise the
    function returns None (or early on skipped runs).
    """
    self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
    self.use_newest_version = str2bool(
        target_config.get('pkglistgen-use-newest-version', 'False'))
    self.repos = self.expand_repos(project, main_repo)
    logging.debug('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

    group = target_config.get('pkglistgen-group', '000package-groups')
    product = target_config.get('pkglistgen-product', '000product')
    release = target_config.get('pkglistgen-release', '000release-packages')
    oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

    url = api.makeurl(['source', project])
    packages = ET.parse(http_GET(url)).getroot()
    if packages.find('entry[@name="{}"]'.format(product)) is None:
        if not self.dry_run:
            undelete_package(api.apiurl, project, product, 'revive')
        # TODO disable build.
        logging.info('{} undeleted, skip dvd until next cycle'.format(product))
        return
    elif not force:
        root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                   repository=[main_repo], multibuild=True))
        if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
            logging.info('{}/{} build in progress'.format(project, product))
            return

    drop_list = api.item_exists(project, oldrepos)
    checkout_list = [group, product, release]
    if drop_list and not only_release_packages:
        checkout_list.append(oldrepos)

    if packages.find('entry[@name="{}"]'.format(release)) is None:
        if not self.dry_run:
            undelete_package(api.apiurl, project, release, 'revive')
        logging.info('{} undeleted, skip dvd until next cycle'.format(release))
        return

    # Cache dir specific to hostname and project.
    host = urlparse(api.apiurl).hostname
    cache_dir = CacheManager.directory('pkglistgen', host, project)

    if not no_checkout:
        if os.path.exists(cache_dir):
            shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)

    group_dir = os.path.join(cache_dir, group)
    product_dir = os.path.join(cache_dir, product)
    release_dir = os.path.join(cache_dir, release)
    oldrepos_dir = os.path.join(cache_dir, oldrepos)
    self.input_dir = group_dir
    self.output_dir = product_dir

    for package in checkout_list:
        if no_checkout:
            logging.debug('Skipping checkout of {}/{}'.format(project, package))
            continue
        checkout_package(api.apiurl, project, package, expand_link=True,
                         prj_dir=cache_dir, outdir=os.path.join(cache_dir, package))

    file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
    if not only_release_packages:
        file_utils.unlink_all_except(product_dir)
    ignore_list = ['supportstatus.txt', 'summary-staging.txt', 'package-groups.changes']
    ignore_list += self.group_input_files()
    file_utils.copy_directory_contents(group_dir, product_dir, ignore_list)
    file_utils.change_extension(product_dir, '.spec.in', '.spec')
    file_utils.change_extension(product_dir, '.product.in', '.product')

    logging.debug('-> do_update')
    # make sure we only calculate existing architectures
    self.filter_architectures(target_archs(api.apiurl, project, main_repo))
    self.update_repos(self.filtered_architectures)

    if only_release_packages:
        self.load_all_groups()
        self.write_group_stubs()
    else:
        summary = self.solve_project(
            ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
            ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
            locale=target_config.get('pkglistgen-locale'),
            locales_from=target_config.get('pkglistgen-locales-from'))

    if stop_after_solve:
        return

    if drop_list and not only_release_packages:
        weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
        try:
            # FIX: use a context manager so the handle is flushed and
            # closed before commit (was a bare open() that leaked).
            with open(weakremovers_file, 'w') as output:
                self.create_weakremovers(project, target_config, oldrepos_dir, output=output)
        except MismatchedRepoException:
            logging.error(
                "Failed to create weakremovers.inc due to mismatch in repos - project most likey started building again."
            )
            return

    delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
    file_utils.unlink_list(product_dir, delete_products)

    logging.debug('-> product service')
    product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
    if not product_version:
        # for stagings the product version doesn't matter (I hope)
        product_version = '1'
    for product_file in glob.glob(os.path.join(product_dir, '*.product')):
        self.replace_product_version(product_file, product_version)
        logging.debug(subprocess.check_output(
            [PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

    for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
        delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
        file_utils.unlink_list(product_dir, delete_kiwis)
    if scope == 'staging':
        self.strip_medium_from_staging(product_dir)

    spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
    file_utils.move_list(spec_files, release_dir)
    inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
    # filter special inc file
    # FIX: keep everything EXCEPT weakremovers.inc here ('not' was
    # missing, which kept ONLY weakremovers.inc and made the guarded
    # move below dead code); weakremovers.inc gets the conditional
    # treatment that follows.
    inc_files = filter(lambda path: not path.endswith('weakremovers.inc'), inc_files)
    file_utils.move_list(inc_files, release_dir)
    # do not overwrite weakremovers.inc if it exists
    # we will commit there afterwards if needed
    if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
            not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
        file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)
    file_utils.multibuild_from_glob(release_dir, '*.spec')
    self.build_stub(release_dir, 'spec')
    self.commit_package(release_dir)

    if only_release_packages:
        return

    file_utils.multibuild_from_glob(product_dir, '*.kiwi')
    self.build_stub(product_dir, 'kiwi')

    # Write the per-scope summary next to the products and, if a
    # reference exists, report the diff via the comment handler.
    reference_summary = os.path.join(group_dir, f'summary-{scope}.txt')
    have_reference = os.path.isfile(reference_summary)
    if have_reference:
        summary_file = os.path.join(product_dir, f'summary-{scope}.txt')
        output = []
        for group in summary:
            for package in sorted(summary[group]):
                output.append(f'{package}:{group}')
        with open(summary_file, 'w') as f:
            for line in sorted(output):
                f.write(line + '\n')

    self.commit_package(product_dir)

    if have_reference:
        return self.comment.handle_package_diff(project, reference_summary, summary_file)
def update_and_solve_target(self, api, target_project, target_config, main_repo,
                            project, scope, force, no_checkout,
                            only_release_packages, stop_after_solve):
    """Refresh the pkglistgen packages of *project* and solve its package lists.

    Checks out the group/product/release packages into a per-host cache,
    runs the solver, regenerates product and release files and commits
    the results.  Returns True on a completed run, None when the run is
    skipped (revived package, build in progress, or stop_after_solve).
    """
    self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
    self.repos = self.expand_repos(project, main_repo)
    print('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

    group = target_config.get('pkglistgen-group', '000package-groups')
    product = target_config.get('pkglistgen-product', '000product')
    release = target_config.get('pkglistgen-release', '000release-packages')
    oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

    url = api.makeurl(['source', project])
    packages = ET.parse(http_GET(url)).getroot()
    if packages.find('entry[@name="{}"]'.format(product)) is None:
        if not self.dry_run:
            undelete_package(api.apiurl, project, product, 'revive')
        # TODO disable build.
        print('{} undeleted, skip dvd until next cycle'.format(product))
        return
    elif not force:
        root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                   repository=[main_repo], multibuild=True))
        if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
            print('{}/{} build in progress'.format(project, product))
            return

    drop_list = api.item_exists(project, oldrepos)
    checkout_list = [group, product, release]
    if drop_list and not only_release_packages:
        checkout_list.append(oldrepos)

    if packages.find('entry[@name="{}"]'.format(release)) is None:
        if not self.dry_run:
            undelete_package(api.apiurl, project, release, 'revive')
        print('{} undeleted, skip dvd until next cycle'.format(release))
        return

    # Cache dir specific to hostname and project.
    host = urlparse(api.apiurl).hostname
    cache_dir = CacheManager.directory('pkglistgen', host, project)

    if not no_checkout:
        if os.path.exists(cache_dir):
            shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)

    group_dir = os.path.join(cache_dir, group)
    product_dir = os.path.join(cache_dir, product)
    release_dir = os.path.join(cache_dir, release)
    oldrepos_dir = os.path.join(cache_dir, oldrepos)

    for package in checkout_list:
        if no_checkout:
            print('Skipping checkout of {}/{}'.format(project, package))
            continue
        checkout_package(api.apiurl, project, package, expand_link=True, prj_dir=cache_dir)

    file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
    if not only_release_packages:
        file_utils.unlink_all_except(product_dir)
    file_utils.copy_directory_contents(group_dir, product_dir,
                                       ['supportstatus.txt', 'groups.yml', 'reference-unsorted.yml',
                                        'reference-summary.yml', 'package-groups.changes'])
    file_utils.change_extension(product_dir, '.spec.in', '.spec')
    file_utils.change_extension(product_dir, '.product.in', '.product')

    self.input_dir = group_dir
    self.output_dir = product_dir

    print('-> do_update')
    # make sure we only calculate existing architectures
    self.filter_architectures(target_archs(api.apiurl, project, main_repo))
    self.update_repos(self.filtered_architectures)

    if only_release_packages:
        self.load_all_groups()
        self.write_group_stubs()
    else:
        # FIX: config key was 'pkglistgen-local'; every other key uses
        # the full word and the sibling implementation reads
        # 'pkglistgen-locale', so the typo silently disabled the locale
        # setting.
        summary = self.solve_project(
            ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
            ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
            locale=target_config.get('pkglistgen-locale'),
            locales_from=target_config.get('pkglistgen-locales-from'))

    if stop_after_solve:
        return

    if drop_list:
        weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
        # FIX: use a context manager so the handle is flushed and closed
        # before the file is committed (was a bare open() that leaked).
        with open(weakremovers_file, 'w') as output:
            self.create_weakremovers(project, target_config, oldrepos_dir, output=output)

    delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
    file_utils.unlink_list(product_dir, delete_products)

    print('-> product service')
    product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
    if not product_version:
        # for stagings the product version doesn't matter (I hope)
        product_version = '1'
    for product_file in glob.glob(os.path.join(product_dir, '*.product')):
        self.replace_product_version(product_file, product_version)
        print(subprocess.check_output([PRODUCT_SERVICE, product_file, product_dir, project],
                                      encoding='utf-8'))

    for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
        delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
        file_utils.unlink_list(product_dir, delete_kiwis)
    if scope == 'staging':
        self.strip_medium_from_staging(product_dir)

    spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
    file_utils.move_list(spec_files, release_dir)
    inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
    # filter special inc file
    # FIX: keep everything EXCEPT weakremovers.inc here ('not' was
    # missing, which kept ONLY weakremovers.inc); weakremovers.inc gets
    # the conditional treatment that follows.
    inc_files = filter(lambda path: not path.endswith('weakremovers.inc'), inc_files)
    file_utils.move_list(inc_files, release_dir)
    # do not overwrite weakremovers.inc if it exists
    # we will commit there afterwards if needed
    # FIX: also require the group copy to exist before moving it, as the
    # sibling implementation does - otherwise the move is attempted on a
    # missing file.
    if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
            not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
        file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)
    file_utils.multibuild_from_glob(release_dir, '*.spec')
    self.build_stub(release_dir, 'spec')
    self.commit_package(release_dir)

    if only_release_packages:
        return

    file_utils.multibuild_from_glob(product_dir, '*.kiwi')
    self.build_stub(product_dir, 'kiwi')
    self.commit_package(product_dir)

    # Compare generated summaries against the reference files and log
    # any differences.  FIX: accumulate bytes (subprocess.check_output
    # returns bytes without encoding=) - accumulating into '' raised
    # TypeError on the first diff; decode once for logging.
    error_output = b''
    reference_summary = os.path.join(group_dir, 'reference-summary.yml')
    if os.path.isfile(reference_summary):
        summary_file = os.path.join(product_dir, 'summary.yml')
        with open(summary_file, 'w') as f:
            f.write('# Summary of packages in groups')
            for group in sorted(summary):
                # the unsorted group should appear filtered by
                # unneeded.yml - so we need the content of unsorted.yml
                # not unsorted.group (this grew a little unnaturally)
                if group == 'unsorted':
                    continue
                f.write('\n' + group + ':\n')
                for package in sorted(summary[group]):
                    f.write(' - ' + package + '\n')

        try:
            error_output += subprocess.check_output(['diff', '-u', reference_summary, summary_file])
        except subprocess.CalledProcessError as e:
            error_output += e.output

        reference_unsorted = os.path.join(group_dir, 'reference-unsorted.yml')
        unsorted_file = os.path.join(product_dir, 'unsorted.yml')
        try:
            error_output += subprocess.check_output(['diff', '-u', reference_unsorted, unsorted_file])
        except subprocess.CalledProcessError as e:
            error_output += e.output

    if len(error_output) > 0:
        self.logger.error('Difference in yml:\n' + error_output.decode('utf-8'))

    return True
def update_and_solve_target(self, apiurl, target_project, target_config, main_repo,
                            opts, skip_release=False):
    """Refresh the pkglistgen packages of ``opts.project`` and solve its lists.

    Checks out the group/product (and, unless *skip_release*, release)
    packages into a fresh cache, runs update and solve, regenerates the
    product files and commits the results.  Returns None; exits early
    when a package had to be revived or a build is in progress.
    """
    print('[{}] {}/{}: update and solve'.format(opts.scope, opts.project, main_repo))

    group = target_config.get('pkglistgen-group', '000package-groups')
    product = target_config.get('pkglistgen-product', '000product')
    release = target_config.get('pkglistgen-release', '000release-packages')

    url = makeurl(apiurl, ['source', opts.project])
    packages = ET.parse(http_GET(url)).getroot()
    if packages.find('entry[@name="{}"]'.format(product)) is None:
        if not self.options.dry:
            undelete_package(apiurl, opts.project, product, 'revive')
        # TODO disable build.
        print('{} undeleted, skip dvd until next cycle'.format(product))
        return
    elif not opts.force:
        root = ET.fromstringlist(show_results_meta(apiurl, opts.project, product,
                                                   repository=[main_repo], multibuild=True))
        if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
            print('{}/{} build in progress'.format(opts.project, product))
            return

    checkout_list = [group, product]
    if not skip_release:
        checkout_list.append(release)

        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.options.dry:
                # FIX: this branch handles a missing *release* package
                # but revived *product* - the message below already
                # names release.
                undelete_package(apiurl, opts.project, release, 'revive')
            print('{} undeleted, skip dvd until next cycle'.format(release))
            return

    # Cache dir specific to hostname and project.
    host = urlparse.urlparse(apiurl).hostname
    cache_dir = save_cache_path('opensuse-packagelists', host, opts.project)

    if os.path.exists(cache_dir):
        shutil.rmtree(cache_dir)
    os.makedirs(cache_dir)

    group_dir = os.path.join(cache_dir, group)
    product_dir = os.path.join(cache_dir, product)
    release_dir = os.path.join(cache_dir, release)

    for package in checkout_list:
        checkout_package(apiurl, opts.project, package, expand_link=True, prj_dir=cache_dir)

    if not skip_release:
        self.unlink_all_except(release_dir)
    self.unlink_all_except(product_dir)
    self.copy_directory_contents(group_dir, product_dir,
                                 ['supportstatus.txt', 'groups.yml', 'package-groups.changes'])
    self.change_extension(product_dir, '.spec.in', '.spec')

    self.options.input_dir = group_dir
    self.options.output_dir = product_dir
    self.postoptparse()

    print('-> do_update')
    self.do_update('update', opts)

    print('-> do_solve')
    # NOTE(review): bool() on these config strings is True for any
    # non-empty value, including "False"; later revisions use str2bool.
    # Left as-is since a helper is not in scope here - confirm intent.
    opts.ignore_unresolvable = bool(target_config.get('pkglistgen-ignore-unresolvable'))
    opts.ignore_recommended = bool(target_config.get('pkglistgen-ignore-recommended'))
    opts.include_suggested = bool(target_config.get('pkglistgen-include-suggested'))
    # NOTE(review): sibling implementations read 'pkglistgen-locale';
    # verify which key deployed configs actually use before renaming.
    opts.locale = target_config.get('pkglistgen-local')
    opts.locales_from = target_config.get('pkglistgen-locales-from')
    self.do_solve('solve', opts)

    delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
    self.unlink_list(product_dir, delete_products)

    print('-> product service')
    for product_file in glob.glob(os.path.join(product_dir, '*.product')):
        print(subprocess.check_output([PRODUCT_SERVICE, product_file, product_dir, opts.project]))

    delete_kiwis = target_config.get('pkglistgen-delete-kiwis-{}'.format(opts.scope), '').split(' ')
    self.unlink_list(product_dir, delete_kiwis)

    spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
    if skip_release:
        self.unlink_list(None, spec_files)
    else:
        self.move_list(spec_files, release_dir)

    self.multibuild_from_glob(product_dir, '*.kiwi')
    self.build_stub(product_dir, 'kiwi')
    self.commit_package(product_dir)

    if not skip_release:
        self.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)
package, exc_info=True) logger.info('Deleting %s and starting again...', pkgDir) tree.rmtree(pkgDir) else: # no exception from update() => no need to redo the checkout continue else: logger.debug("checking out %s into %s (.osc not found)", package, pkgDir) # If .../.osc didn't exist, or if it existed but we decided it wasn't # recoverable, check the package out. Expand links like osc checkout. osccore.checkout_package(apiurl, project, package.obsName, prj_dir=prjDir, expand_link=True) # Perform a sanity check self._validateCheckout(dist, component, package) def package(self, dist, component, name): try: for s in self.getSources(dist, component): if s['Package'] == name: return OBSPackage(self, dist, component, name) raise error.PackageNotFound(name, dist, component) except KeyError: raise error.PackageNotFound(name, dist, component)