def update_and_solve_target(self, api, target_project, target_config, main_repo,
                            project, scope, force, no_checkout,
                            only_release_packages, stop_after_solve):
    self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
    self.use_newest_version = str2bool(target_config.get('pkglistgen-use-newest-version', 'False'))
    self.repos = self.expand_repos(project, main_repo)
    logging.debug('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

    group = target_config.get('pkglistgen-group', '000package-groups')
    product = target_config.get('pkglistgen-product', '000product')
    release = target_config.get('pkglistgen-release', '000release-packages')
    oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

    url = api.makeurl(['source', project])
    packages = ET.parse(http_GET(url)).getroot()
    if packages.find('entry[@name="{}"]'.format(product)) is None:
        if not self.dry_run:
            undelete_package(api.apiurl, project, product, 'revive')
        # TODO disable build.
        logging.info('{} undeleted, skip dvd until next cycle'.format(product))
        return
    elif not force:
        root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                   repository=[main_repo], multibuild=True))
        if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
            logging.info('{}/{} build in progress'.format(project, product))
            return

    drop_list = api.item_exists(project, oldrepos)
    checkout_list = [group, product, release]
    if drop_list and not only_release_packages:
        checkout_list.append(oldrepos)

    if packages.find('entry[@name="{}"]'.format(release)) is None:
        if not self.dry_run:
            undelete_package(api.apiurl, project, release, 'revive')
        logging.info('{} undeleted, skip dvd until next cycle'.format(release))
        return

    # Cache dir specific to hostname and project.
    host = urlparse(api.apiurl).hostname
    cache_dir = CacheManager.directory('pkglistgen', host, project)

    if not no_checkout:
        if os.path.exists(cache_dir):
            shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)

    group_dir = os.path.join(cache_dir, group)
    product_dir = os.path.join(cache_dir, product)
    release_dir = os.path.join(cache_dir, release)
    oldrepos_dir = os.path.join(cache_dir, oldrepos)

    self.input_dir = group_dir
    self.output_dir = product_dir

    for package in checkout_list:
        if no_checkout:
            logging.debug('Skipping checkout of {}/{}'.format(project, package))
            continue
        checkout_package(api.apiurl, project, package, expand_link=True,
                         prj_dir=cache_dir, outdir=os.path.join(cache_dir, package))

    file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
    if not only_release_packages:
        file_utils.unlink_all_except(product_dir)
    ignore_list = ['supportstatus.txt', 'summary-staging.txt', 'package-groups.changes']
    ignore_list += self.group_input_files()
    file_utils.copy_directory_contents(group_dir, product_dir, ignore_list)
    file_utils.change_extension(product_dir, '.spec.in', '.spec')
    file_utils.change_extension(product_dir, '.product.in', '.product')

    logging.debug('-> do_update')
    # make sure we only calculate existing architectures
    self.filter_architectures(target_archs(api.apiurl, project, main_repo))
    self.update_repos(self.filtered_architectures)

    if only_release_packages:
        self.load_all_groups()
        self.write_group_stubs()
    else:
        summary = self.solve_project(
            ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
            ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
            locale=target_config.get('pkglistgen-locale'),
            locales_from=target_config.get('pkglistgen-locales-from'))

    if stop_after_solve:
        return

    if drop_list and not only_release_packages:
        weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
        try:
            self.create_weakremovers(project, target_config, oldrepos_dir,
                                     output=open(weakremovers_file, 'w'))
        except MismatchedRepoException:
            logging.error('Failed to create weakremovers.inc due to mismatch in repos'
                          ' - project most likely started building again.')
            return

    delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
    file_utils.unlink_list(product_dir, delete_products)

    logging.debug('-> product service')
    product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
    if not product_version:
        # for stagings the product version doesn't matter (I hope)
        product_version = '1'
    for product_file in glob.glob(os.path.join(product_dir, '*.product')):
        self.replace_product_version(product_file, product_version)
        logging.debug(subprocess.check_output(
            [PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

    for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
        delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
        file_utils.unlink_list(product_dir, delete_kiwis)
    if scope == 'staging':
        self.strip_medium_from_staging(product_dir)

    spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
    file_utils.move_list(spec_files, release_dir)
    inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
    # filter out the special weakremovers.inc; it is handled separately below
    inc_files = filter(lambda file: not file.endswith('weakremovers.inc'), inc_files)
    file_utils.move_list(inc_files, release_dir)

    # do not overwrite weakremovers.inc if it exists
    # we will commit there afterwards if needed
    if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
       not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
        file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)

    file_utils.multibuild_from_glob(release_dir, '*.spec')
    self.build_stub(release_dir, 'spec')
    self.commit_package(release_dir)

    if only_release_packages:
        return

    file_utils.multibuild_from_glob(product_dir, '*.kiwi')
    self.build_stub(product_dir, 'kiwi')

    reference_summary = os.path.join(group_dir, f'summary-{scope}.txt')
    if os.path.isfile(reference_summary):
        summary_file = os.path.join(product_dir, f'summary-{scope}.txt')
        output = []
        for group in summary:
            for package in sorted(summary[group]):
                output.append(f'{package}:{group}')

        with open(summary_file, 'w') as f:
            for line in sorted(output):
                f.write(line + '\n')

    self.commit_package(product_dir)

    if os.path.isfile(reference_summary):
        return self.comment.handle_package_diff(project, reference_summary, summary_file)
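# ---------------------------------------------------------------------------
# The method listings in this file are detached from their module, so the
# imports they rely on are not shown. A hedged sketch of what the module
# header likely looks like follows; the exact module paths (osc.core,
# osclib.*, pkglistgen.*) are assumptions based on the usual
# openSUSE-release-tools layout, not taken from this file:
#
#   import glob
#   import logging
#   import os
#   import shutil
#   import subprocess
#   from urllib.parse import urlparse
#   from lxml import etree as ET   # .xpath() is used, so plain ElementTree is not enough
#   from osc.core import checkout_package, http_GET, show_results_meta, undelete_package
#   from osclib.cache_manager import CacheManager
#   from osclib.conversion import str2bool
#   from osclib.core import attribute_value_load, target_archs
#   from pkglistgen import file_utils
#
# PRODUCT_SERVICE and MismatchedRepoException are assumed to be defined
# elsewhere in the pkglistgen package.
# ---------------------------------------------------------------------------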
def update_and_solve_target(self, api, target_project, target_config, main_repo,
                            project, scope, force, no_checkout,
                            only_release_packages, stop_after_solve):
    self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
    ignore_repos = set(target_config.get('pkglistgen-ignore_repos', '').split(' '))
    self.repos = [r for r in self.expand_repos(project, main_repo)
                  if r[0] != project or r[1] not in ignore_repos]
    print('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

    group = target_config.get('pkglistgen-group', '000package-groups')
    product = target_config.get('pkglistgen-product', '000product')
    release = target_config.get('pkglistgen-release', '000release-packages')
    oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

    url = api.makeurl(['source', project])
    packages = ET.parse(http_GET(url)).getroot()
    if packages.find('entry[@name="{}"]'.format(product)) is None:
        if not self.dry_run:
            undelete_package(api.apiurl, project, product, 'revive')
        # TODO disable build.
        print('{} undeleted, skip dvd until next cycle'.format(product))
        return
    elif not force:
        root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                   repository=[main_repo], multibuild=True))
        if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
            print('{}/{} build in progress'.format(project, product))
            return

    drop_list = api.item_exists(project, oldrepos)
    checkout_list = [group, product, release]
    if drop_list and not only_release_packages:
        checkout_list.append(oldrepos)

    if packages.find('entry[@name="{}"]'.format(release)) is None:
        if not self.dry_run:
            undelete_package(api.apiurl, project, release, 'revive')
        print('{} undeleted, skip dvd until next cycle'.format(release))
        return

    # Cache dir specific to hostname and project.
    host = urlparse(api.apiurl).hostname
    cache_dir = CacheManager.directory('pkglistgen', host, project)

    if not no_checkout:
        if os.path.exists(cache_dir):
            shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)

    group_dir = os.path.join(cache_dir, group)
    product_dir = os.path.join(cache_dir, product)
    release_dir = os.path.join(cache_dir, release)
    oldrepos_dir = os.path.join(cache_dir, oldrepos)

    for package in checkout_list:
        if no_checkout:
            print('Skipping checkout of {}/{}'.format(project, package))
            continue
        checkout_package(api.apiurl, project, package, expand_link=True, prj_dir=cache_dir)

    file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
    if not only_release_packages:
        file_utils.unlink_all_except(product_dir)
    file_utils.copy_directory_contents(group_dir, product_dir,
                                       ['supportstatus.txt', 'groups.yml',
                                        'reference-unsorted.yml', 'reference-summary.yml',
                                        'package-groups.changes'])
    file_utils.change_extension(product_dir, '.spec.in', '.spec')
    file_utils.change_extension(product_dir, '.product.in', '.product')

    self.input_dir = group_dir
    self.output_dir = product_dir

    print('-> do_update')
    # make sure we only calculate existing architectures
    self.filter_architectures(target_archs(api.apiurl, project, main_repo))
    self.update_repos(self.filtered_architectures)

    if only_release_packages:
        self.load_all_groups()
        self.write_group_stubs()
    else:
        summary = self.solve_project(
            ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
            ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
            locale=target_config.get('pkglistgen-locale'),
            locales_from=target_config.get('pkglistgen-locales-from'))

    if stop_after_solve:
        return

    if drop_list:
        weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
        self.create_weakremovers(project, target_config, oldrepos_dir,
                                 output=open(weakremovers_file, 'w'))

    delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
    file_utils.unlink_list(product_dir, delete_products)

    print('-> product service')
    product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
    if not product_version:
        # for stagings the product version doesn't matter (I hope)
        product_version = '1'
    for product_file in glob.glob(os.path.join(product_dir, '*.product')):
        self.replace_product_version(product_file, product_version)
        print(subprocess.check_output(
            [PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

    for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
        delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
        file_utils.unlink_list(product_dir, delete_kiwis)
    if scope == 'staging':
        self.strip_medium_from_staging(product_dir)

    spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
    file_utils.move_list(spec_files, release_dir)
    inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
    # filter out the special weakremovers.inc; it is handled separately below
    inc_files = filter(lambda file: not file.endswith('weakremovers.inc'), inc_files)
    file_utils.move_list(inc_files, release_dir)

    # do not overwrite weakremovers.inc if it exists
    # we will commit there afterwards if needed
    if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
       not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
        file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)

    file_utils.multibuild_from_glob(release_dir, '*.spec')
    self.build_stub(release_dir, 'spec')
    self.commit_package(release_dir)

    if only_release_packages:
        return

    file_utils.multibuild_from_glob(product_dir, '*.kiwi')
    self.build_stub(product_dir, 'kiwi')
    self.commit_package(product_dir)

    error_output = b''
    reference_summary = os.path.join(group_dir, 'reference-summary.yml')
    if os.path.isfile(reference_summary):
        summary_file = os.path.join(product_dir, 'summary.yml')
        with open(summary_file, 'w') as f:
            f.write('# Summary of packages in groups')
            for group in sorted(summary):
                # the unsorted group should appear filtered by
                # unneeded.yml - so we need the content of unsorted.yml
                # not unsorted.group (this grew a little unnaturally)
                if group == 'unsorted':
                    continue
                f.write('\n' + group + ':\n')
                for package in sorted(summary[group]):
                    f.write(' - ' + package + '\n')

        # diff exits non-zero when the files differ; the diff text then
        # arrives via e.output
        try:
            error_output += subprocess.check_output(['diff', '-u', reference_summary, summary_file])
        except subprocess.CalledProcessError as e:
            error_output += e.output

    reference_unsorted = os.path.join(group_dir, 'reference-unsorted.yml')
    unsorted_file = os.path.join(product_dir, 'unsorted.yml')
    try:
        error_output += subprocess.check_output(['diff', '-u', reference_unsorted, unsorted_file])
    except subprocess.CalledProcessError as e:
        error_output += e.output

    if len(error_output) > 0:
        self.logger.error('Difference in yml:\n' + error_output.decode('utf-8'))
        return True
def update_and_solve_target(self, api, target_project, target_config, main_repo,
                            project, scope, force, no_checkout,
                            only_release_packages, stop_after_solve):
    self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
    self.repos = self.expand_repos(project, main_repo)
    print('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

    group = target_config.get('pkglistgen-group', '000package-groups')
    product = target_config.get('pkglistgen-product', '000product')
    release = target_config.get('pkglistgen-release', '000release-packages')
    oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

    url = api.makeurl(['source', project])
    packages = ET.parse(http_GET(url)).getroot()
    if packages.find('entry[@name="{}"]'.format(product)) is None:
        if not self.dry_run:
            undelete_package(api.apiurl, project, product, 'revive')
        # TODO disable build.
        print('{} undeleted, skip dvd until next cycle'.format(product))
        return
    elif not force:
        root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                   repository=[main_repo], multibuild=True))
        if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
            print('{}/{} build in progress'.format(project, product))
            return

    drop_list = api.item_exists(project, oldrepos)
    checkout_list = [group, product, release]
    if drop_list and not only_release_packages:
        checkout_list.append(oldrepos)

    if packages.find('entry[@name="{}"]'.format(release)) is None:
        if not self.dry_run:
            undelete_package(api.apiurl, project, release, 'revive')
        print('{} undeleted, skip dvd until next cycle'.format(release))
        return

    # Cache dir specific to hostname and project.
    host = urlparse(api.apiurl).hostname
    cache_dir = CacheManager.directory('pkglistgen', host, project)

    if not no_checkout:
        if os.path.exists(cache_dir):
            shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)

    group_dir = os.path.join(cache_dir, group)
    product_dir = os.path.join(cache_dir, product)
    release_dir = os.path.join(cache_dir, release)
    oldrepos_dir = os.path.join(cache_dir, oldrepos)

    for package in checkout_list:
        if no_checkout:
            print('Skipping checkout of {}/{}'.format(project, package))
            continue
        checkout_package(api.apiurl, project, package, expand_link=True, prj_dir=cache_dir)

    file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
    if not only_release_packages:
        file_utils.unlink_all_except(product_dir)
    file_utils.copy_directory_contents(group_dir, product_dir,
                                       ['supportstatus.txt', 'groups.yml',
                                        'reference-unsorted.yml', 'reference-summary.yml',
                                        'package-groups.changes'])
    file_utils.change_extension(product_dir, '.spec.in', '.spec')
    file_utils.change_extension(product_dir, '.product.in', '.product')

    self.input_dir = group_dir
    self.output_dir = product_dir

    print('-> do_update')
    # make sure we only calculate existing architectures
    self.filter_architectures(target_archs(api.apiurl, project, main_repo))
    self.update_repos(self.filtered_architectures)

    if only_release_packages:
        self.load_all_groups()
        self.write_group_stubs()
    else:
        summary = self.solve_project(
            ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
            ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
            locale=target_config.get('pkglistgen-locale'),
            locales_from=target_config.get('pkglistgen-locales-from'))

    if stop_after_solve:
        return

    if drop_list:
        weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
        self.create_weakremovers(project, target_config, oldrepos_dir,
                                 output=open(weakremovers_file, 'w'))

    delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
    file_utils.unlink_list(product_dir, delete_products)

    print('-> product service')
    product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
    if not product_version:
        # for stagings the product version doesn't matter (I hope)
        product_version = '1'
    for product_file in glob.glob(os.path.join(product_dir, '*.product')):
        self.replace_product_version(product_file, product_version)
        print(subprocess.check_output(
            [PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

    for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
        delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
        file_utils.unlink_list(product_dir, delete_kiwis)
    if scope == 'staging':
        self.strip_medium_from_staging(product_dir)

    spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
    file_utils.move_list(spec_files, release_dir)
    inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
    # filter out the special weakremovers.inc; it is handled separately below
    inc_files = filter(lambda file: not file.endswith('weakremovers.inc'), inc_files)
    file_utils.move_list(inc_files, release_dir)

    # do not overwrite weakremovers.inc if it exists
    # we will commit there afterwards if needed
    if not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
        file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)

    file_utils.multibuild_from_glob(release_dir, '*.spec')
    self.build_stub(release_dir, 'spec')
    self.commit_package(release_dir)

    if only_release_packages:
        return

    file_utils.multibuild_from_glob(product_dir, '*.kiwi')
    self.build_stub(product_dir, 'kiwi')
    self.commit_package(product_dir)

    error_output = b''
    reference_summary = os.path.join(group_dir, 'reference-summary.yml')
    if os.path.isfile(reference_summary):
        summary_file = os.path.join(product_dir, 'summary.yml')
        with open(summary_file, 'w') as f:
            f.write('# Summary of packages in groups')
            for group in sorted(summary):
                # the unsorted group should appear filtered by
                # unneeded.yml - so we need the content of unsorted.yml
                # not unsorted.group (this grew a little unnaturally)
                if group == 'unsorted':
                    continue
                f.write('\n' + group + ':\n')
                for package in sorted(summary[group]):
                    f.write(' - ' + package + '\n')

        # diff exits non-zero when the files differ; the diff text then
        # arrives via e.output
        try:
            error_output += subprocess.check_output(['diff', '-u', reference_summary, summary_file])
        except subprocess.CalledProcessError as e:
            error_output += e.output

    reference_unsorted = os.path.join(group_dir, 'reference-unsorted.yml')
    unsorted_file = os.path.join(product_dir, 'unsorted.yml')
    try:
        error_output += subprocess.check_output(['diff', '-u', reference_unsorted, unsorted_file])
    except subprocess.CalledProcessError as e:
        error_output += e.output

    if len(error_output) > 0:
        self.logger.error('Difference in yml:\n' + error_output.decode('utf-8'))
        return True
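# For reference only: str2bool() is used above on config values that may be
# None (target_config.get() without a default). A minimal stand-in with that
# behavior could look like the sketch below; this is an assumption, not the
# upstream osclib implementation.
def str2bool(value):
    # Unset options (None) and empty strings count as False.
    return value is not None and value.strip().lower() in ('1', 'yes', 'true', 'on')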