def attempt_source_package_import(distro, source, package_root, importer_handler):
    """Attempt to import a source package, and handle typical errors."""
    package_name = source.get("Package", "unknown")

    def import_one():
        # The actual import; called up to twice (see retry below).
        do_one_sourcepackage(distro, source, package_root, importer_handler)

    try:
        try:
            import_one()
        except psycopg2.Error:
            # Transient database trouble: roll back, pause, and retry a
            # single time before giving up for good.
            log.exception(
                "Database error: unable to create SourcePackage for %s. "
                "Retrying once..", package_name)
            importer_handler.abort()
            time.sleep(15)
            import_one()
    except (InvalidVersionError, MissingRequiredArguments,
            DisplayNameDecodingError):
        log.exception("Unable to create SourcePackageData for %s",
                      package_name)
    except (PoolFileNotFound, ExecutionError):
        # Problems with katie db stuff of opening files
        log.exception("Error processing package files for %s", package_name)
    except psycopg2.Error:
        # The retry above also failed at the database level.
        log.exception(
            "Database errors made me give up: unable to create "
            "SourcePackage for %s", package_name)
        importer_handler.abort()
    except MultiplePackageReleaseError:
        log.exception("Database duplication processing %s", package_name)
def attempt_source_package_import(distro, source, package_root, importer_handler):
    """Attempt to import a source package, and handle typical errors."""
    package_name = source.get("Package", "unknown")
    try:
        # Up to two attempts: a database error on the first attempt is
        # logged, the transaction aborted, and the import retried after a
        # short pause.  A second database failure propagates to the
        # handler further down.
        for attempt in (1, 2):
            try:
                do_one_sourcepackage(
                    distro, source, package_root, importer_handler)
            except psycopg2.Error:
                if attempt == 2:
                    raise
                log.exception(
                    "Database error: unable to create SourcePackage for %s. "
                    "Retrying once..", package_name)
                importer_handler.abort()
                time.sleep(15)
            else:
                break
    except (InvalidVersionError, MissingRequiredArguments,
            DisplayNameDecodingError):
        log.exception("Unable to create SourcePackageData for %s",
                      package_name)
    except (PoolFileNotFound, ExecutionError):
        # Problems with katie db stuff of opening files
        log.exception("Error processing package files for %s", package_name)
    except psycopg2.Error:
        log.exception(
            "Database errors made me give up: unable to create "
            "SourcePackage for %s", package_name)
        importer_handler.abort()
    except MultiplePackageReleaseError:
        log.exception("Database duplication processing %s", package_name)
def listChannels(cls):
    """See `ISnapStoreClient`.

    Returns the list of snap store channels, preferring (in order): the
    memcache-cached copy, a fresh fetch from the store search service,
    and finally the hard-coded ``_default_store_channels`` fallback.
    """
    # No search endpoint configured at all: only the static defaults
    # are available.
    if config.snappy.store_search_url is None:
        return _default_store_channels
    channels = None
    memcache_client = getUtility(IMemcacheClient)
    # Cache key is scoped to the search host so different store
    # configurations do not share entries.
    search_host = urlsplit(config.snappy.store_search_url).hostname
    memcache_key = ("%s:channels" % search_host).encode("UTF-8")
    cached_channels = memcache_client.get(memcache_key)
    if cached_channels is not None:
        try:
            channels = json.loads(cached_channels)
        except JSONDecodeError:
            # Corrupt cache entry: drop it and fall through to a live
            # fetch (channels stays None).
            log.exception(
                "Cannot load cached channels for %s; deleting" % search_host)
            memcache_client.delete(memcache_key)
    # Live fetch, unless disabled by feature flag.
    if (channels is None and
            not getFeatureFlag(u"snap.disable_channel_search")):
        path = "api/v1/channels"
        timeline = cls._getTimeline()
        if timeline is not None:
            action = timeline.start("store-search-get", "/" + path)
        channels_url = urlappend(config.snappy.store_search_url, path)
        try:
            response = urlfetch(
                channels_url, headers={"Accept": "application/hal+json"})
        except requests.HTTPError as e:
            raise cls._makeSnapStoreError(BadSearchResponse, e)
        finally:
            # Close the timeline action whether or not the fetch worked.
            if timeline is not None:
                action.finish()
        # HAL response; presumably the store embeds channels under
        # "clickindex:channel" -- verify against the store API docs.
        channels = response.json().get("_embedded", {}).get(
            "clickindex:channel", [])
        # Cache the fresh result for 24 hours.
        expire_time = time.time() + 60 * 60 * 24
        memcache_client.set(
            memcache_key, json.dumps(channels), expire_time)
    # Fetch was skipped (feature flag) or otherwise produced nothing:
    # fall back to the static defaults.
    if channels is None:
        channels = _default_store_channels
    return channels
def create_maps(self, arch_component_items):
    """Build the source and binary package maps from archive index files.

    Populates ``self.src_map`` (package name -> list of source stanza
    dicts, shared across architectures) and ``self.bin_map``
    (architecture -> package name -> binary stanza dict).
    """
    # Create the maps
    self.src_map = defaultdict(list)
    self.bin_map = {}
    # Iterate over ArchComponentItems instance to cover
    # all components in all architectures.
    for info_set in arch_component_items:
        # Run over the source stanzas and store info in src_map. We
        # make just one source map (instead of one per architecture)
        # because most of them are the same for all architectures,
        # but we go over it to also cover source packages that only
        # compile for one architecture.
        sources = apt_pkg.TagFile(info_set.srcfile)
        try:
            for section in sources:
                try:
                    src_tmp = dict(section)
                    src_tmp['Component'] = info_set.component
                    src_name = src_tmp['Package']
                except KeyError:
                    log.exception(
                        "Invalid Sources stanza in %s",
                        info_set.sources_tagfile)
                    continue
                self.src_map[src_name].append(src_tmp)
        except SystemError:
            # apt_pkg's TagFile iteration can raise SystemError on a
            # badly mangled stanza; log it and move on to the next
            # index rather than aborting the whole run.
            log.exception(
                "Invalid Sources stanza in %s", info_set.sources_tagfile)

        # Check if it's in source-only mode. If so, skip binary index
        # mapping.
        if info_set.source_only:
            continue

        # Create a tmp map for binaries for one arch/component pair.
        self.bin_map.setdefault(info_set.arch, {})
        tmpbin_map = self.bin_map[info_set.arch]

        binaries = apt_pkg.TagFile(info_set.binfile)
        for section in binaries:
            try:
                bin_tmp = dict(section)
                # The component isn't listed in the tagfile.
                bin_tmp['Component'] = info_set.component
                bin_name = bin_tmp['Package']
            except KeyError:
                log.exception(
                    "Invalid Releases stanza in %s",
                    info_set.binaries_tagfile)
                continue
            tmpbin_map[bin_name] = bin_tmp

        # Run over the D-I stanzas and store info in tmp_bin_map.
        dibinaries = apt_pkg.TagFile(info_set.difile)
        for section in dibinaries:
            try:
                dibin_tmp = dict(section)
                dibin_tmp['Component'] = info_set.component
                dibin_name = dibin_tmp['Package']
            except KeyError:
                # Lazy %-args for consistency with the other log calls.
                log.exception(
                    "Invalid D-I Releases stanza in %s", info_set.difile)
                continue
            tmpbin_map[dibin_name] = dibin_tmp
def run_gina(options, ztm, target_section):
    """Drive one import run for a single distro series/pocket section.

    Reads the section configuration, validates the pocket and optional
    component override, builds the package maps, imports source packages
    (and, unless in source-only mode, binary packages), committing via
    the importer handler along the way.  Exits the process on fatal
    setup errors.
    """
    # Avoid circular imports.
    from lp.registry.interfaces.pocket import PackagePublishingPocket

    # Pull the run parameters out of the parsed config section.
    package_root = target_section.root
    distro = target_section.distro
    pocket_distroseries = target_section.pocketrelease
    distroseries = target_section.distroseries
    components = [c.strip() for c in target_section.components.split(",")]
    archs = [a.strip() for a in target_section.architectures.split(",")]
    pocket = target_section.pocket
    component_override = target_section.componentoverride
    source_only = target_section.source_only
    spnames_only = target_section.sourcepackagenames_only

    LIBRHOST = config.librarian.upload_host
    LIBRPORT = config.librarian.upload_port

    # Announce the run configuration up front for the logs.
    log.info("")
    log.info("=== Processing %s/%s/%s ===", distro, distroseries, pocket)
    log.debug("Packages read from: %s", package_root)
    log.info("Components to import: %s", ", ".join(components))
    if component_override is not None:
        log.info("Override components to: %s", component_override)
    log.info("Architectures to import: %s", ", ".join(archs))
    log.debug("Launchpad database: %s", config.database.rw_main_master)
    log.info("SourcePackage Only: %s", source_only)
    log.info("SourcePackageName Only: %s", spnames_only)
    log.debug("Librarian: %s:%s", LIBRHOST, LIBRPORT)
    log.info("")

    # Resolve the configured pocket name to its enum item; bail out of
    # the process if it isn't a known pocket.
    if not hasattr(PackagePublishingPocket, pocket.upper()):
        log.error("Could not find a pocket schema for %s", pocket)
        sys.exit(1)
    pocket = getattr(PackagePublishingPocket, pocket.upper())

    # An override component must actually exist in Launchpad.
    if component_override:
        valid_components = [
            component.name for component in getUtility(IComponentSet)]
        if component_override not in valid_components:
            log.error("Could not find component %s", component_override)
            sys.exit(1)

    try:
        arch_component_items = ArchiveComponentItems(
            package_root, pocket_distroseries, components, archs,
            source_only)
    except MangledArchiveError:
        log.exception(
            "Failed to analyze archive for %s", pocket_distroseries)
        sys.exit(1)

    packages_map = PackagesMap(arch_component_items)
    importer_handler = ImporterHandler(
        ztm, distro, distroseries, package_root, pocket,
        component_override)

    import_sourcepackages(
        distro, packages_map, package_root, importer_handler)
    importer_handler.commit()

    # XXX JeroenVermeulen 2011-09-07 bug=843728: Dominate binaries as well.
    dominate_imported_source_packages(
        ztm, log, distro, distroseries, pocket, packages_map)
    ztm.commit()

    if source_only:
        log.info("Source only mode... done")
        return

    # Make sure each architecture is set up in the database before
    # importing binaries for it.
    for archtag in archs:
        try:
            importer_handler.ensure_archinfo(archtag)
        except DataSetupError:
            log.exception("Database setup required for run on %s", archtag)
            sys.exit(1)

    import_binarypackages(
        distro, packages_map, package_root, importer_handler)
    importer_handler.commit()
def import_binarypackages(distro, packages_map, package_root, importer_handler):
    """Import every binary package in the map, one architecture at a time.

    Each package import is retried once on database errors; other
    failures are logged and skipped.  Binaries whose source package
    cannot be found are collected and reported at the end.
    """
    nosource = []

    # Run over all the architectures we have
    for archtag in packages_map.bin_map.keys():
        npacks = len(packages_map.bin_map[archtag])
        log.info("%i Binary Packages to be imported for %s", npacks, archtag)
        # Go over binarypackages importing them for this architecture.
        # sorted(dict) iterates the keys and works on both Python 2 and
        # Python 3, unlike dict.iterkeys() which is Python 2 only.
        for package_name in sorted(packages_map.bin_map[archtag]):
            binary = packages_map.bin_map[archtag][package_name]
            try:
                try:
                    do_one_binarypackage(
                        distro, binary, archtag, package_root,
                        importer_handler)
                except psycopg2.Error:
                    # Transient database trouble: abort, wait, retry once.
                    log.exception(
                        "Database errors when importing a BinaryPackage "
                        "for %s. Retrying once..", package_name)
                    importer_handler.abort()
                    time.sleep(15)
                    do_one_binarypackage(
                        distro, binary, archtag, package_root,
                        importer_handler)
            except (InvalidVersionError, MissingRequiredArguments):
                log.exception(
                    "Unable to create BinaryPackageData for %s",
                    package_name)
                continue
            except (PoolFileNotFound, ExecutionError):
                # Problems with katie db stuff of opening files
                log.exception(
                    "Error processing package files for %s", package_name)
                continue
            except MultiplePackageReleaseError:
                log.exception(
                    "Database duplication processing %s", package_name)
                continue
            except psycopg2.Error:
                # The retry above also failed at the database level.
                log.exception(
                    "Database errors made me give up: unable to create "
                    "BinaryPackage for %s", package_name)
                importer_handler.abort()
                continue
            except NoSourcePackageError:
                log.exception(
                    "Failed to create Binary Package for %s", package_name)
                nosource.append(binary)
                continue

    if nosource:
        # XXX kiko 2005-10-23: untested
        log.warn("%i source packages not found", len(nosource))
        for pkg in nosource:
            log.warn(pkg)
def create_maps(self, arch_component_items):
    """Populate the source and binary package maps from archive indexes.

    ``self.src_map`` maps a source package name to the list of its
    stanza dicts (one map shared by all architectures); ``self.bin_map``
    maps an architecture to a name -> stanza dict for its binaries.
    """
    self.src_map = defaultdict(list)
    self.bin_map = {}

    for item in arch_component_items:
        # A single source map covers every architecture: most source
        # stanzas are identical across architectures, and walking each
        # index also picks up packages that build on only one of them.
        sources = apt_pkg.TagFile(item.srcfile)
        try:
            for stanza in sources:
                try:
                    src_entry = dict(stanza)
                    src_entry['Component'] = item.component
                    name = src_entry['Package']
                except KeyError:
                    log.exception(
                        "Invalid Sources stanza in %s",
                        item.sources_tagfile)
                    continue
                self.src_map[name].append(src_entry)
        except SystemError:
            # TagFile iteration itself blew up on a mangled stanza.
            log.exception(
                "Invalid Sources stanza in %s", item.sources_tagfile)

        # Source-only runs never look at the binary indexes.
        if item.source_only:
            continue

        # Per-architecture binary map for this arch/component pair.
        arch_bins = self.bin_map.setdefault(item.arch, {})

        for stanza in apt_pkg.TagFile(item.binfile):
            try:
                bin_entry = dict(stanza)
                # The index file itself does not record the component.
                bin_entry['Component'] = item.component
                name = bin_entry['Package']
            except KeyError:
                log.exception(
                    "Invalid Releases stanza in %s", item.binaries_tagfile)
                continue
            arch_bins[name] = bin_entry

        # Debian-installer stanzas land in the same per-arch map.
        for stanza in apt_pkg.TagFile(item.difile):
            try:
                di_entry = dict(stanza)
                di_entry['Component'] = item.component
                name = di_entry['Package']
            except KeyError:
                log.exception(
                    "Invalid D-I Releases stanza in %s" % item.difile)
                continue
            arch_bins[name] = di_entry
def run_gina(options, ztm, target_section):
    """Drive one import run for a single distro series/pocket section.

    Reads the section configuration, validates the pocket and optional
    component override, builds the package maps, imports source packages
    (and, unless in source-only mode, binary packages), committing via
    the importer handler along the way.  Exits the process on fatal
    setup errors.
    """
    # Pull the run parameters out of the parsed config section.
    package_root = target_section.root
    distro = target_section.distro
    pocket_distroseries = target_section.pocketrelease
    distroseries = target_section.distroseries
    components = [c.strip() for c in target_section.components.split(",")]
    archs = [a.strip() for a in target_section.architectures.split(",")]
    pocket = target_section.pocket
    component_override = target_section.componentoverride
    source_only = target_section.source_only
    spnames_only = target_section.sourcepackagenames_only

    LIBRHOST = config.librarian.upload_host
    LIBRPORT = config.librarian.upload_port

    # Announce the run configuration up front for the logs.
    log.info("")
    log.info("=== Processing %s/%s/%s ===", distro, distroseries, pocket)
    log.debug("Packages read from: %s", package_root)
    log.info("Components to import: %s", ", ".join(components))
    if component_override is not None:
        log.info("Override components to: %s", component_override)
    log.info("Architectures to import: %s", ", ".join(archs))
    log.debug("Launchpad database: %s", config.database.rw_main_master)
    log.info("SourcePackage Only: %s", source_only)
    log.info("SourcePackageName Only: %s", spnames_only)
    log.debug("Librarian: %s:%s", LIBRHOST, LIBRPORT)
    log.info("")

    # Resolve the configured pocket name to its enum item; bail out of
    # the process if it isn't a known pocket.  PackagePublishingPocket
    # is presumably imported at module level -- verify.
    if not hasattr(PackagePublishingPocket, pocket.upper()):
        log.error("Could not find a pocket schema for %s", pocket)
        sys.exit(1)
    pocket = getattr(PackagePublishingPocket, pocket.upper())

    # An override component must actually exist in Launchpad.
    if component_override:
        valid_components = [
            component.name for component in getUtility(IComponentSet)]
        if component_override not in valid_components:
            log.error("Could not find component %s", component_override)
            sys.exit(1)

    try:
        arch_component_items = ArchiveComponentItems(
            package_root, pocket_distroseries, components, archs,
            source_only)
    except MangledArchiveError:
        log.exception(
            "Failed to analyze archive for %s", pocket_distroseries)
        sys.exit(1)

    packages_map = PackagesMap(arch_component_items)
    importer_handler = ImporterHandler(
        ztm, distro, distroseries, package_root, pocket,
        component_override)

    import_sourcepackages(
        distro, packages_map, package_root, importer_handler)
    importer_handler.commit()

    # XXX JeroenVermeulen 2011-09-07 bug=843728: Dominate binaries as well.
    dominate_imported_source_packages(
        ztm, log, distro, distroseries, pocket, packages_map)
    ztm.commit()

    if source_only:
        log.info('Source only mode... done')
        return

    # Make sure each architecture is set up in the database before
    # importing binaries for it.
    for archtag in archs:
        try:
            importer_handler.ensure_arch(archtag)
        except DataSetupError:
            log.exception("Database setup required for run on %s", archtag)
            sys.exit(1)

    import_binarypackages(
        distro, packages_map, package_root, importer_handler)
    importer_handler.commit()
def import_binarypackages(distro, packages_map, package_root, importer_handler):
    """Import every binary package in the map, one architecture at a time.

    Each package import is retried once on database errors; other
    failures are logged and skipped.  Binaries whose source package
    cannot be found are collected and reported at the end.
    """
    nosource = []

    # Run over all the architectures we have
    for archtag in packages_map.bin_map.keys():
        npacks = len(packages_map.bin_map[archtag])
        log.info('%i Binary Packages to be imported for %s', npacks, archtag)
        # Go over binarypackages importing them for this architecture.
        # sorted(dict) iterates the keys and works on both Python 2 and
        # Python 3, unlike dict.iterkeys() which is Python 2 only.
        for package_name in sorted(packages_map.bin_map[archtag]):
            binary = packages_map.bin_map[archtag][package_name]
            try:
                try:
                    do_one_binarypackage(
                        distro, binary, archtag, package_root,
                        importer_handler)
                except psycopg2.Error:
                    # Transient database trouble: abort, wait, retry once.
                    log.exception(
                        "Database errors when importing a BinaryPackage "
                        "for %s. Retrying once..", package_name)
                    importer_handler.abort()
                    time.sleep(15)
                    do_one_binarypackage(
                        distro, binary, archtag, package_root,
                        importer_handler)
            except (InvalidVersionError, MissingRequiredArguments):
                log.exception(
                    "Unable to create BinaryPackageData for %s",
                    package_name)
                continue
            except (PoolFileNotFound, ExecutionError):
                # Problems with katie db stuff of opening files
                log.exception(
                    "Error processing package files for %s", package_name)
                continue
            except MultiplePackageReleaseError:
                log.exception(
                    "Database duplication processing %s", package_name)
                continue
            except psycopg2.Error:
                # The retry above also failed at the database level.
                log.exception(
                    "Database errors made me give up: unable to create "
                    "BinaryPackage for %s", package_name)
                importer_handler.abort()
                continue
            except NoSourcePackageError:
                log.exception(
                    "Failed to create Binary Package for %s", package_name)
                nosource.append(binary)
                continue

    if nosource:
        # XXX kiko 2005-10-23: untested
        log.warn('%i source packages not found', len(nosource))
        for pkg in nosource:
            log.warn(pkg)