def init_pypi_mirror(self, proxy):
    """ initialize pypi mirror if no mirror state exists. """
    self.proxy = proxy
    name2serials = self.keyfs.PYPISERIALS.get({})
    if not name2serials:
        log.info("retrieving initial name/serial list")
        name2serials = proxy.list_packages_with_serial()
        if name2serials is None:
            from devpi_server.main import fatal
            fatal("mirror initialization failed: "
                  "pypi.python.org not reachable")
        self.keyfs.PYPISERIALS.set(name2serials)
    else:
        log.info("reusing already cached name/serial list")
    # normalize to unicode->serial mapping
    for name in list(name2serials):
        if not py.builtin._istext(name):
            val = name2serials.pop(name)
            name2serials[py.builtin._totext(name, "utf-8")] = val
    self.name2serials = name2serials
    # create a mapping of normalized name to real name
    self.normname2name = d = dict()
    for name in name2serials:
        norm = normalize_name(name)
        if norm != name:
            d[norm] = name
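
# A minimal sketch of the name normalization assumed above: normalize_name()
# is expected to map differently spelled project names ("Foo.Bar_baz",
# "foo-bar-baz", ...) onto one canonical form, roughly PEP 503 style.
# The helper below is a hypothetical stand-in, not the devpi implementation.
import re

def normalize_name_sketch(name):
    # lowercase and collapse runs of ".", "-" and "_" into a single "-"
    return re.sub(r"[-_.]+", "-", name).lower()

# e.g. normalize_name_sketch("Zope.Interface") == "zope-interface"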
def do_upgrade(xom):
    tw = py.io.TerminalWriter()
    serverdir = xom.config.serverdir
    exportdir = serverdir + "-export"
    if exportdir.check():
        tw.line("removing exportdir: %s" % exportdir)
        exportdir.remove()
    newdir = serverdir + "-import"
    script = sys.argv[0]

    def rel(p):
        return py.path.local().bestrelpath(p)

    state_version = xom.get_state_version()
    tw.sep("-", "exporting to %s" % rel(exportdir))
    if Version(state_version) > Version(server_version):
        fatal("%s has state version %s which is newer than what we "
              "can handle (%s)" % (
                  xom.config.serverdir, state_version, server_version))
    elif Version(state_version) < Version(server_version):
        tw.line("creating server venv %s ..." % state_version)
        tmpdir = py.path.local.make_numbered_dir("devpi-upgrade")
        venv = create_server_venv(tw, state_version, tmpdir)
        tw.line("server venv %s created: %s" % (server_version, tmpdir))
        venv.check_call([
            "devpi-server",
            "--serverdir", str(serverdir),
            "--export", str(exportdir)])
    else:
        #tw.sep("-", "creating venv" % rel(exportdir))
        subprocess.check_call([
            sys.executable, script,
            "--serverdir", str(serverdir),
            "--export", str(exportdir)])
    tw.sep("-", "importing from %s" % rel(exportdir))
    tw.line("importing into server version: %s" % server_version)
    subprocess.check_call([
        sys.executable, script,
        "--serverdir", str(newdir),
        "--import", str(exportdir)])
    tw.sep("-", "replacing serverstate")
    backup_dir = serverdir + "-backup"
    if backup_dir.check():
        tw.line("backup dir exists, not creating backup", bold=True)
    else:
        tw.line("moving serverstate to backupdir: %s" % backup_dir, bold=True)
        serverdir.move(backup_dir)
    if serverdir.check():
        tw.line("removing serverstate: %s" % serverdir)
        serverdir.remove()
    tw.line("copying new serverstate to serverdir", bold=True)
    newdir.move(serverdir)
    serverdir.join(".serverversion").read()
    tw.line("cleanup: removing exportdir: %s" % exportdir)
    tw.line("have fun serving the new state :)")
    exportdir.remove()
def import_filedesc(self, stage, filedesc):
    assert stage.ixconfig["type"] != "mirror"
    rel = filedesc["relpath"]
    projectname = filedesc["projectname"]
    p = self.import_rootdir.join(rel)
    assert p.check(), p
    if filedesc["type"] == "releasefile":
        mapping = filedesc["entrymapping"]
        if self.dumpversion == "1":
            # previous versions would not add a version attribute
            version = BasenameMeta(p.basename).version
        else:
            version = filedesc["version"]
        entry = stage.store_releasefile(
            projectname, version, p.basename, p.read("rb"),
            last_modified=mapping["last_modified"])
        assert entry.md5 == mapping["md5"]
        self.import_pre2_toxresults(stage, entry)
    elif filedesc["type"] == "doczip":
        basename = os.path.basename(rel)
        name, version, suffix = splitbasename(basename)
        stage.store_doczip(name, version, p.read("rb"))
    elif filedesc["type"] == "toxresult":
        linkstore = stage.get_linkstore_perstage(filedesc["projectname"],
                                                 filedesc["version"])
        link, = linkstore.get_links(entrypath=filedesc["for_entrypath"])
        stage.store_toxresult(link, json.loads(p.read("rb").decode("utf8")))
    else:
        # report the actual file type instead of the "type" builtin
        fatal("unknown file type: %s" % (filedesc["type"],))
def list_packages_with_serial(self):
    try:
        r = self._http.get(self._url, stream=True)
    except self._http.RequestException:
        threadlog.exception("proxy request failed, no connection?")
    else:
        if r.status_code == 200:
            return load(r.raw)
    from devpi_server.main import fatal
    fatal("replica: could not get serials from remote")
def do_export(path, xom):
    path = py.path.local(path)
    tw = py.io.TerminalWriter()
    if path.check() and path.listdir():
        fatal("export directory %s must not exist or be empty" % path)
    path.ensure(dir=1)
    tw.line("creating %s" % path)
    dumper = Exporter(tw, xom)
    dumper.dump_all(path)
    return 0
def import_all(self, path):
    self.import_rootdir = path
    self.import_data = self.read_json(path.join("dataindex.json"))
    dumpversion = self.import_data["dumpversion"]
    if dumpversion != "1":
        fatal("incompatible dumpversion: %r" % (dumpversion,))
    self.import_users = self.import_data["users"]
    self.import_indexes = self.import_data["indexes"]
    self.xom.config.secret = secret = self.import_data["secret"]
    self.xom.config.secretfile.write(secret)

    # first create all users
    for user, userconfig in self.import_users.items():
        self.db._user_set(user, userconfig)

    # memorize index inheritance structure
    tree = IndexTree()
    tree.add("root/pypi")  # a root index
    for stagename, import_index in self.import_indexes.items():
        bases = import_index["indexconfig"].get("bases")
        tree.add(stagename, bases)

    # create stages in inheritance/root-first order
    stages = []
    for stagename in tree.iternames():
        if stagename == "root/pypi":
            assert self.db.index_exists(stagename)
            continue
        import_index = self.import_indexes[stagename]
        indexconfig = import_index["indexconfig"]
        stage = self.db.create_stage(stagename, None, **indexconfig)
        stages.append(stage)
    del tree

    # create projects and releasefiles for each index
    for stage in stages:
        assert stage.name != "root/pypi"
        import_index = self.import_indexes[stage.name]
        projects = import_index["projects"]
        #normalized = self.normalize_index_projects(projects)
        for project, versions in projects.items():
            for version, versiondata in versions.items():
                assert "+files" not in versiondata
                if not versiondata.get("version"):
                    name = versiondata["name"]
                    self.warn("%r: ignoring project metadata without "
                              "version information. " % name)
                    continue
                stage.register_metadata(versiondata)
        # import release files
        for filedesc in import_index["files"]:
            self.import_filedesc(stage, filedesc)
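
# Hypothetical sketch of the inheritance bookkeeping used by import_all()
# above; the real IndexTree lives in devpi-server and may differ in detail.
# The point it illustrates: stages are created root-first, so an index is
# only created after all indexes listed in its "bases" already exist.
class IndexTreeSketch:
    def __init__(self):
        self.name2bases = {}

    def add(self, name, bases=None):
        self.name2bases[name] = list(bases or [])

    def iternames(self):
        # yield every index only after all of its bases have been yielded
        done = set()
        pending = list(self.name2bases)
        while pending:
            ready = [n for n in pending if set(self.name2bases[n]) <= done]
            if not ready:
                break  # unknown base or inheritance cycle: give up
            for name in ready:
                done.add(name)
                yield name
            pending = [n for n in pending if n not in done]

# usage:
#   tree = IndexTreeSketch()
#   tree.add("root/pypi")
#   tree.add("user/dev", ["root/pypi"])
#   list(tree.iternames())  ->  ["root/pypi", "user/dev"]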
def devpiserver_cmdline_run(xom):
    if xom.config.args.recreate_search_index:
        if not xom.config.args.offline_mode:
            fatal("The --recreate-search-index option requires "
                  "the --offline-mode option.")
        ix = get_indexer(xom.config)
        ix.delete_index()
        indexer = get_indexer(xom.config)
        indexer.update_projects(iter_projects(xom), clear=True)
        # only exit when indexing explicitly
        return 0
    # allow devpi-server to run
    return None
def do_import(path, xom):
    logging.basicConfig(level=logging.INFO, format='%(message)s')
    path = py.path.local(path)
    tw = py.io.TerminalWriter()
    if not path.check():
        fatal("path for importing not found: %s" % path)
    if not xom.db.is_empty():
        fatal("serverdir must not contain users or stages: %s" %
              xom.config.serverdir)
    importer = Importer(tw, xom)
    importer.import_all(path)
    return 0
def __init__(self, config, settings):
    if 'path' not in settings:
        index_path = config.serverdir.join('.indices')
    else:
        index_path = settings['path']
        if not os.path.isabs(index_path):
            fatal("The path for Whoosh index files must be absolute.")
        index_path = py.path.local(index_path)
    index_path.ensure_dir()
    log.info("Using %s for Whoosh index files." % index_path)
    self.index_path = index_path.strpath
    self.indexer_thread = None
    self.shared_data = None
    self.xom = None
def import_filedesc(self, stage, filedesc):
    assert stage.ixconfig["type"] != "mirror"
    rel = filedesc["relpath"]
    projectname = filedesc["projectname"]
    p = self.import_rootdir.join(rel)
    assert p.check(), p
    if filedesc["type"] == "releasefile":
        mapping = filedesc["entrymapping"]
        if self.dumpversion == "1":
            # previous versions would not add a version attribute
            version = BasenameMeta(p.basename).version
        else:
            version = filedesc["version"]
        link = stage.store_releasefile(
            projectname, version, p.basename, p.read("rb"),
            last_modified=mapping["last_modified"])
        # devpi-server-2.1 exported with md5 checksums
        if "md5" in mapping:
            assert "hash_spec" not in mapping
            mapping["hash_spec"] = "md5=" + mapping["md5"]
        hash_algo, hash_value = parse_hash_spec(mapping["hash_spec"])
        digest = hash_algo(link.entry.file_get_content()).hexdigest()
        assert digest == hash_value
        # note that the actual hash_type used within devpi-server is not
        # determined here but in store_releasefile/store_doczip/store_toxresult etc
    elif filedesc["type"] == "doczip":
        version = filedesc["version"]
        link = stage.store_doczip(projectname, version, p.read("rb"))
    elif filedesc["type"] == "toxresult":
        linkstore = stage.get_linkstore_perstage(filedesc["projectname"],
                                                 filedesc["version"])
        # we can not search for the full relative path because
        # it might use a different checksum
        basename = posixpath.basename(filedesc["for_entrypath"])
        link, = linkstore.get_links(basename=basename)
        link = stage.store_toxresult(link, json.loads(p.read("rb").decode("utf8")))
    else:
        # report the actual file type instead of the "type" builtin
        fatal("unknown file type: %s" % (filedesc["type"],))
    history_log = filedesc.get('log')
    if history_log is None:
        link.add_log('upload', '<import>', dst=stage.name)
    else:
        link.add_logs(history_log)
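
# The checksum verification above relies on parse_hash_spec() turning a
# string like "md5=<hexdigest>" or "sha256=<hexdigest>" into a hash
# constructor plus the expected digest.  A minimal sketch under that
# assumption (the real helper comes from devpi_common and may differ):
import hashlib

def parse_hash_spec_sketch(hash_spec):
    hash_type, _, hash_value = hash_spec.partition("=")
    return getattr(hashlib, hash_type), hash_value

# usage:
#   hash_algo, hash_value = parse_hash_spec_sketch("md5=" + mapping["md5"])
#   assert hash_algo(file_content).hexdigest() == hash_value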
def import_filedesc(self, stage, filedesc):
    assert stage.ixconfig["type"] != "mirror"
    rel = filedesc["relpath"]
    #projectname = filedesc["projectname"]
    p = self.import_rootdir.join(rel)
    assert p.check(), p
    if filedesc["type"] == "releasefile":
        mapping = filedesc["entrymapping"]
        entry = stage.store_releasefile(
            p.basename, p.read("rb"),
            last_modified=mapping["last_modified"])
        assert entry.md5 == mapping["md5"]
        assert entry.size == mapping["size"]
        self.import_attachments(entry.md5)
    elif filedesc["type"] == "doczip":
        basename = os.path.basename(rel)
        name, version, suffix = splitbasename(basename)
        stage.store_doczip(name, version, p.open("rb"))
    else:
        # report the actual file type instead of the "type" builtin
        fatal("unknown file type: %s" % (filedesc["type"],))
def load_name2serials(self, proxy):
    name2serials = load_from_file(self.path_name2serials, {})
    if name2serials:
        threadlog.info("reusing already cached name/serial list")
        ensure_unicode_keys(name2serials)
    else:
        threadlog.info("retrieving initial name/serial list")
        name2serials = proxy.list_packages_with_serial()
        if name2serials is None:
            from devpi_server.main import fatal
            fatal("mirror initialization failed: "
                  "pypi.python.org not reachable")
        ensure_unicode_keys(name2serials)
        dump_to_file(name2serials, self.path_name2serials)

        # trigger anything (e.g. web-search indexing) that wants to
        # look at the initially loaded serials
        if not self.xom.is_replica():
            with self.xom.keyfs.transaction(write=True):
                with self.xom.keyfs.PYPI_SERIALS_LOADED.update():
                    pass
    return name2serials
def do_import(path, xom):
    logging.basicConfig(level=logging.INFO, format='%(message)s')
    path = py.path.local(path)
    tw = py.io.TerminalWriter()
    if not path.check():
        fatal("path for importing not found: %s" % path)
    with xom.keyfs.transaction():
        if not xom.model.is_empty():
            fatal("serverdir must not contain users or stages: %s" %
                  xom.config.serverdir)
    importer = Importer(tw, xom)
    importer.import_all(path)
    if xom.config.args.wait_for_events:
        importer.wait_for_events()
    else:
        importer.warn(
            "Update events have not been processed; when you start the server "
            "they will be processed in order. If you use devpi-web, then the "
            "search index and documentation will gradually update until all "
            "events have been processed.")
    return 0
def devpiserver_cmdline_run(xom):
    docs_path = xom.config.args.documentation_path
    if docs_path is not None and not os.path.isabs(docs_path):
        fatal("The path for unzipped documentation must be absolute.")
    # allow devpi-server to run
    return None
def import_all(self, path):
    self.import_rootdir = path
    self.import_data = self.read_json(path.join("dataindex.json"))
    self.dumpversion = self.import_data["dumpversion"]
    if self.dumpversion not in ("1", "2"):
        fatal("incompatible dumpversion: %r" % (self.dumpversion,))
    uuid = self.import_data.get("uuid")
    if uuid is not None:
        self.xom.config.set_uuid(uuid)
    self.import_users = self.import_data["users"]
    self.import_indexes = self.import_data["indexes"]
    self.xom.config.secret = secret = self.import_data["secret"]
    self.xom.config.secretfile.write(secret)

    # first create all users
    for username, userconfig in self.import_users.items():
        with self.xom.keyfs.transaction(write=True):
            if username == "root":
                user = self.xom.model.get_user(username)
            else:
                user = self.xom.model.create_user(username, password="")
            user._set(userconfig)

    # memorize index inheritance structure
    tree = IndexTree()
    tree.add("root/pypi")  # a root index
    for stagename, import_index in self.import_indexes.items():
        bases = import_index["indexconfig"].get("bases")
        tree.add(stagename, bases)

    # create stages in inheritance/root-first order
    stages = []
    for stagename in tree.iternames():
        with self.xom.keyfs.transaction(write=True):
            if stagename == "root/pypi":
                assert self.xom.model.getstage(stagename)
                continue
            import_index = self.import_indexes[stagename]
            indexconfig = import_index["indexconfig"]
            if 'uploadtrigger_jenkins' in indexconfig:
                if not indexconfig['uploadtrigger_jenkins']:
                    # remove if not set, so if the trigger was never
                    # used, you don't need to install the plugin
                    del indexconfig['uploadtrigger_jenkins']
            user, index = stagename.split("/")
            user = self.xom.model.get_user(user)
            stage = user.create_stage(index, **indexconfig)
            stages.append(stage)
    del tree

    # create projects and releasefiles for each index
    for stage in stages:
        assert stage.name != "root/pypi"
        import_index = self.import_indexes[stage.name]
        projects = import_index["projects"]
        #normalized = self.normalize_index_projects(projects)
        for project, versions in projects.items():
            for version, versiondata in versions.items():
                with self.xom.keyfs.transaction(write=True):
                    assert "+elinks" not in versiondata
                    versiondata.pop('+doczip', None)
                    versiondata.pop(':action', None)
                    assert not any(True for x in versiondata
                                   if x.startswith('+'))
                    if not versiondata.get("version"):
                        name = versiondata["name"]
                        self.warn("%r: version metadata has no explicit "
                                  "version, setting derived %r" %
                                  (name, version))
                        versiondata["version"] = version
                    stage.set_versiondata(versiondata)
        # import release files
        for filedesc in import_index["files"]:
            with self.xom.keyfs.transaction(write=True):
                self.import_filedesc(stage, filedesc)
def import_all(self, path):
    self.import_rootdir = path
    self.import_data = self.read_json(path.join("dataindex.json"))
    self.dumpversion = self.import_data["dumpversion"]
    if self.dumpversion not in ("1", "2"):
        fatal("incompatible dumpversion: %r" % (self.dumpversion,))
    self.import_users = self.import_data["users"]
    self.import_indexes = self.import_data["indexes"]
    self.xom.config.secret = secret = self.import_data["secret"]
    self.xom.config.secretfile.write(secret)

    # first create all users
    for username, userconfig in self.import_users.items():
        with self.xom.keyfs.transaction(write=True):
            if username == "root":
                user = self.xom.model.get_user(username)
            else:
                user = self.xom.model.create_user(username, password="")
            user._set(userconfig)

    # memorize index inheritance structure
    tree = IndexTree()
    tree.add("root/pypi")  # a root index
    for stagename, import_index in self.import_indexes.items():
        bases = import_index["indexconfig"].get("bases")
        tree.add(stagename, bases)

    # create stages in inheritance/root-first order
    stages = []
    for stagename in tree.iternames():
        with self.xom.keyfs.transaction(write=True):
            if stagename == "root/pypi":
                assert self.xom.model.getstage(stagename)
                continue
            import_index = self.import_indexes[stagename]
            indexconfig = import_index["indexconfig"]
            user, index = stagename.split("/")
            user = self.xom.model.get_user(user)
            stage = user.create_stage(index, **indexconfig)
            stages.append(stage)
    del tree

    # create projects and releasefiles for each index
    for stage in stages:
        assert stage.name != "root/pypi"
        import_index = self.import_indexes[stage.name]
        projects = import_index["projects"]
        #normalized = self.normalize_index_projects(projects)
        for project, versions in projects.items():
            for version, versiondata in versions.items():
                with self.xom.keyfs.transaction(write=True):
                    assert "+elinks" not in versiondata
                    if '+doczip' in versiondata:
                        del versiondata['+doczip']
                    assert not any(True for x in versiondata
                                   if x.startswith('+'))
                    if not versiondata.get("version"):
                        name = versiondata["name"]
                        self.warn("%r: version metadata has no explicit "
                                  "version, setting derived %r" %
                                  (name, version))
                        versiondata["version"] = version
                    stage.set_versiondata(versiondata)
        # import release files
        for filedesc in import_index["files"]:
            with self.xom.keyfs.transaction(write=True):
                self.import_filedesc(stage, filedesc)