def test_resolvable_directory():
    builder = ResolverOptionsBuilder()
    with make_source_dir(name='my_project') as td:
        rdir = ResolvableDirectory.from_string(td, builder)
        assert rdir.name == pkg_resources.safe_name('my_project')
        assert rdir.extras() == []

        rdir = ResolvableDirectory.from_string(td + '[extra1,extra2]', builder)
        assert rdir.name == pkg_resources.safe_name('my_project')
        assert rdir.extras() == ['extra1', 'extra2']
def __init__(self, filename, comment, metadata, python_version, filetype):
    self.filename = filename
    self.basefilename = os.path.basename(filename)
    self.comment = comment
    self.metadata = metadata
    self.python_version = python_version
    self.filetype = filetype
    self.safe_name = pkg_resources.safe_name(metadata.name)
    self.signed_filename = self.filename + '.asc'
    self.signed_basefilename = self.basefilename + '.asc'
    self.gpg_signature = None

    blake2_256_hash = None
    if pyblake2 is not None:
        blake2_256_hash = pyblake2.blake2b(digest_size=256 // 8)
    # NOTE(sigmavirus24): We may or may not be able to use blake2 so let's
    # either use the methods or lambdas to do nothing.
    blake_update = getattr(blake2_256_hash, 'update', lambda *args: None)
    blake_hexdigest = getattr(blake2_256_hash, 'hexdigest', lambda: None)

    md5_hash = hashlib.md5()
    sha2_hash = hashlib.sha256()
    with open(filename, "rb") as fp:
        for content in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b''):
            md5_hash.update(content)
            sha2_hash.update(content)
            blake_update(content)

    self.md5_digest = md5_hash.hexdigest()
    self.sha2_digest = sha2_hash.hexdigest()
    self.blake2_256_digest = blake_hexdigest()
def validate_strict_metadata(metadata):
    if metadata['name'] != pkg_resources.safe_name(metadata['name']):
        raise RuntimeError("Package name '%s' contains illegal character(s); "
                           "consider changing to '%s'"
                           % (metadata['name'],
                              pkg_resources.safe_name(metadata['name'])))
    for section, reqs in ([(k, metadata[k]) for k in REQUIRES_KEYS] +
                          [('extras_require[%s]' % k, v)
                           for k, v in metadata['extras_require'].items()]):
        for s in reqs:
            req = pkg_resources.Requirement.parse(s)
            if req.unsafe_name != req.project_name:
                raise RuntimeError("Invalid name '%s' in requirement '%s' for "
                                   "'%s' of '%s'; consider changing to '%s'"
                                   % (req.unsafe_name, s, section,
                                      metadata['name'], req.project_name))
def setup(args=None):
    # make sure our directory is at the front of sys.path
    module = metadata('backupmgr')

    # get the version and description from the source
    version = module.__version__
    description = pydoc.splitdoc(pydoc.getdoc(module))[0]
    author, author_email = email.utils.parseaddr(module.__authors__[0])

    # get the long description from README-type files
    long_description = []
    for path in READMES:
        with open(os.path.join(SRCROOT, path), 'r') as fh:
            long_description.append(fh.read())
    long_description = '\n'.join([x for x in long_description if x])

    # use setuptools to do the rest
    setuptools.setup(
        name=pkg_resources.safe_name(module.__name__),
        packages=setuptools.find_packages(),
        version=version,
        description=description,
        author=author,
        author_email=author_email,
        zip_safe=True,
        #url=None,
        install_requires=["python-dateutil"],
        long_description=long_description,
        license='BSD',
        classifiers=[
            'Development Status :: 3 - Alpha',
            'Intended Audience :: Developers'
        ])
def project_vars(self):
    output_dir = self.output_path
    project_name = os.path.basename(os.path.split(output_dir)[1])
    pkg_name = _bad_chars_re.sub('', project_name.lower().replace('-', '_'))
    safe_name = pkg_resources.safe_name(project_name)
    egg_name = pkg_resources.to_filename(safe_name)

    # get pyramid package version
    pyramid_version = self.pyramid_dist.version

    ## map the pyramid package version to a documentation branch ##
    # if version ends with 'dev' then docs version is 'master'
    if self.pyramid_dist.version[-3:] == 'dev':
        pyramid_docs_branch = 'master'
    else:
        # otherwise find the major.minor version string and combine it
        # with '-branch'
        version_match = re.match(r'(\d+\.\d+)', self.pyramid_dist.version)
        if version_match is not None:
            pyramid_docs_branch = "%s-branch" % version_match.group()
        # if the version cannot be parsed, default to 'latest'
        else:
            pyramid_docs_branch = 'latest'

    return {
        'project': project_name,
        'package': pkg_name,
        'egg': egg_name,
        'pyramid_version': pyramid_version,
        'pyramid_docs_branch': pyramid_docs_branch,
    }
def project_vars(self):
    output_dir = self.output_path
    project_name = os.path.basename(os.path.split(output_dir)[1])
    if self.options.package_name is None:
        pkg_name = _bad_chars_re.sub("", project_name.lower().replace("-", "_"))
        safe_name = pkg_resources.safe_name(project_name)
    else:
        pkg_name = self.options.package_name
        safe_name = pkg_name
    egg_name = pkg_resources.to_filename(safe_name)

    # get pyramid package version
    pyramid_version = self.pyramid_dist.version

    ## map the pyramid package version to a documentation branch ##
    # if version ends with 'dev' then docs version is 'master'
    if self.pyramid_dist.version[-3:] == "dev":
        pyramid_docs_branch = "master"
    else:
        # otherwise find the major.minor version string and combine it
        # with '-branch'
        version_match = re.match(r"(\d+\.\d+)", self.pyramid_dist.version)
        if version_match is not None:
            pyramid_docs_branch = "%s-branch" % version_match.group()
        # if the version cannot be parsed, default to 'latest'
        else:
            pyramid_docs_branch = "latest"

    return {
        "project": project_name,
        "package": pkg_name,
        "egg": egg_name,
        "pyramid_version": pyramid_version,
        "pyramid_docs_branch": pyramid_docs_branch,
    }
def extract_project_package(install_folder, temp_folder, site_name, prefix, version):
    package_archive = '{}-{}-{}.tar.gz'.format(prefix, safe_name(site_name), version)
    print(green("extract project package: {}".format(package_archive)))
    run('tar --strip-components=1 --directory={} -xzf {}'.format(
        install_folder, os.path.join(temp_folder, package_archive)
    ))
def download_package(site_name, prefix, version, temp_folder, venv_folder):
    package_name = '{}-{}=={}'.format(prefix, safe_name(site_name), version)
    print(green("download package: {}".format(package_name)))
    pip_bin = os.path.join(venv_folder, 'bin', 'pip')
    run('{} install --download={} --no-deps {}'.format(
        pip_bin, temp_folder, package_name
    ))
def test_upload_fails_with_wrong_filename(self, pyramid_config, db_request):
    pyramid_config.testing_securitypolicy(userid=1)
    user = UserFactory.create()
    project = ProjectFactory.create()
    release = ReleaseFactory.create(project=project, version="1.0")
    RoleFactory.create(user=user, project=project)

    filename = "nope-{}.tar.gz".format(release.version)

    db_request.POST = MultiDict({
        "metadata_version": "1.2",
        "name": project.name,
        "version": release.version,
        "filetype": "sdist",
        "md5_digest": "nope!",
        "content": pretend.stub(
            filename=filename,
            file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
        ),
    })

    with pytest.raises(HTTPBadRequest) as excinfo:
        pypi.file_upload(db_request)

    resp = excinfo.value

    assert resp.status_code == 400
    assert resp.status == (
        "400 The filename for {!r} must start with {!r}.".format(
            project.name,
            pkg_resources.safe_name(project.name).lower(),
        )
    )
def __call__(self):
    post = dict(self.request.POST)
    metadata = dict([(key, value) for key, value in post.items()
                     if key != 'content'])
    if post.get(':action') == "file_upload":
        name = pkg_resources.safe_name(post.get('name'))
        version = post.get('version')
        content = post.get('content')
        md5_digest = post.get('md5_digest')

        package = self.context[name] if self.context.get(name) else Package(name)
        package.__parent__ = self.context
        self.context[name] = package

        release = (package[version] if package.releases.get(version)
                   else Release(name=version, version=version, metadata=metadata))
        release.__parent__ = package
        self.context[name][version] = release

        if release.release_files.get(content.filename):
            return HTTPConflict()

        release_file = ReleaseFile(
            filename=content.filename,
            content=content.file.read(),
            md5_digest=md5_digest,
            status=STATUS.local,
        )
        release = self.context[name][version]
        self.context[name][version][content.filename] = release_file
        return Response()
    else:
        return HTTPBadRequest()
def take_action(self, opts):
    if not opts.package:
        package = opts.name.lower()
        package = beginning_letter.sub("", package)
        package = valid_only.sub("", package)
        opts.package = package

    opts.name = pkg_resources.safe_name(opts.name)
    opts.project = opts.name

    env = pkg_resources.Environment()
    if opts.name.lower() in env:
        print('The name "%s" is already in use by' % opts.name)
        for dist in env[opts.name]:
            print(dist)
        return

    try:
        if imp.find_module(opts.package):
            print('The package name "%s" is already in use' % opts.package)
            return
    except ImportError:
        pass

    if os.path.exists(opts.name):
        print('A directory called "%s" already exists. Exiting.' % opts.name)
        return

    self.run_template(opts.name, opts)
    os.chdir(opts.name)
def _read_info_json(self, pypm_file):
    """Read cached info.json (as a dict) from the .d/ directory.

    If the cached version is missing, read from the package file itself,
    which is an expensive operation.
    """
    info_json_loc = xjoin(pypm_file + '.d', 'info.json')
    try:
        s = self.repository.uptree.open_and_read(info_json_loc)
    except IOError as e:
        # There seems to be no .d/info.json file; perhaps this is a
        # 'custom' package that is not managed by pypm-builder. So let's
        # extract info.json from the package file (.pypm) even though
        # that is expensive (so we also warn the user).
        LOG.warn(
            'Cache file (.d/info.json) missing; retrieving from %s',
            pypm_file)
        s = PackageFile(pypm_file).retrieve_info_json()

    d = json.loads(s)
    # It is not clear whether info.json's "name" field is canonical (i.e.,
    # the lower-case safe version of the name, guaranteed to be the same).
    # Therefore, we do one final conversion here.
    d['name'] = pkg_resources.safe_name(d['name']).lower()
    return d
def list_package_candidates(verbose='yes'):
    """List the packages that are available for deployment"""
    ignore_dirs = api.env.ignore_dirs + GLOBAL_IGNORES
    get_info = api.env.setuptools.get_package_info

    # find all the packages in the given package dirs
    for package_dir in api.env.package_dirs:
        abs_package_dir = os.path.abspath(os.path.expanduser(package_dir))
        items = os.listdir(abs_package_dir)
        for item in items:
            if item in ignore_dirs:
                continue
            package_path = os.path.join(abs_package_dir, item)
            if not os.path.isdir(package_path):
                continue
            with api.lcd(package_path):
                # get the actual package name and version via mkrelease
                # TODO: handle dev release
                pkg_name, pkg_ver = get_info(package_path, develop=False)
                safe_pkg_name = pkg_resources.safe_name(pkg_name)
                if safe_pkg_name != pkg_name:
                    msg = "\nSafe package name for %s used: %s"
                    print colors.yellow(msg % (pkg_name, safe_pkg_name))
                api.env.package_info.setdefault(safe_pkg_name, {})
                api.env.package_info[safe_pkg_name]['path'] = package_path
                api.env.package_info[safe_pkg_name]['version'] = pkg_ver
                api.env.package_info[safe_pkg_name]['unsafe_name'] = pkg_name
                api.env.packages.append(safe_pkg_name)

    if verbose.lower() in TRUISMS:
        print """
Packages available:
%s
""" % "\n".join(api.env.packages)
def get_package_location(self, pkg_name):
    key = pkg_resources.safe_name(pkg_name).lower()
    cmd = r"""
from pkg_resources import working_set
print(working_set.by_key[%r].location)""" % key
    return self.run_python_cmd(['-c', cmd],
                               capture_stdout=True,
                               cwd='/').strip()
def get_pip_pkg(name, stage=None, command="pip"):
    try:
        output = call(command, "show", pkg_resources.safe_name(name),
                      stage=stage, errok=True)
        for line in output.split("\n"):
            if line.startswith("Location: "):
                return os.path.join(line.split(": ")[1], name)
    except ShellError:
        return None
def run(self):
    if not self.distribution.get_name() == 'UNKNOWN':
        self.run_command('egg_info')

    self.banner("Dependency Graph: note - includes only installed "
                "packages")
    all_packages = dependency.all_packages(
        exclusions=self.exclude,
        include_third_party=self.third_party,
        exclude_pinned=False)

    if not all_packages:
        log.info("No matching packages to render")
        return

    if self.distribution.get_name() == 'UNKNOWN' and not self.args:
        # Pick any package and set the 'everything' flag if nothing was
        # specified
        pkg = all_packages.keys()[0]
        self.everything = True
        self.args = [pkg]

    if 'UNKNOWN' in all_packages:
        del all_packages['UNKNOWN']

    roots = []
    if self.args:
        roots = [safe_name(i) for i in self.args]
        for i in roots:
            if i not in all_packages.keys():
                raise DistutilsOptionError("Unknown package: %s" % i)
    if not roots:
        roots = [self.distribution.get_name()]

    self.banner("Rendering using %s" % self.renderer)

    if self.renderer in ['ascii', 'graphviz']:
        # TODO: use nx digraph as below, retire get_targets
        src, eggs = dependency.get_targets(
            roots, all_packages,
            everything=self.everything,
            immediate_deps=True,
            follow_all=True,
            include_eggs=True,
            include_source=True)
        graph.draw_graph(inclusions=src + eggs,
                         renderer=self.renderer,
                         outfile=self.out)
    else:
        nx_graph, _ = dependency.get_graph_from_ws(working_set)
        if self.renderer == 'd3':
            graph.draw_networkx_with_d3(nx_graph, self.third_party, self.out)
        elif self.renderer == 'pydot':
            self.fetch_build_eggs(['pydot'])
            graph.draw_networkx_with_pydot(nx_graph, self.third_party,
                                           self.out, self.requirements)
def main(requirements_path):
    this_requirements_file = os.path.basename(requirements_path)
    parsed = parse_requirements(
        requirements_path, session=pip.download.PipSession())
    requirements = [
        req for req in parsed
        # Skip packages from other requirements files
        if this_requirements_file in req.comes_from]

    reverse_requirements = {}
    nested_requirements = set()

    # Fetch nested requirements lines; this is mostly copied from pip so
    # that we support stuff "correctly". Unfortunately there isn't any
    # good API in pip for it :-/
    parser = build_parser()
    defaults = parser.get_default_values()
    with open(requirements_path) as fobj:
        for line in fobj:
            args_str, options_str = break_args_options(line)
            opts, _ = parser.parse_args(shlex.split(options_str), defaults)
            if opts.requirements:
                nested_requirements.update(opts.requirements)

    # Build reverse requirements to be able to add a note on who is
    # depending on what
    for req in requirements:
        reverse_requirements[safe_name(req.name)] = rdeps(req.name)

    output = []
    output.extend('-r %s' % req for req in nested_requirements)
    output.append('')

    # Let's output the updated, fixed and more correct requirements version
    for req in sorted(requirements, key=lambda x: safe_name(x.name)):
        if reverse_requirements.get(safe_name(req.name)):
            msg = '# %s is required by %s' % (
                safe_name(req.name),
                ', '.join(reverse_requirements[safe_name(req.name)]))
            output.append(msg)
        output.append('%s%s' % (safe_name(req.name), str(req.specifier)))

    with open(requirements_path, 'wb') as fobj:
        fobj.write('\n'.join(output))
    with open(requirements_path, 'a') as fobj:
        fobj.write('\n')

    for req in requirements:
        run_single_package(
            '%s%s' % (safe_name(req.name), str(req.specifier)),
            requirements_path,
            'sha256',
            # Work around a bug or feature in hashin which would avoid
            # fetching wheels e.g. for some packages.
            python_versions=['py27', '2.7'],
            verbose=True)
def pre_run(self):
    self.egg_name = pkg_resources.safe_name(self.distribution.get_name())
    self.egg_info = "%s.egg-info" % self.egg_name
    if (not os.path.exists(self.egg_info)
            or _newer_requires_files(self.egg_info)):
        ei_cmd = self.get_finalized_command('egg_info')
        ei_cmd.run()
    self.install_test_requirements()
    _copy_test_requires_to(self.egg_info)
def scan(link):
    # Process a URL to see if it's for a package page
    if link.startswith(self.index_url):
        parts = list(map(
            urllib.parse.unquote,
            link[len(self.index_url):].split("/")))
        if len(parts) == 2 and "#" not in parts[1]:
            # it's a package page, sanitize and index it
            pkg = safe_name(parts[0])
            ver = safe_version(parts[1])
            self.package_pages.setdefault(pkg.lower(), {})[link] = True
            return to_filename(pkg), to_filename(ver)
    return None, None
def __init__(self, name, serial, mirror):
    self.name = name
    self.serial = serial
    self.normalized_name = canonicalize_name(name).encode("utf-8")
    # This is really only useful for pip 8.0 -> 8.1.1
    self.normalized_name_legacy = \
        pkg_resources.safe_name(name).lower().encode("utf-8")
    self.encoded_name = self.name.encode('utf-8')
    self.encoded_first = self.name[0].encode('utf-8')
    self.quoted_name = quote(self.encoded_name)
    self.mirror = mirror
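# A hedged aside, not part of the snippet above: the two normalizations it
# stores differ for names containing dots or underscores. The comparison
# below assumes packaging.utils.canonicalize_name (PEP 503) is available.
import pkg_resources
from packaging.utils import canonicalize_name

for raw in ("zope.interface", "Twisted_Web"):
    print(raw, canonicalize_name(raw), pkg_resources.safe_name(raw).lower())
# zope.interface -> 'zope-interface' (PEP 503) vs 'zope.interface' (legacy)
# Twisted_Web    -> 'twisted-web'   (PEP 503) vs 'twisted-web'   (legacy)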
def patch_missing_pkg_info(self, attrs):
    # Fake up a replacement for the data that would normally come from
    # PKG-INFO, but which might not yet be built if this is a fresh
    # checkout.
    #
    if not attrs or 'name' not in attrs or 'version' not in attrs:
        return
    key = pkg_resources.safe_name(str(attrs['name'])).lower()
    dist = pkg_resources.working_set.by_key.get(key)
    if dist is not None and not dist.has_metadata('PKG-INFO'):
        dist._version = pkg_resources.safe_version(str(attrs['version']))
        self._patched_dist = dist
def get_dist(name, ws=None):
    """Returns a distribution by name from the given working set.

    Uses the global working set if unspecified.
    """
    if not ws:
        ws = pkg_resources.working_set
    res = [i for i in ws if pkg_resources.safe_name(name) == i.project_name]
    if len(res) > 1:
        raise DependencyError(
            "More than one dist matches the name %r in working set" % name)
    if res:
        return res[0]
    return None
def read_all_revisions(self, dist):
    """Read revision from egg directory."""
    for egg_info_dir in [os.path.join(dist.location, 'EGG-INFO'),
                         os.path.join(dist.location,
                                      '%s.egg-info' % dist.project_name)]:
        all_revs_fname = os.path.join(egg_info_dir, 'allrevisions.txt')
        if os.path.exists(all_revs_fname):
            revisions = read_allrevisions_file(all_revs_fname)
            for rev_data in revisions:
                if pkg_resources.safe_name(rev_data[0]) == dist.project_name:
                    return tuple(rev_data)  # name, version, url, rev
    return None
def satisfies(self, requirement):
    """Determine whether this package matches the requirement.

    :param requirement: The requirement to compare this Package against
    :type requirement: string or :class:`pkg_resources.Requirement`
    :returns: True if the package matches the requirement, otherwise False
    """
    requirement = maybe_requirement(requirement)
    link_name = safe_name(self.name).lower()
    if link_name != requirement.key:
        return False
    return self.raw_version in requirement
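# Illustrative sketch (not from the snippet's codebase): why comparing
# safe_name(...).lower() against Requirement.key works — pkg_resources
# lower-cases the safe name to build the key.
import pkg_resources

req = pkg_resources.Requirement.parse("WSGI-Utils>=0.5")
print(req.key)                                        # 'wsgi-utils'
print(pkg_resources.safe_name("WSGI Utils").lower())  # 'wsgi-utils'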
def get_python_package_installed_version(name, from_cache=True):
    global installed_package_name2version
    if not from_cache:
        installed_package_name2version = None
    if installed_package_name2version is None:
        installed_package_name2version = {}
        pip_freeze_output = shell_execute('pip freeze', capture=True, debug=True)
        for line in pip_freeze_output.splitlines():
            parts = line.split('==', 1)
            if len(parts) == 2:
                installed_package_name2version[parts[0]] = parts[1]
    return installed_package_name2version.get(safe_name(name))
def developVersions(self):
    self.release_eggs()  # ensure we've got self.develop_versions
    specs = {}
    # have to use lower since eggs are case insensitive
    specs.update(dict([(pkg_resources.safe_name(p), v)
                       for p, v, e in self.local_eggs.values()]))
    res = ""
    for name, version in sorted(specs.items()):
        res += "\n%s=%s" % (name, version)
    return res
def _resolve_roots(self, all_packages):
    roots = []
    if self.args:
        roots = [safe_name(i) for i in self.args]
        for i in roots:
            if i not in all_packages.keys():
                raise DistutilsOptionError("Unknown package: %s" % i)
    if not roots:
        roots = [self.distribution.get_name()]
    return roots
def _project_name(self):
    """Return the inner Requirement's "unsafe name".

    Raise ValueError if there is no name.
    """
    name = getattr(self._req.req, 'project_name', '')
    if name:
        return name
    name = getattr(self._req.req, 'name', '')
    if name:
        return safe_name(name)
    raise ValueError('Requirement has no project_name.')
def egg_name(self):
    wheel = self.wheel
    name = pkg_resources.safe_name(wheel.name)
    version = pkg_resources.safe_version(wheel.version)
    pyver = 'py%d.%d' % sys.version_info[:2]
    bits = [pkg_resources.to_filename(name),
            pkg_resources.to_filename(version),
            pyver]
    if any(abi != 'none' or arch != 'any'
           for pyver, abi, arch in wheel.tags):
        # not pure python
        bits.append(pkg_resources.get_build_platform())
    return '-'.join(bits) + '.egg'
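# A rough sketch of the resulting egg filename; 'demo-pkg' and '1.0' are
# made-up stand-ins for wheel.name and wheel.version, and the platform
# suffix (omitted here) would come from pkg_resources.get_build_platform().
import sys
import pkg_resources

name = pkg_resources.to_filename(pkg_resources.safe_name('demo-pkg'))   # 'demo_pkg'
version = pkg_resources.to_filename(pkg_resources.safe_version('1.0'))  # '1.0'
pyver = 'py%d.%d' % sys.version_info[:2]
print('-'.join([name, version, pyver]) + '.egg')  # e.g. 'demo_pkg-1.0-py3.8.egg'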
def read_all_revisions(self, dist):
    """Read revision from egg directory."""
    from path import path
    dist_dir = path(dist.location)
    for egg_info_dir in [dist_dir / 'EGG-INFO',
                         dist_dir / '{}.egg-info'.format(dist.project_name)]:
        all_revs_fname = egg_info_dir / 'allrevisions.txt'
        if all_revs_fname.exists():
            revisions = read_allrevisions_file(all_revs_fname)
            for rev_data in revisions:
                if pkg_resources.safe_name(rev_data[0]) == dist.project_name:
                    return tuple(rev_data)  # name, version, url, rev
    return None
def egg_name(dist_name):
    return pkg_resources.to_filename(pkg_resources.safe_name(dist_name))
NAME = 'astropy_helpers'
VERSION = '3.1.dev'
RELEASE = 'dev' not in VERSION

generate_version_py(NAME, VERSION, RELEASE, False, uses_git=not RELEASE)

# Use the updated version including the git rev count
from astropy_helpers.version import version as VERSION

add_exclude_packages(['astropy_helpers.tests'])

cmdclass = register_commands(NAME, VERSION, RELEASE)

# This package actually doesn't use the Astropy test command
del cmdclass['test']

setup(
    name=pkg_resources.safe_name(NAME),  # astropy_helpers -> astropy-helpers
    version=VERSION,
    description='Utilities for building and installing Astropy, Astropy '
                'affiliated packages, and their respective documentation.',
    author='The Astropy Developers',
    author_email='*****@*****.**',
    license='BSD',
    url='https://github.com/astropy/astropy-helpers',
    long_description=open('README.rst').read(),
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Framework :: Setuptools Plugin',
        'Framework :: Sphinx :: Extension',
        'Framework :: Sphinx :: Theme',
        'License :: OSI Approved :: BSD License',
def do_update(
    project: Project,
    dev: bool = False,
    sections: Sequence[str] = (),
    default: bool = True,
    strategy: str = "reuse",
    save: str = "compatible",
    unconstrained: bool = False,
    packages: Sequence[str] = (),
) -> None:
    """Update specified packages or all packages

    :param project: The project instance
    :param dev: whether to update dev dependencies
    :param sections: update specified sections
    :param default: update default
    :param strategy: update strategy (reuse/eager)
    :param save: save strategy (compatible/exact/wildcard)
    :param unconstrained: ignore version constraints
    :param packages: specified packages to update
    :return: None
    """
    check_project_file(project)
    if len(packages) > 0 and (len(sections) > 1 or not default):
        raise click.BadParameter(
            "packages argument can't be used together with multiple -s or "
            "--no-default.")
    if not packages:
        if unconstrained:
            raise click.BadArgumentUsage(
                "--unconstrained must be used with package names given.")
        # pdm update with no packages given, same as 'lock' + 'sync'
        do_lock(project)
        do_sync(project, sections, dev, default, clean=False)
        return
    section = sections[0] if sections else ("dev" if dev else "default")
    all_dependencies = project.all_dependencies
    dependencies = all_dependencies[section]
    updated_deps = {}
    tracked_names = set()
    for name in packages:
        matched_name = next(
            filter(
                lambda k: safe_name(strip_extras(k)[0]).lower()
                == safe_name(name).lower(),
                dependencies.keys(),
            ),
            None,
        )
        if not matched_name:
            raise ProjectError("{} does not exist in {} dependencies.".format(
                context.io.green(name, bold=True), section))
        if unconstrained:
            dependencies[matched_name].specifier = get_specifier("")
        tracked_names.add(matched_name)
        updated_deps[matched_name] = dependencies[matched_name]
    context.io.echo("Updating packages: {}.".format(", ".join(
        context.io.green(v, bold=True) for v in tracked_names)))
    resolved = do_lock(project, strategy, tracked_names, all_dependencies)
    do_sync(project, sections=(section,), default=False, clean=False)
    if unconstrained:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        project.add_dependencies(updated_deps)
        lockfile = project.lockfile
        lockfile["root"]["content_hash"] = (
            "md5:" + project.get_content_hash("md5"))
        project.write_lockfile(lockfile, False)
def take_action(self, opts):
    opts.egg_plugins = []

    if opts.no_sqlalchemy:
        opts.sqlalchemy = False

    if opts.ming:
        opts.sqlalchemy = False
        opts.migrations = False

    if opts.no_auth:
        opts.auth = False

    if opts.skip_default_tmpl:
        opts.kajiki = False

    if not opts.package:
        package = opts.name.lower()
        package = beginning_letter.sub("", package)
        package = valid_only.sub("", package)
        opts.package = package

    if opts.tw1:
        opts.skip_tw = False

    if opts.auth:
        if opts.ming:
            opts.auth = "ming"
            opts.ming = True
        else:
            opts.auth = "sqlalchemy"
            opts.sqlalchemy = True
    else:
        opts.auth = None

    opts.database = opts.sqlalchemy or opts.ming

    opts.name = pkg_resources.safe_name(opts.name)
    opts.project = opts.name

    env = pkg_resources.Environment()
    if opts.name.lower() in env:
        print('The name "%s" is already in use by' % opts.name)
        for dist in env[opts.name]:
            print(dist)
        return

    import imp
    try:
        if imp.find_module(opts.package):
            print('The package name "%s" is already in use' % opts.package)
            return
    except ImportError:
        pass

    if os.path.exists(opts.name):
        print('A directory called "%s" already exists. Exiting.' % opts.name)
        return

    opts.cookiesecret = None
    try:
        import uuid
        opts.cookiesecret = str(uuid.uuid4())
    except ImportError:
        import random
        import base64
        import struct
        opts.cookiesecret = base64.b64encode(''.join(
            [struct.pack('i', random.randrange(2 ** 31))
             for _n in range(6)])).strip()

    devtools_path = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))

    # Workaround for templates ported from Paste
    # which check for 'True' instead of True
    template_vars = dict(vars(opts))
    #for key, value in template_vars.items():
    #    if value is True:
    #        template_vars[key] = 'True'

    template_vars['PY3'] = PY3
    template_vars['PYVERSION'] = PYVERSION

    QuickstartTemplate().run(
        os.path.join(devtools_path, 'templates', 'turbogears'),
        opts.name, template_vars)

    os.chdir(opts.name)

    try:
        sys.argv = ['setup.py', 'egg_info']
        imp.load_module('setup', *imp.find_module('setup', ['.']))
    except:
        print('Unable to run egg_info for newly created package! Continuing anyway...')

    print("")

    # dirty hack to allow "empty" dirs
    for base, _path, files in os.walk('./'):
        for filename in files:
            if filename == 'empty':
                os.remove(os.path.join(base, filename))

    if opts.mako or opts.genshi or opts.jinja or opts.kajiki:
        package_template_dir = os.path.abspath(
            os.path.join(opts.package, 'templates'))

        def overwrite_templates(template_type):
            print('Writing %s template files to ./%s' % (
                template_type, os.path.join(opts.package, 'templates')
            ))
            # replace template files with alternative ones
            alt_template_dir = os.path.join(devtools_path, 'commands',
                                            'quickstart_%s' % template_type)
            shutil.rmtree(package_template_dir)
            shutil.copytree(alt_template_dir, package_template_dir)

        if opts.genshi:
            overwrite_templates('genshi')
        elif opts.jinja:
            overwrite_templates('jinja')
        elif opts.mako:
            overwrite_templates('mako')
        elif opts.kajiki:
            overwrite_templates('kajiki')

    if opts.kajiki:
        # Provide Kajiki as a lingua franca for pluggable apps.
        print('Adding Kajiki master for pluggable apps')
        package_template_dir = os.path.abspath(
            os.path.join(opts.package, 'templates'))
        alt_template_dir = os.path.join(devtools_path, 'commands',
                                        'quickstart_kajiki')
        shutil.copy(os.path.join(alt_template_dir, 'master.xhtml'),
                    package_template_dir)

    if opts.minimal_quickstart:
        print('Minimal Quickstart requested, throwing away example parts')
        package_controllers_dir = os.path.abspath(
            os.path.join(opts.package, 'controllers'))
        os.unlink(next(glob.iglob(os.path.join(package_controllers_dir,
                                               'secure.py'))))
        package_template_dir = os.path.abspath(
            os.path.join(opts.package, 'templates'))
        os.unlink(next(glob.iglob(os.path.join(package_template_dir, 'data.*'))))
        os.unlink(next(glob.iglob(os.path.join(package_template_dir, 'environ.*'))))
        os.unlink(next(glob.iglob(os.path.join(package_template_dir, 'about.*'))))

    if opts.ming:
        print('Writing Ming model files to ./%s' % os.path.join(opts.package,
                                                                'model'))
        package_model_dir = os.path.abspath(os.path.join(opts.package, 'model'))
        ming_model_dir = os.path.join(devtools_path, 'commands', 'model_ming')
        shutil.copy(os.path.join(ming_model_dir, 'session.py'),
                    package_model_dir)

    if not opts.migrations:
        print('Disabling migrations support')
        # remove existing migrations directory
        package_migrations_dir = os.path.abspath('migration')
        shutil.rmtree(package_migrations_dir, ignore_errors=True)
def testSafeName(self):
    self.assertEqual(safe_name("adns-python"), "adns-python")
    self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
    self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
    self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker")
    self.assertNotEqual(safe_name("peak.web"), "peak-web")
def canonical_name(req_name):
    """Return the canonical form of req_name."""
    return pkg_resources.safe_name(req_name).lower()
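# For reference: safe_name() collapses each run of characters outside
# [A-Za-z0-9.] into a single '-', so lower-casing it yields a stable key.
import pkg_resources

for raw in ("WSGI Utils", "zope.interface", "My__Package"):
    print(raw, "->", pkg_resources.safe_name(raw).lower())
# WSGI Utils     -> wsgi-utils
# zope.interface -> zope.interface
# My__Package    -> my-package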
def normalize(cls, name):
    return safe_name(name).lower()
def raw_version(self):
    return safe_name(self._raw_version)
def name(self):
    return safe_name(self._name)
def testSafeName(self):
    assert safe_name("adns-python") == "adns-python"
    assert safe_name("WSGI Utils") == "WSGI-Utils"
    assert safe_name("WSGI Utils") == "WSGI-Utils"
    assert safe_name("Money$$$Maker") == "Money-Maker"
    assert safe_name("peak.web") != "peak-web"
def safe_name(value: str) -> str:
    return pkg_resources.to_filename(pkg_resources.safe_name(value))
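# Quick check of the composition above: safe_name() turns illegal runs
# into '-', then to_filename() swaps '-' for '_'.
import pkg_resources

value = "My Cool Project"
print(pkg_resources.safe_name(value))                             # 'My-Cool-Project'
print(pkg_resources.to_filename(pkg_resources.safe_name(value)))  # 'My_Cool_Project'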
def post(self, name):
    if not self.check_authentication():
        response = make_response("", 401)
        response.headers["WWW-Authenticate"] = 'Basic realm="Jazzband"'
        return response

    # distutils "helpfully" substitutes unknown, but "required" values
    # with the string "UNKNOWN". This is basically never what anyone
    # actually wants so we'll just go ahead and delete anything whose
    # value is UNKNOWN.
    form_copy = request.form.copy()
    unknown_found = False
    for key, value in request.form.items():
        if value == "UNKNOWN":
            unknown_found = True
            form_copy.pop(key)
    if unknown_found:
        request.form = form_copy

    form = UploadForm(meta={"csrf": False})

    validation_order = ["name", "version", "content"]
    if not form.validate_on_submit():
        for field_name in validation_order:
            if field_name in form.errors:
                break
        else:
            field_name = sorted(form.errors.keys())[0]

        eject(
            400,
            description="%s: %s"
            % (field_name, ", ".join(form.errors[field_name])),
        )

    # the upload FileStorage
    upload_data = form.content.data

    if upload_data is None:
        eject(400, description="Upload payload does not have a file.")

    upload_filename = secure_filename(upload_data.filename)

    # Make sure that our filename matches the project that it is being
    # uploaded to.
    prefix = safe_name(self.project.name).lower()
    if not safe_name(upload_filename).lower().startswith(prefix):
        eject(
            400,
            description="The filename for %r must start with %r."
            % (self.project.name, prefix),
        )

    # Fail if a project upload already exists
    if ProjectUpload.query.filter_by(
            filename=upload_filename,
            project_id=self.project.id).scalar():
        eject(400, description="File already exists.")

    # Store file uploads and calculate hashes
    with tempfile.TemporaryDirectory() as tmpdir:
        upload_path = os.path.join(tmpdir, upload_filename)
        upload_data.stream.seek(0)
        upload_data.save(upload_path)

        # Buffer the entire file onto disk, checking the hash of the file
        # as we go along.
        with open(upload_path, "rb") as upload_file:
            file_hashes = {
                "md5": hashlib.md5(),
                "sha256": hashlib.sha256(),
                "blake2_256": hashlib.blake2b(digest_size=256 // 8),
            }
            for chunk in iter(lambda: upload_file.read(8096), b""):
                for hasher in file_hashes.values():
                    hasher.update(chunk)

        # Take our hash functions and compute the final hashes for them
        # now.
        file_hashes = {
            method: file_hash.hexdigest().lower()
            for method, file_hash in file_hashes.items()
        }

        # Actually verify the digests that we've gotten. We're going to use
        # hmac.compare_digest even though we probably don't actually need
        # to because it's better safe than sorry. In the case of multiple
        # digests we expect them all to be given.
        hash_comparisons = [
            hmac.compare_digest(
                getattr(form, "%s_digest" % digest_name).data.lower(),
                digest_value)
            for digest_name, digest_value in file_hashes.items()
            if getattr(form, "%s_digest" % digest_name).data
        ]
        if not all(hash_comparisons):
            eject(
                400,
                description="The digest supplied does not match a digest "
                "calculated from the uploaded file.",
            )

        # Also buffer the entire signature file to disk.
        signature = form.gpg_signature.data
        signature_filename = upload_filename + ".asc"
        if signature:
            signature_path = os.path.join(tmpdir, signature_filename)
            signature.stream.seek(0)
            signature.save(signature_path)
            if os.path.getsize(signature_path) > MAX_SIGSIZE:
                eject(400, description="Signature too large.")

            # Check whether signature is ASCII armored
            with open(signature_path, "rb") as signature_file:
                if not signature_file.read().startswith(SIGNATURE_START):
                    eject(400, description="PGP signature is not ASCII armored.")

        version = form.version.data
        upload = ProjectUpload(
            version=version,
            project=self.project,
            # e.g. acme/2coffee12345678123123123123123123
            path=safe_join(self.project.name, file_hashes[PATH_HASHER]),
            filename=upload_filename,
            size=os.path.getsize(upload_path),
            md5_digest=file_hashes["md5"],
            sha256_digest=file_hashes["sha256"],
            blake2_256_digest=file_hashes["blake2_256"],
            form_data=request.form,
            user_agent=request.user_agent.string,
            remote_addr=request.remote_addr,
        )
        # make the storage path directory /app/uploads/acme
        os.makedirs(os.path.dirname(upload.full_path), exist_ok=True)
        # move the uploaded file to storage path directory
        shutil.move(upload_path, upload.full_path)
        # copy the uploaded signature file to storage path directory
        if signature:
            shutil.move(signature_path, upload.full_path + ".asc")
        # write to database
        upload.save()

    spinach.schedule(send_new_upload_notifications, self.project.id)
    spinach.schedule(update_upload_ordering, self.project.id)
    return "OK"
def command(self):
    """Quickstarts the new project."""
    self.__dict__.update(self.options.__dict__)

    if self.no_sqlalchemy:
        self.sqlalchemy = False

    if self.ming:
        self.sqlalchemy = False
        self.migrations = False

    if self.no_auth:
        self.auth = False

    if self.args:
        self.name = self.args[0]

    while not self.name:
        self.name = raw_input("Enter project name: ")

    if not self.package:
        package = self.name.lower()
        package = beginning_letter.sub("", package)
        package = valid_only.sub("", package)
        if package and self.no_input:
            self.package = package
        else:
            self.package = None
            while not self.package:
                self.package = raw_input(
                    "Enter package name [%s]: " % package).strip() or package

    if self.no_input:
        # defaults
        self.mako = False
        self.jinja = False
        self.kajiki = False
        self.auth = True

    if self.jinja is None and self.mako is None and self.kajiki is None:
        template = None
        while template is None:
            template = raw_input(
                "Would you prefer to use an alternative template system? "
                "(m=mako, j=jinja, k=kajiki, n=no [default]): ")
            template = dict(m="mako", j="jinja", k='kajiki', n="none").get(
                template.lstrip()[:1].lower() or 'n')
            if template == "mako":
                self.mako = True
            elif template == "jinja":
                self.jinja = True
            elif template == "kajiki":
                self.kajiki = True
            elif template is None:
                print "Please enter an option or n(o)."

    while self.auth is None:
        self.auth = raw_input(
            "Do you need authentication and authorization"
            " in this project? ([yes]/no): ")
        self.auth = dict(y=True, n=False).get(
            self.auth.lstrip()[:1].lower() or 'y')
        if self.auth is None:
            print "Please enter y(es) or n(o)."

    if self.auth:
        if self.ming:
            self.auth = "ming"
            self.ming = True
        else:
            self.auth = "sqlalchemy"
            self.sqlalchemy = True
    else:
        self.auth = None

    self.name = pkg_resources.safe_name(self.name)

    env = pkg_resources.Environment()
    if self.name.lower() in env:
        print 'The name "%s" is already in use by' % self.name,
        for dist in env[self.name]:
            print dist
        return

    import imp
    try:
        if imp.find_module(self.package):
            print 'The package name "%s" is already in use' % self.package
            return
    except ImportError:
        pass

    if os.path.exists(self.name):
        print 'A directory called "%s" already exists. Exiting.' % self.name
        return

    self.cookiesecret = None
    try:
        import uuid
        self.cookiesecret = str(uuid.uuid4())
    except ImportError:
        import random
        import base64
        import struct
        self.cookiesecret = base64.b64encode("".join([
            struct.pack('i', random.randrange(2**31))
            for x in [1, 2, 3, 4, 5, 6]
        ])).strip()

    command = create_distro.CreateDistroCommand("create")
    cmd_args = []
    templates = self.templates.split()
    for template in templates:
        cmd_args.append("--template=%s" % template)
    if self.dry_run:
        cmd_args.append("--simulate")
        cmd_args.append("-q")
    cmd_args.append(self.name)
    cmd_args.append("sqlalchemy=%s" % self.sqlalchemy)
    cmd_args.append("ming=%s" % self.ming)
    cmd_args.append("auth=%s" % self.auth)
    cmd_args.append("geo=%s" % self.geo)
    cmd_args.append("tw1=%s" % self.tw1)
    cmd_args.append("package=%s" % self.package)
    cmd_args.append("tgversion=%s" % self.version)
    cmd_args.append("mako=%s" % self.mako)
    cmd_args.append("jinja=%s" % self.jinja)
    cmd_args.append("kajiki=%s" % self.kajiki)
    cmd_args.append("migrations=%s" % self.migrations)
    cmd_args.append("cookiesecret=%s" % self.cookiesecret)
    # set the exact ORM-version for the proper requirements
    # it's extracted from our own requirements, so looking
    # them up must be in sync (there must be the extras_require named
    # sqlalchemy)
    command.run(cmd_args)

    if self.dry_run:
        return

    os.chdir(self.name)
    sys.argv = ["setup.py", "egg_info"]
    import imp
    imp.load_module("setup", *imp.find_module("setup", ["."]))

    # dirty hack to allow "empty" dirs
    for base, path, files in os.walk("./"):
        for file in files:
            if file == "empty":
                os.remove(os.path.join(base, file))

    # copy over the alternative templates if appropriate
    if self.mako or self.kajiki or self.jinja:
        def overwrite_templates(template_type):
            print 'Writing ' + template_type + ' template files to ./' + \
                os.path.join(self.package, 'templates')
            # remove existing template files
            package_template_dir = os.path.abspath(
                os.path.join(self.package, 'templates'))
            shutil.rmtree(package_template_dir, ignore_errors=True)
            # replace template files with alternative ones
            alt_template_dir = os.path.abspath(
                os.path.dirname(__file__)) + '/quickstart_' + template_type
            shutil.copytree(alt_template_dir, package_template_dir)

        if self.mako:
            overwrite_templates('mako')
        elif self.jinja:
            overwrite_templates('jinja')
        elif self.kajiki:
            overwrite_templates('kajiki')

    if self.ming:
        print 'Writing Ming model files to ./' + os.path.join(
            self.package, 'model')
        package_model_dir = os.path.abspath(
            os.path.join(self.package, 'model'))
        ming_model_dir = os.path.abspath(
            os.path.dirname(__file__)) + '/model_ming'
        shutil.copy(os.path.join(ming_model_dir, 'session.py'),
                    package_model_dir)

    if not self.migrations:
        print 'Disabling migrations support'
        # remove existing migrations directory
        package_migrations_dir = os.path.abspath('migration')
        shutil.rmtree(package_migrations_dir, ignore_errors=True)
def upload(dists, repository, sign, identity, username, password, comment,
           sign_with, config_file):
    # Check that a nonsensical option wasn't given
    if not sign and identity:
        raise ValueError("sign must be given along with identity")

    # Determine if the user has passed in pre-signed distributions
    signatures = dict(
        (os.path.basename(d), d) for d in dists if d.endswith(".asc"))
    dists = [i for i in dists if not i.endswith(".asc")]

    # Get our config from the .pypirc file
    try:
        config = get_config(config_file)[repository]
    except KeyError:
        raise KeyError(
            "Missing '{0}' section from the configuration file".format(
                repository,
            ),
        )

    parsed = urlparse(config["repository"])
    if parsed.netloc in ["pypi.python.org", "testpypi.python.org"]:
        config["repository"] = urlunparse(("https",) + parsed[1:])

    print("Uploading distributions to {0}".format(config["repository"]))

    username = get_username(username, config)
    password = get_password(password, config)

    session = requests.session()

    uploads = find_dists(dists)

    for filename in uploads:
        # Sign the dist if requested
        if sign:
            sign_file(sign_with, filename, identity)

        # Extract the metadata from the package
        for ext, dtype in DIST_EXTENSIONS.items():
            if filename.endswith(ext):
                meta = DIST_TYPES[dtype](filename)
                break
        else:
            raise ValueError(
                "Unknown distribution format: '%s'" %
                os.path.basename(filename))

        if dtype == "bdist_egg":
            pkgd = pkg_resources.Distribution.from_filename(filename)
            py_version = pkgd.py_version
        elif dtype == "bdist_wheel":
            py_version = meta.py_version
        elif dtype == "bdist_wininst":
            py_version = meta.py_version
        else:
            py_version = None

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        data = {
            # action
            ":action": "file_upload",
            "protcol_version": "1",  # sic: the legacy field name uses this spelling

            # identify release
            "name": pkg_resources.safe_name(meta.name),
            "version": meta.version,

            # file content
            "filetype": dtype,
            "pyversion": py_version,

            # additional meta-data
            "metadata_version": meta.metadata_version,
            "summary": meta.summary,
            "home_page": meta.home_page,
            "author": meta.author,
            "author_email": meta.author_email,
            "maintainer": meta.maintainer,
            "maintainer_email": meta.maintainer_email,
            "license": meta.license,
            "description": meta.description,
            "keywords": meta.keywords,
            "platform": meta.platforms,
            "classifiers": meta.classifiers,
            "download_url": meta.download_url,
            "supported_platform": meta.supported_platforms,
            "comment": comment,

            # PEP 314
            "provides": meta.provides,
            "requires": meta.requires,
            "obsoletes": meta.obsoletes,

            # Metadata 1.2
            "project_urls": meta.project_urls,
            "provides_dist": meta.provides_dist,
            "obsoletes_dist": meta.obsoletes_dist,
            "requires_dist": meta.requires_dist,
            "requires_external": meta.requires_external,
            "requires_python": meta.requires_python,
        }

        md5_hash = hashlib.md5()
        with open(filename, "rb") as fp:
            content = fp.read(4096)
            while content:
                md5_hash.update(content)
                content = fp.read(4096)
        data["md5_digest"] = md5_hash.hexdigest()

        signed_name = os.path.basename(filename) + ".asc"
        if signed_name in signatures:
            with open(signatures[signed_name], "rb") as gpg:
                data["gpg_signature"] = (signed_name, gpg.read())
        elif sign:
            with open(filename + ".asc", "rb") as gpg:
                data["gpg_signature"] = (signed_name, gpg.read())

        print("Uploading {0}".format(os.path.basename(filename)))

        data_to_send = []
        for key, value in data.items():
            if isinstance(value, (list, tuple)):
                for item in value:
                    data_to_send.append((key, item))
            else:
                data_to_send.append((key, value))

        with open(filename, "rb") as fp:
            data_to_send.append((
                "content",
                (os.path.basename(filename), fp, "application/octet-stream"),
            ))
            encoder = MultipartEncoder(data_to_send)
            resp = session.post(
                config["repository"],
                data=encoder,
                auth=(username, password),
                allow_redirects=False,
                headers={'Content-Type': encoder.content_type},
            )

        # Bug 28. Try to silence a ResourceWarning by releasing the socket
        # and clearing the connection pool.
        resp.close()
        session.close()

        # Bug 92. If we get a redirect we should abort because something
        # seems funky. The behaviour is not well defined and redirects
        # being issued by PyPI should never happen in reality. This should
        # catch malicious redirects as well.
        if resp.is_redirect:
            raise exc.RedirectDetected(
                ('"{0}" attempted to redirect to "{1}" during upload.'
                 ' Aborting...').format(config["repository"],
                                        resp.headers["location"]))

        # Otherwise, raise an HTTPError based on the status code.
        resp.raise_for_status()
def file_upload(request): # If we're in read-only mode, let upload clients know if request.flags.enabled("read-only"): raise _exc_with_message( HTTPForbidden, "Read-only mode: Uploads are temporarily disabled") # Before we do anything, if there isn't an authenticated user with this # request, then we'll go ahead and bomb out. if request.authenticated_userid is None: raise _exc_with_message( HTTPForbidden, "Invalid or non-existent authentication information.") # Ensure that user has a verified, primary email address. This should both # reduce the ease of spam account creation and activty, as well as act as # a forcing function for https://github.com/pypa/warehouse/issues/3632. # TODO: Once https://github.com/pypa/warehouse/issues/3632 has been solved, # we might consider a different condition, possibly looking at # User.is_active instead. if not (request.user.primary_email and request.user.primary_email.verified): raise _exc_with_message( HTTPBadRequest, ("User {!r} does not have a verified primary email address. " "Please add a verified primary email before attempting to " "upload to PyPI. See {project_help} for more information." "for more information.").format( request.user.username, project_help=request.help_url(_anchor="verified-email"), ), ) from None # Do some cleanup of the various form fields for key in list(request.POST): value = request.POST.get(key) if isinstance(value, str): # distutils "helpfully" substitutes unknown, but "required" values # with the string "UNKNOWN". This is basically never what anyone # actually wants so we'll just go ahead and delete anything whose # value is UNKNOWN. if value.strip() == "UNKNOWN": del request.POST[key] # Escape NUL characters, which psycopg doesn't like if "\x00" in value: request.POST[key] = value.replace("\x00", "\\x00") # We require protocol_version 1, it's the only supported version however # passing a different version should raise an error. if request.POST.get("protocol_version", "1") != "1": raise _exc_with_message(HTTPBadRequest, "Unknown protocol version.") # Check if any fields were supplied as a tuple and have become a # FieldStorage. The 'content' and 'gpg_signature' fields _should_ be a # FieldStorage, however. # ref: https://github.com/pypa/warehouse/issues/2185 # ref: https://github.com/pypa/warehouse/issues/2491 for field in set(request.POST) - {"content", "gpg_signature"}: values = request.POST.getall(field) if any(isinstance(value, FieldStorage) for value in values): raise _exc_with_message(HTTPBadRequest, f"{field}: Should not be a tuple.") # Look up all of the valid classifiers all_classifiers = request.db.query(Classifier).all() # Validate and process the incoming metadata. form = MetadataForm(request.POST) # Add a validator for deprecated classifiers form.classifiers.validators.append(_no_deprecated_classifiers(request)) form.classifiers.choices = [(c.classifier, c.classifier) for c in all_classifiers] if not form.validate(): for field_name in _error_message_order: if field_name in form.errors: break else: field_name = sorted(form.errors.keys())[0] if field_name in form: field = form[field_name] if field.description and isinstance(field, wtforms.StringField): error_message = ( "{value!r} is an invalid value for {field}. ".format( value=field.data, field=field.description) + "Error: {} ".format(form.errors[field_name][0]) + "See " "https://packaging.python.org/specifications/core-metadata" ) else: error_message = "Invalid value for {field}. 
Error: {msgs[0]}".format( field=field_name, msgs=form.errors[field_name]) else: error_message = "Error: {}".format(form.errors[field_name][0]) raise _exc_with_message(HTTPBadRequest, error_message) # Ensure that we have file data in the request. if "content" not in request.POST: raise _exc_with_message(HTTPBadRequest, "Upload payload does not have a file.") # Look up the project first before doing anything else, this is so we can # automatically register it if we need to and can check permissions before # going any further. try: project = (request.db.query(Project).filter( Project.normalized_name == func.normalize_pep426_name( form.name.data)).one()) except NoResultFound: # Check for AdminFlag set by a PyPI Administrator disabling new project # registration, reasons for this include Spammers, security # vulnerabilities, or just wanting to be lazy and not worry ;) if request.flags.enabled("disallow-new-project-registration"): raise _exc_with_message( HTTPForbidden, ("New project registration temporarily disabled. " "See {projecthelp} for details").format( projecthelp=request.help_url( _anchor="admin-intervention")), ) from None # Before we create the project, we're going to check our blacklist to # see if this project is even allowed to be registered. If it is not, # then we're going to deny the request to create this project. if request.db.query(exists().where( BlacklistedProject.name == func.normalize_pep426_name( form.name.data))).scalar(): raise _exc_with_message( HTTPBadRequest, ("The name {name!r} isn't allowed. " "See {projecthelp} " "for more information.").format( name=form.name.data, projecthelp=request.help_url(_anchor="project-name"), ), ) from None # Also check for collisions with Python Standard Library modules. if packaging.utils.canonicalize_name( form.name.data) in STDLIB_PROHIBITTED: raise _exc_with_message( HTTPBadRequest, ("The name {name!r} isn't allowed (conflict with Python " "Standard Library module name). See " "{projecthelp} for more information.").format( name=form.name.data, projecthelp=request.help_url(_anchor="project-name"), ), ) from None # The project doesn't exist in our database, so we'll add it along with # a role setting the current user as the "Owner" of the project. project = Project(name=form.name.data) request.db.add(project) request.db.add( Role(user=request.user, project=project, role_name="Owner")) # TODO: This should be handled by some sort of database trigger or a # SQLAlchemy hook or the like instead of doing it inline in this # view. request.db.add( JournalEntry( name=project.name, action="create", submitted_by=request.user, submitted_from=request.remote_addr, )) request.db.add( JournalEntry( name=project.name, action="add Owner {}".format(request.user.username), submitted_by=request.user, submitted_from=request.remote_addr, )) # Check that the user has permission to do things to this project, if this # is a new project this will act as a sanity check for the role we just # added above. if not request.has_permission("upload", project): raise _exc_with_message( HTTPForbidden, ("The user '{0}' isn't allowed to upload to project '{1}'. " "See {2} for more information.").format( request.user.username, project.name, request.help_url(_anchor="project-name"), ), ) # Uploading should prevent broken rendered descriptions. 
# Temporarily disabled, see # https://github.com/pypa/warehouse/issues/4079 # if form.description.data: # description_content_type = form.description_content_type.data # if not description_content_type: # description_content_type = "text/x-rst" # rendered = readme.render( # form.description.data, description_content_type, use_fallback=False # ) # if rendered is None: # if form.description_content_type.data: # message = ( # "The description failed to render " # "for '{description_content_type}'." # ).format(description_content_type=description_content_type) # else: # message = ( # "The description failed to render " # "in the default format of reStructuredText." # ) # raise _exc_with_message( # HTTPBadRequest, # "{message} See {projecthelp} for more information.".format( # message=message, # projecthelp=request.help_url(_anchor="description-content-type"), # ), # ) from None try: canonical_version = packaging.utils.canonicalize_version( form.version.data) release = (request.db.query(Release).filter( (Release.project == project) & (Release.canonical_version == canonical_version)).one()) except MultipleResultsFound: # There are multiple releases of this project which have the same # canonical version that were uploaded before we checked for # canonical version equivalence, so return the exact match instead release = (request.db.query( Release).filter((Release.project == project) & (Release.version == form.version.data)).one()) except NoResultFound: release = Release( project=project, _classifiers=[ c for c in all_classifiers if c.classifier in form.classifiers.data ], _pypi_hidden=False, dependencies=list( _construct_dependencies( form, { "requires": DependencyKind.requires, "provides": DependencyKind.provides, "obsoletes": DependencyKind.obsoletes, "requires_dist": DependencyKind.requires_dist, "provides_dist": DependencyKind.provides_dist, "obsoletes_dist": DependencyKind.obsoletes_dist, "requires_external": DependencyKind.requires_external, "project_urls": DependencyKind.project_url, }, )), canonical_version=canonical_version, **{ k: getattr(form, k).data for k in { # This is a list of all the fields in the form that we # should pull off and insert into our new release. "version", "summary", "description", "description_content_type", "license", "author", "author_email", "maintainer", "maintainer_email", "keywords", "platform", "home_page", "download_url", "requires_python", } }, ) request.db.add(release) # TODO: This should be handled by some sort of database trigger or # a SQLAlchemy hook or the like instead of doing it inline in # this view. request.db.add( JournalEntry( name=release.project.name, version=release.version, action="new release", submitted_by=request.user, submitted_from=request.remote_addr, )) # TODO: We need a better solution to this than to just do it inline inside # this method. Ideally the version field would just be sortable, but # at least this should be some sort of hook or trigger. releases = (request.db.query(Release).filter( Release.project == project).options( orm.load_only(Release._pypi_ordering, Release._pypi_hidden)).all()) for i, r in enumerate( sorted(releases, key=lambda x: packaging.version.parse(x.version))): r._pypi_ordering = i # TODO: Again, we should figure out a better solution to doing this than # just inlining this inside this method. if project.autohide: for r in releases: r._pypi_hidden = bool(not r == release) # Pull the filename out of our POST data. 
filename = request.POST["content"].filename # Make sure that the filename does not contain any path separators. if "/" in filename or "\\" in filename: raise _exc_with_message( HTTPBadRequest, "Cannot upload a file with '/' or '\\' in the name.") # Make sure the filename ends with an allowed extension. if _dist_file_regexes[project.allow_legacy_files].search(filename) is None: raise _exc_with_message( HTTPBadRequest, "Invalid file extension: Use .egg, .tar.gz, .whl or .zip " "extension. (https://www.python.org/dev/peps/pep-0527)", ) # Make sure that our filename matches the project that it is being uploaded # to. prefix = pkg_resources.safe_name(project.name).lower() if not pkg_resources.safe_name(filename).lower().startswith(prefix): raise _exc_with_message( HTTPBadRequest, "Start filename for {!r} with {!r}.".format(project.name, prefix), ) # Check the content type of what is being uploaded if not request.POST["content"].type or request.POST[ "content"].type.startswith("image/"): raise _exc_with_message(HTTPBadRequest, "Invalid distribution file.") # Ensure that the package filetype is allowed. # TODO: Once PEP 527 is completely implemented we should be able to delete # this and just move it into the form itself. if not project.allow_legacy_files and form.filetype.data not in { "sdist", "bdist_wheel", "bdist_egg", }: raise _exc_with_message(HTTPBadRequest, "Unknown type of file.") # The project may or may not have a file size specified on the project, if # it does then it may or may not be smaller or larger than our global file # size limits. file_size_limit = max(filter(None, [MAX_FILESIZE, project.upload_limit])) with tempfile.TemporaryDirectory() as tmpdir: temporary_filename = os.path.join(tmpdir, filename) # Buffer the entire file onto disk, checking the hash of the file as we # go along. with open(temporary_filename, "wb") as fp: file_size = 0 file_hashes = { "md5": hashlib.md5(), "sha256": hashlib.sha256(), "blake2_256": hashlib.blake2b(digest_size=256 // 8), } for chunk in iter(lambda: request.POST["content"].file.read(8096), b""): file_size += len(chunk) if file_size > file_size_limit: raise _exc_with_message( HTTPBadRequest, "File too large. " + "Limit for project {name!r} is {limit} MB. ".format( name=project.name, limit=file_size_limit // (1024 * 1024)) + "See " + request.help_url(_anchor="file-size-limit"), ) fp.write(chunk) for hasher in file_hashes.values(): hasher.update(chunk) # Take our hash functions and compute the final hashes for them now. file_hashes = { k: h.hexdigest().lower() for k, h in file_hashes.items() } # Actually verify the digests that we've gotten. We're going to use # hmac.compare_digest even though we probably don't actually need to # because it's better safe than sorry. In the case of multiple digests # we expect them all to be given. if not all([ hmac.compare_digest( getattr(form, "{}_digest".format(digest_name)).data.lower(), digest_value, ) for digest_name, digest_value in file_hashes.items() if getattr(form, "{}_digest".format(digest_name)).data ]): raise _exc_with_message( HTTPBadRequest, "The digest supplied does not match a digest calculated " "from the uploaded file.", ) # Check to see if the file that was uploaded exists already or not. 
is_duplicate = _is_duplicate_file(request.db, filename, file_hashes) if is_duplicate: return Response() elif is_duplicate is not None: raise _exc_with_message( HTTPBadRequest, # Note: Changing this error message to something that doesn't # start with "File already exists" will break the # --skip-existing functionality in twine # ref: https://github.com/pypa/warehouse/issues/3482 # ref: https://github.com/pypa/twine/issues/332 "File already exists. See " + request.help_url(_anchor="file-name-reuse"), ) # Check to see if the file that was uploaded exists in our filename log if request.db.query( request.db.query(Filename).filter( Filename.filename == filename).exists()).scalar(): raise _exc_with_message( HTTPBadRequest, "This filename has already been used, use a " "different version. " "See " + request.help_url(_anchor="file-name-reuse"), ) # Check to see if uploading this file would create a duplicate sdist # for the current release. if (form.filetype.data == "sdist" and request.db.query( request.db.query(File).filter((File.release == release) & ( File.packagetype == "sdist")).exists()).scalar()): raise _exc_with_message( HTTPBadRequest, "Only one sdist may be uploaded per release.") # Check the file to make sure it is a valid distribution file. if not _is_valid_dist_file(temporary_filename, form.filetype.data): raise _exc_with_message(HTTPBadRequest, "Invalid distribution file.") # Check that if it's a binary wheel, it's on a supported platform if filename.endswith(".whl"): wheel_info = _wheel_file_re.match(filename) plats = wheel_info.group("plat").split(".") for plat in plats: if not _valid_platform_tag(plat): raise _exc_with_message( HTTPBadRequest, "Binary wheel '{filename}' has an unsupported " "platform tag '{plat}'.".format(filename=filename, plat=plat), ) # Also buffer the entire signature file to disk. if "gpg_signature" in request.POST: has_signature = True with open(os.path.join(tmpdir, filename + ".asc"), "wb") as fp: signature_size = 0 for chunk in iter( lambda: request.POST["gpg_signature"].file.read(8096), b""): signature_size += len(chunk) if signature_size > MAX_SIGSIZE: raise _exc_with_message(HTTPBadRequest, "Signature too large.") fp.write(chunk) # Check whether signature is ASCII armored with open(os.path.join(tmpdir, filename + ".asc"), "rb") as fp: if not fp.read().startswith(b"-----BEGIN PGP SIGNATURE-----"): raise _exc_with_message( HTTPBadRequest, "PGP signature isn't ASCII armored.") else: has_signature = False # TODO: This should be handled by some sort of database trigger or a # SQLAlchemy hook or the like instead of doing it inline in this # view. request.db.add(Filename(filename=filename)) # Store the information about the file in the database. file_ = File( release=release, filename=filename, python_version=form.pyversion.data, packagetype=form.filetype.data, comment_text=form.comment.data, size=file_size, has_signature=bool(has_signature), md5_digest=file_hashes["md5"], sha256_digest=file_hashes["sha256"], blake2_256_digest=file_hashes["blake2_256"], # Figure out what our filepath is going to be, we're going to use a # directory structure based on the hash of the file contents. This # will ensure that the contents of the file cannot change without # it also changing the path that the file is saved too. 
path="/".join([ file_hashes[PATH_HASHER][:2], file_hashes[PATH_HASHER][2:4], file_hashes[PATH_HASHER][4:], filename, ]), ) request.db.add(file_) # TODO: This should be handled by some sort of database trigger or a # SQLAlchemy hook or the like instead of doing it inline in this # view. request.db.add( JournalEntry( name=release.project.name, version=release.version, action="add {python_version} file {filename}".format( python_version=file_.python_version, filename=file_.filename), submitted_by=request.user, submitted_from=request.remote_addr, )) # TODO: We need a better answer about how to make this transactional so # this won't take affect until after a commit has happened, for # now we'll just ignore it and save it before the transaction is # committed. storage = request.find_service(IFileStorage) storage.store( file_.path, os.path.join(tmpdir, filename), meta={ "project": file_.release.project.normalized_name, "version": file_.release.version, "package-type": file_.packagetype, "python-version": file_.python_version, }, ) if has_signature: storage.store( file_.pgp_path, os.path.join(tmpdir, filename + ".asc"), meta={ "project": file_.release.project.normalized_name, "version": file_.release.version, "package-type": file_.packagetype, "python-version": file_.python_version, }, ) return Response()
def create_plugin_setup_parameters( identifier="todo", name="TODO", version="0.1", description="TODO", author="TODO", mail="*****@*****.**", url="TODO", license="AGPLv3", source_folder=".", additional_data=None, additional_packages=None, ignored_packages=None, requires=None, extra_requires=None, cmdclass=None, eggs=None, package=None, dependency_links=None, ): import pkg_resources if package is None: package = "octoprint_{identifier}".format(**locals()) if additional_data is None: additional_data = list() if additional_packages is None: additional_packages = list() if ignored_packages is None: ignored_packages = list() if dependency_links is None: dependency_links = list() if requires is None: requires = [] if not isinstance(requires, list): raise ValueError("requires must be a list") if extra_requires is None: extra_requires = dict() if not isinstance(extra_requires, dict): raise ValueError("extra_requires must be a dict") if cmdclass is None: cmdclass = dict() if not isinstance(cmdclass, dict): raise ValueError("cmdclass must be a dict") if eggs is None: eggs = [] if not isinstance(eggs, list): raise ValueError("eggs must be a list") egg = "{name}*.egg-info".format( name=pkg_resources.to_filename(pkg_resources.safe_name(name))) if egg not in eggs: eggs = [egg] + eggs cmdclass.update( dict(clean=CleanCommand.for_options( source_folder=os.path.join(source_folder, package), eggs=eggs))) translation_dir = os.path.join(source_folder, "translations") pot_file = os.path.join(translation_dir, "messages.pot") bundled_dir = os.path.join(source_folder, package, "translations") cmdclass.update( get_babel_commandclasses( pot_file=pot_file, output_dir=translation_dir, bundled_dir=bundled_dir, pack_name_prefix="{name}-i18n-".format(**locals()), pack_path_prefix="_plugins/{identifier}/".format(**locals()), )) from setuptools import find_packages packages = set([package] + list( filter( lambda x: x.startswith("{package}.".format(package=package)), find_packages(where=source_folder, exclude=ignored_packages), )) + additional_packages) print("Found packages: {packages!r}".format(**locals())) return dict( name=name, version=version, description=description, author=author, author_email=mail, url=url, license=license, # adding new commands cmdclass=cmdclass, # we only have our plugin package to install packages=packages, # we might have additional data files in sub folders that need to be installed too package_data={ package: package_data_dirs( os.path.join(source_folder, package), ["static", "templates", "translations"] + additional_data, ) }, include_package_data=True, # If you have any package data that needs to be accessible on the file system, such as templates or static assets, # this plugin is not zip_safe. zip_safe=False, install_requires=requires, extras_require=extra_requires, dependency_links=dependency_links, # Hook the plugin into the "octoprint.plugin" entry point, mapping the plugin_identifier to the plugin_package. # That way OctoPrint will be able to find the plugin and load it. entry_points={ "octoprint.plugin": ["{identifier} = {package}".format(**locals())] }, )
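# Aside: a short sketch of how the egg-info glob above is derived.
# safe_name() collapses illegal characters to '-', and to_filename() then
# replaces '-' with '_', which matches the *.egg-info directory that
# setuptools writes for the distribution.
import pkg_resources

def egg_info_glob(name):
    return "{}*.egg-info".format(
        pkg_resources.to_filename(pkg_resources.safe_name(name)))

assert egg_info_glob("My Plugin") == "My_Plugin*.egg-info"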
def name(self): return safe_name(self.distribution.get_name())
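# For reference: pkg_resources.safe_name() replaces every run of characters
# outside [A-Za-z0-9.] with a single '-', so arbitrary distribution names
# can safely be fed through it, as the property above does.
import pkg_resources

assert pkg_resources.safe_name("my project!?") == "my-project-"
assert pkg_resources.safe_name("zope.interface") == "zope.interface"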
def run(self): "Quickstarts the new project." while not self.name: self.name = raw_input("Enter project name: ") while not self.package: package = self.name.lower() package = beginning_letter.sub("", package) package = valid_only.sub("", package) self.package = raw_input("Enter package name [%s]: " % package) if not self.package: self.package = package doidentity = self.identity while self.prompt_identity and not doidentity: doidentity = raw_input("Do you need Identity " "(usernames/passwords) in this project? [no] ") doidentity = doidentity.lower() if not doidentity or doidentity.startswith('n'): self.identity = "none" break if doidentity.startswith("y"): doidentity = True break print "Please enter y(es) or n(o)." doidentity = None if doidentity is True: if self.sqlalchemy or self.elixir: self.identity = "sqlalchemy" else: self.identity = "sqlobject" else: self.identity = "none" self.name = pkg_resources.safe_name(self.name) env = pkg_resources.Environment() if self.name.lower() in env: print 'The name "%s" is already in use by' % self.name, for dist in env[self.name]: print dist return import imp try: if imp.find_module(self.package): print 'The package name "%s" is already in use' % self.package return except ImportError: pass if os.path.exists(self.name): print 'A directory called "%s" already exists. Exiting.' % self.name return command = create_distro.CreateDistroCommand("quickstart") cmd_args = [] for template in self.templates.split(" "): cmd_args.append("--template=%s" % template) if self.svn_repository: cmd_args.append("--svn-repository=%s" % self.svn_repository) if self.dry_run: cmd_args.append("--simulate") cmd_args.append("-q") cmd_args.append(self.name) cmd_args.append("sqlalchemy=%s" % self.sqlalchemy) cmd_args.append("elixir=%s" % self.elixir) cmd_args.append("sqlobject=%s" % self.sqlobject) cmd_args.append("identity=%s" % self.identity) cmd_args.append("package=%s" % self.package) # set the exact ORM-version for the proper requirements # it's extracted from our own requirements, so looking # them up must be in sync (there must be the extras_require named sqlobject/sqlalchemy) if self.sqlobject: sqlobjectversion = str(get_requirement('sqlobject')) cmd_args.append("sqlobjectversion=%s" % sqlobjectversion) if self.sqlalchemy: sqlalchemyversion = str(get_requirement('sqlalchemy')) cmd_args.append("sqlalchemyversion=%s" % sqlalchemyversion) if self.elixir: elixirversion = str(get_requirement('sqlalchemy', 'elixir')) cmd_args.append("elixirversion=%s" % elixirversion) command.run(cmd_args) if not self.dry_run: os.chdir(self.name) if self.sqlobject: # Create the SQLObject history directory only when needed. # With paste.script it's only possible to skip files, but # not directories. So we are handling this manually. sodir = '%s/sqlobject-history' % self.package if not os.path.exists(sodir): os.mkdir(sodir) try: if not os.path.exists(os.path.join(os.path.dirname( os.path.abspath(sodir)), '.svn')): raise OSError command.run_command('svn', 'add', sodir) except OSError: pass startscript = "start-%s.py" % self.package if os.path.exists(startscript): oldmode = os.stat(startscript).st_mode os.chmod(startscript, oldmode | stat.S_IXUSR) sys.argv = ["setup.py", "egg_info"] import imp imp.load_module("setup", *imp.find_module("setup", ["."])) # dirty hack to allow "empty" dirs for base, path, files in os.walk("./"): for file in files: if file == "empty": os.remove(os.path.join(base, file))
def command(self): """LayCement for the new project.""" self.__dict__.update(self.options.__dict__) if self.args: self.name = self.args[0] while not self.name: self.name = raw_input("Enter project name: ").strip() self.name = pkg_resources.safe_name(self.name) package = self.name.lower() package = beginning_letter.sub("", package) package = valid_only.sub("_", package) self.package = raw_input( "Enter module name [%s]: " % package).strip() or package self.description = raw_input("Project Description: ").strip() self.creator = raw_input("Project Creator: ").strip() self.creator_email = raw_input("Project Creator Email: ").strip() self.url = raw_input("Project URL: ").strip() self.license = raw_input("Project License: ").strip() env = pkg_resources.Environment() if self.name.lower() in env: print 'The name "%s" is already in use by ' % self.name, for dist in env[self.name]: print dist return import imp try: if imp.find_module(self.package): print 'The package name "%s" is already in use' % self.package return except ImportError: pass if os.path.exists(self.name): print 'A directory called "%s" already exists. Exiting.' % self.name return command = create_distro.CreateDistroCommand("create") cmd_args = [] for template in self.templates.split(): cmd_args.append("--template=%s" % template) cmd_args.append(self.name) cmd_args.append("package=%s" % self.package) cmd_args.append("cement_version=%s" % CEMENT_VERSION) cmd_args.append("cement_next_version=%s" % CEMENT_NEXT_VERSION) cmd_args.append("description=%s" % self.description) cmd_args.append("creator=%s" % self.creator) cmd_args.append("creator_email=%s" % self.creator_email) cmd_args.append("url=%s" % self.url) cmd_args.append("license=%s" % self.license) command.run(cmd_args) if not self.dry_run: sys.argv = ["setup.py", "egg_info"] # dirty hack to allow "empty" dirs for base, path, files in os.walk("./"): for file in files: if file == "empty": os.remove(os.path.join(base, file))
def allow_external(self, key): self._allow_external.add(safe_name(key).lower()) return self
def _dir2pi(option, argv): pkgdir = argv[1] if not os.path.isdir(pkgdir): raise ValueError("no such directory: %r" % (pkgdir, )) pkgdirpath = lambda *x: os.path.join(pkgdir, *x) shutil.rmtree(pkgdirpath("simple"), ignore_errors=True) os.mkdir(pkgdirpath("simple")) pkg_index = ("<html><head><title>Simple Index</title>" "<meta name='api-version' value='2' /></head><body>\n") processed_pkg = set() for file in sorted(os.listdir(pkgdir)): pkg_filepath = os.path.join(pkgdir, file) if not os.path.isfile(pkg_filepath): continue pkg_basename = os.path.basename(file) if pkg_basename.startswith("."): continue pkg_name, pkg_rest = file_to_package(pkg_basename, pkgdir) # FIXME: A hack to workaround what are considered safe names for # distributions in wheels vs standard distribution names. # https://github.com/pypa/setuptools/blob/16187afb3f532199f4951801d4e39939c560facc/pkg_resources/__init__.py#L1416-L1421 if file.endswith(".whl"): pkg_dir_name = pkg_resources.safe_name(pkg_name) else: pkg_dir_name = pkg_name if option.normalize_package_names: pkg_dir_name = normalize_pep503(pkg_dir_name) pkg_dir = pkgdirpath("simple", pkg_dir_name) if not os.path.exists(pkg_dir): os.mkdir(pkg_dir) if option.aggressive_normalization: try_symlink(option, pkg_dir_name, pkgdirpath("simple", normalize_pip67(pkg_name))) try_symlink(option, pkg_dir_name, pkgdirpath("simple", pkg_name)) pkg_new_basename = "-".join([pkg_name, pkg_rest]) symlink_target = os.path.join(pkg_dir, pkg_new_basename) symlink_source = os.path.join("../../", pkg_basename) if option.use_symlink: try_symlink(option, symlink_source, symlink_target) else: if option.verbose: print('copying %s to %s' % (pkg_filepath, symlink_target)) shutil.copy2(pkg_filepath, symlink_target) if pkg_name not in processed_pkg: pkg_index += "<a href='%s/'>%s</a><br />\n" % ( html.escape(pkg_dir_name), html.escape(pkg_name), ) processed_pkg.add(pkg_name) if option.build_html: with open(os.path.join(pkg_dir, "index.html"), "a") as fp: fp.write("<a href='%(name)s'>%(name)s</a><br />\n" % { "name": html.escape(pkg_new_basename), }) pkg_index += "</body></html>\n" if option.build_html: with open(pkgdirpath("simple/index.html"), "w") as fp: fp.write(pkg_index) return 0
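# The normalize_pep503 helper used above is not shown here; it presumably
# implements the PEP 503 rule, where runs of '-', '_' and '.' collapse to a
# single '-' and the result is lowercased. A minimal sketch:
import re

def normalize_pep503(name):
    # "Foo.Bar_baz" and "foo-bar-baz" map to the same index directory.
    return re.sub(r"[-_.]+", "-", name).lower()

assert normalize_pep503("Foo.Bar_baz") == "foo-bar-baz"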
def command(self): """Quickstarts the new project.""" self.__dict__.update(self.options.__dict__) if not True in [self.sqlalchemy, self.sqlobject]: self.sqlalchemy = True if self.args: self.name = self.args[0] while not self.name: self.name = raw_input("Enter project name: ") package = self.name.lower() package = beginning_letter.sub("", package) package = valid_only.sub("", package) if package and self.no_input: self.package = package else: self.package = None while not self.package: self.package = raw_input( "Enter package name [%s]: " % package).strip() or package if not self.no_input: while self.auth is None: self.auth = raw_input( "Do you need authentication and authorization" " in this project? [yes] ") self.auth = dict(y=True, n=False).get(self.auth.lstrip()[:1].lower() or 'y') if self.auth is None: print "Please enter y(es) or n(o)." if self.auth: if self.sqlalchemy: self.auth = "sqlalchemy" else: print( 'You can only use authentication and authorization' ' in a new project if you use SQLAlchemy. Please check' ' the repoze.what documentation to learn how to implement' ' authentication/authorization with other sources.') return # TODO: As far as I know, SQLObject has never been supported in # TG2 # self.auth = "sqlobject" else: self.auth = None self.name = pkg_resources.safe_name(self.name) env = pkg_resources.Environment() if self.name.lower() in env: print 'The name "%s" is already in use by' % self.name, for dist in env[self.name]: print dist return import imp try: if imp.find_module(self.package): print 'The package name "%s" is already in use' % self.package return except ImportError: pass if os.path.exists(self.name): print 'A directory called "%s" already exists. Exiting.' % self.name return command = create_distro.CreateDistroCommand("create") cmd_args = [] for template in self.templates.split(): cmd_args.append("--template=%s" % template) if self.svn_repository: cmd_args.append("--svn-repository=%s" % self.svn_repository) if self.dry_run: cmd_args.append("--simulate") cmd_args.append("-q") cmd_args.append(self.name) cmd_args.append("sqlalchemy=%s" % self.sqlalchemy) cmd_args.append("sqlobject=%s" % self.sqlobject) cmd_args.append("auth=%s" % self.auth) cmd_args.append("geo=%s" % self.geo) cmd_args.append("package=%s" % self.package) cmd_args.append("tgversion=%s" % self.version) # set the exact ORM-version for the proper requirements # it's extracted from our own requirements, so looking # them up must be in sync (there must be the extras_require named # sqlobject/sqlalchemy) """if self.sqlobject: sqlobjectversion = str(get_requirement('sqlobject')) cmd_args.append("sqlobjectversion=%s" % sqlobjectversion) if self.sqlalchemy: sqlalchemyversion = str(get_requirement('sqlalchemy')) cmd_args.append("sqlalchemyversion=%s" % sqlalchemyversion) """ command.run(cmd_args) if not self.dry_run: os.chdir(self.name) if self.sqlobject: # Create the SQLObject history directory only when needed. # With paste.script it's only possible to skip files, but # not directories. So we are handling this manually. 
sodir = '%s/sqlobject-history' % self.package if not os.path.exists(sodir): os.mkdir(sodir) try: if not os.path.exists( os.path.join( os.path.dirname(os.path.abspath(sodir)), '.svn')): raise OSError command.run_command('svn', 'add', sodir) except OSError: pass startscript = "start-%s.py" % self.package if os.path.exists(startscript): oldmode = os.stat(startscript).st_mode os.chmod(startscript, oldmode | stat.S_IXUSR) sys.argv = ["setup.py", "egg_info"] import imp imp.load_module("setup", *imp.find_module("setup", ["."])) # dirty hack to allow "empty" dirs for base, path, files in os.walk("./"): for file in files: if file == "empty": os.remove(os.path.join(base, file))
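# The beginning_letter and valid_only regexes used by the quickstart
# commands above are defined elsewhere; plausible definitions (assumptions,
# not the originals) that reproduce the observed package-name derivation:
import re

beginning_letter = re.compile(r"^[^a-z]*")  # strip leading non-letters
valid_only = re.compile(r"[^a-z0-9_]")      # drop remaining illegal chars

def default_package(project_name):
    package = project_name.lower()
    package = beginning_letter.sub("", package)
    return valid_only.sub("", package)

assert default_package("3D-Tool Kit") == "dtoolkit"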
def file_upload(request): # Before we do anything, if there isn't an authenticated user with this # request, then we'll go ahead and bomb out. if request.authenticated_userid is None: raise _exc_with_message( HTTPForbidden, "Invalid or non-existent authentication information.", ) # distutils "helpfully" substitutes unknown, but "required" values with the # string "UNKNOWN". This is basically never what anyone actually wants so # we'll just go ahead and delete anything whose value is UNKNOWN. for key in list(request.POST): if request.POST.get(key) == "UNKNOWN": del request.POST[key] # We require protocol_version 1 (the only supported version); passing any # other version should raise an error. if request.POST.get("protocol_version", "1") != "1": raise _exc_with_message(HTTPBadRequest, "Unknown protocol version.") # Look up all of the valid classifiers all_classifiers = request.db.query(Classifier).all() # Validate and process the incoming metadata. form = MetadataForm(request.POST) # Check if the classifiers were supplied as a tuple # ref: https://github.com/pypa/warehouse/issues/2185 classifiers = request.POST.getall('classifiers') if any(isinstance(classifier, FieldStorage) for classifier in classifiers): raise _exc_with_message( HTTPBadRequest, "classifiers: Must be a list, not tuple.", ) form.classifiers.choices = [(c.classifier, c.classifier) for c in all_classifiers] if not form.validate(): for field_name in _error_message_order: if field_name in form.errors: break else: field_name = sorted(form.errors.keys())[0] raise _exc_with_message( HTTPBadRequest, "{field}: {msgs[0]}".format( field=field_name, msgs=form.errors[field_name], ), ) # Ensure that we have file data in the request. if "content" not in request.POST: raise _exc_with_message( HTTPBadRequest, "Upload payload does not have a file.", ) # Look up the project first before doing anything else; this is so we can # automatically register it if we need to and can check permissions before # going any further. try: project = (request.db.query(Project).filter( Project.normalized_name == func.normalize_pep426_name( form.name.data)).one()) except NoResultFound: # Before we create the project, we're going to check our blacklist to # see if this project is even allowed to be registered. If it is not, # then we're going to deny the request to create this project. if request.db.query(exists().where( BlacklistedProject.name == func.normalize_pep426_name( form.name.data))).scalar(): raise _exc_with_message( HTTPBadRequest, ("The name {!r} is not allowed. " "See https://pypi.org/help/#project-name " "for more information.").format(form.name.data), ) from None # Also check for collisions with Python Standard Library modules. if (packaging.utils.canonicalize_name(form.name.data) in STDLIB_PROHIBITTED): raise _exc_with_message( HTTPBadRequest, ("The name {!r} is not allowed (conflict with Python " "Standard Library module name). See " "https://pypi.org/help/#project-name for more information." ).format(form.name.data), ) from None # The project doesn't exist in our database, so we'll add it along with # a role setting the current user as the "Owner" of the project. project = Project(name=form.name.data) request.db.add(project) request.db.add( Role(user=request.user, project=project, role_name="Owner")) # TODO: This should be handled by some sort of database trigger or a # SQLAlchemy hook or the like instead of doing it inline in this # view.
request.db.add( JournalEntry( name=project.name, action="create", submitted_by=request.user, submitted_from=request.remote_addr, ), ) request.db.add( JournalEntry( name=project.name, action="add Owner {}".format(request.user.username), submitted_by=request.user, submitted_from=request.remote_addr, ), ) # Check that the user has permission to do things to this project, if this # is a new project this will act as a sanity check for the role we just # added above. if not request.has_permission("upload", project): raise _exc_with_message( HTTPForbidden, ("The user '{0}' is not allowed to upload to project '{1}'. " "See https://pypi.org/help#project-name for more information." ).format(request.user.username, project.name)) try: release = (request.db.query( Release).filter((Release.project == project) & (Release.version == form.version.data)).one()) except NoResultFound: release = Release( project=project, _classifiers=[ c for c in all_classifiers if c.classifier in form.classifiers.data ], _pypi_hidden=False, dependencies=list( _construct_dependencies( form, { "requires": DependencyKind.requires, "provides": DependencyKind.provides, "obsoletes": DependencyKind.obsoletes, "requires_dist": DependencyKind.requires_dist, "provides_dist": DependencyKind.provides_dist, "obsoletes_dist": DependencyKind.obsoletes_dist, "requires_external": DependencyKind.requires_external, "project_urls": DependencyKind.project_url, })), **{ k: getattr(form, k).data for k in { # This is a list of all the fields in the form that we # should pull off and insert into our new release. "version", "summary", "description", "license", "author", "author_email", "maintainer", "maintainer_email", "keywords", "platform", "home_page", "download_url", "requires_python", } }) request.db.add(release) # TODO: This should be handled by some sort of database trigger or a # SQLAlchemy hook or the like instead of doing it inline in this # view. request.db.add( JournalEntry( name=release.project.name, version=release.version, action="new release", submitted_by=request.user, submitted_from=request.remote_addr, ), ) # TODO: We need a better solution to this than to just do it inline inside # this method. Ideally the version field would just be sortable, but # at least this should be some sort of hook or trigger. releases = (request.db.query(Release).filter( Release.project == project).all()) for i, r in enumerate( sorted(releases, key=lambda x: packaging.version.parse(x.version))): r._pypi_ordering = i # TODO: Again, we should figure out a better solution to doing this than # just inlining this inside this method. if project.autohide: for r in releases: r._pypi_hidden = bool(not r == release) # Pull the filename out of our POST data. filename = request.POST["content"].filename # Make sure that the filename does not contain any path separators. if "/" in filename or "\\" in filename: raise _exc_with_message( HTTPBadRequest, "Cannot upload a file with '/' or '\\' in the name.", ) # Make sure the filename ends with an allowed extension. if _dist_file_regexes[project.allow_legacy_files].search(filename) is None: raise _exc_with_message( HTTPBadRequest, "Invalid file extension. PEP 527 requires one of: .egg, .tar.gz, " ".whl, .zip (https://www.python.org/dev/peps/pep-0527/).") # Make sure that our filename matches the project that it is being uploaded # to. 
prefix = pkg_resources.safe_name(project.name).lower() if not pkg_resources.safe_name(filename).lower().startswith(prefix): raise _exc_with_message( HTTPBadRequest, "The filename for {!r} must start with {!r}.".format( project.name, prefix, )) # Check the content type of what is being uploaded if (not request.POST["content"].type or request.POST["content"].type.startswith("image/")): raise _exc_with_message(HTTPBadRequest, "Invalid distribution file.") # Ensure that the package filetype is allowed. # TODO: Once PEP 527 is completely implemented we should be able to delete # this and just move it into the form itself. if (not project.allow_legacy_files and form.filetype.data not in {"sdist", "bdist_wheel", "bdist_egg"}): raise _exc_with_message(HTTPBadRequest, "Unknown type of file.") # The project may or may not have a file size specified on the project; if # it does then it may or may not be smaller or larger than our global file # size limits. file_size_limit = max(filter(None, [MAX_FILESIZE, project.upload_limit])) with tempfile.TemporaryDirectory() as tmpdir: temporary_filename = os.path.join(tmpdir, filename) # Buffer the entire file onto disk, checking the hash of the file as we # go along. with open(temporary_filename, "wb") as fp: file_size = 0 file_hashes = { "md5": hashlib.md5(), "sha256": hashlib.sha256(), "blake2_256": hashlib.blake2b(digest_size=256 // 8), } for chunk in iter(lambda: request.POST["content"].file.read(8096), b""): file_size += len(chunk) if file_size > file_size_limit: raise _exc_with_message(HTTPBadRequest, "File too large.") fp.write(chunk) for hasher in file_hashes.values(): hasher.update(chunk) # Take our hash functions and compute the final hashes for them now. file_hashes = { k: h.hexdigest().lower() for k, h in file_hashes.items() } # Actually verify the digests that we've gotten. We're going to use # hmac.compare_digest even though we probably don't actually need to # because it's better safe than sorry. In the case of multiple digests # we expect them all to be given. if not all([ hmac.compare_digest( getattr(form, "{}_digest".format(digest_name)).data.lower(), digest_value, ) for digest_name, digest_value in file_hashes.items() if getattr(form, "{}_digest".format(digest_name)).data ]): raise _exc_with_message( HTTPBadRequest, "The digest supplied does not match a digest calculated " "from the uploaded file.") # Check to see if the file that was uploaded exists already or not. is_duplicate = _is_duplicate_file(request.db, filename, file_hashes) if is_duplicate: return Response() elif is_duplicate is not None: raise _exc_with_message(HTTPBadRequest, "File already exists.") # Check to see if the file that was uploaded exists in our filename log if (request.db.query( request.db.query(Filename).filter( Filename.filename == filename).exists()).scalar()): raise _exc_with_message( HTTPBadRequest, "This filename has previously been used; you should use a " "different version.", ) # Check to see if uploading this file would create a duplicate sdist # for the current release. if (form.filetype.data == "sdist" and request.db.query( request.db.query(File).filter((File.release == release) & ( File.packagetype == "sdist")).exists()).scalar()): raise _exc_with_message( HTTPBadRequest, "Only one sdist may be uploaded per release.", ) # Check the file to make sure it is a valid distribution file.
if not _is_valid_dist_file(temporary_filename, form.filetype.data): raise _exc_with_message( HTTPBadRequest, "Invalid distribution file.", ) # Check that if it's a binary wheel, it's on a supported platform if filename.endswith(".whl"): wheel_info = _wheel_file_re.match(filename) plats = wheel_info.group("plat").split(".") for plat in plats: if not _valid_platform_tag(plat): raise _exc_with_message( HTTPBadRequest, "Binary wheel '{filename}' has an unsupported " "platform tag '{plat}'.".format(filename=filename, plat=plat)) # Also buffer the entire signature file to disk. if "gpg_signature" in request.POST: has_signature = True with open(os.path.join(tmpdir, filename + ".asc"), "wb") as fp: signature_size = 0 for chunk in iter( lambda: request.POST["gpg_signature"].file.read(8096), b""): signature_size += len(chunk) if signature_size > MAX_SIGSIZE: raise _exc_with_message( HTTPBadRequest, "Signature too large.", ) fp.write(chunk) # Check whether signature is ASCII armored with open(os.path.join(tmpdir, filename + ".asc"), "rb") as fp: if not fp.read().startswith(b"-----BEGIN PGP SIGNATURE-----"): raise _exc_with_message( HTTPBadRequest, "PGP signature is not ASCII armored.", ) else: has_signature = False # TODO: This should be handled by some sort of database trigger or a # SQLAlchemy hook or the like instead of doing it inline in this # view. request.db.add(Filename(filename=filename)) # Store the information about the file in the database. file_ = File( release=release, filename=filename, python_version=form.pyversion.data, packagetype=form.filetype.data, comment_text=form.comment.data, size=file_size, has_signature=bool(has_signature), md5_digest=file_hashes["md5"], sha256_digest=file_hashes["sha256"], blake2_256_digest=file_hashes["blake2_256"], # Figure out what our filepath is going to be; we're going to use a # directory structure based on the hash of the file contents. This # will ensure that the contents of the file cannot change without # it also changing the path that the file is saved to. path="/".join([ file_hashes[PATH_HASHER][:2], file_hashes[PATH_HASHER][2:4], file_hashes[PATH_HASHER][4:], filename, ]), ) request.db.add(file_) # TODO: This should be handled by some sort of database trigger or a # SQLAlchemy hook or the like instead of doing it inline in this # view. request.db.add( JournalEntry( name=release.project.name, version=release.version, action="add {python_version} file {filename}".format( python_version=file_.python_version, filename=file_.filename, ), submitted_by=request.user, submitted_from=request.remote_addr, ), ) # TODO: We need a better answer about how to make this transactional so # this won't take effect until after a commit has happened; for # now we'll just ignore it and save it before the transaction is # committed.
storage = request.find_service(IFileStorage) storage.store( file_.path, os.path.join(tmpdir, filename), meta={ "project": file_.release.project.normalized_name, "version": file_.release.version, "package-type": file_.packagetype, "python-version": file_.python_version, }, ) if has_signature: storage.store( file_.pgp_path, os.path.join(tmpdir, filename + ".asc"), meta={ "project": file_.release.project.normalized_name, "version": file_.release.version, "package-type": file_.packagetype, "python-version": file_.python_version, }, ) # TODO: Once we no longer have the legacy code base running PyPI we can # go ahead and delete this tiny bit of shim code, since it only # exists to purge stuff on legacy PyPI when uploaded to Warehouse old_domain = request.registry.settings.get("warehouse.legacy_domain") if old_domain: request.tm.get().addAfterCommitHook( _legacy_purge, args=["https://{}/pypi".format(old_domain)], kws={"data": { ":action": "purge", "project": project.name }}, ) return Response()
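# Aside: a standalone sketch (not the view code above) of the digest check
# both upload views perform, using hmac.compare_digest for a timing-safe
# comparison of a client-supplied hex digest against one computed server-side.
import hashlib
import hmac

def digest_matches(content, claimed_sha256):
    computed = hashlib.sha256(content).hexdigest().lower()
    return hmac.compare_digest(claimed_sha256.lower(), computed)

assert digest_matches(b"payload", hashlib.sha256(b"payload").hexdigest())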
def allow_unverified(self, key): self._allow_unverified.add(safe_name(key).lower()) return self
def project_name(self) -> str: return safe_name(self.name)
def command(self): """Quickstarts the new project.""" self.__dict__.update(self.options.__dict__) if self.args: self.name = self.args[0] while not self.name: self.name = raw_input("Enter project name: ") package = self.name.lower() package = beginning_letter.sub("", package) package = valid_only.sub("", package) if package: self.package = package else: self.package = None while not self.package: self.package = raw_input( "Enter package name [%s]: " % package).strip() or package self.name = pkg_resources.safe_name(self.name).replace('-', '_') self.rpm_name = self.package.replace('.', '-') env = pkg_resources.Environment() if self.name.lower() in env: print 'The name "%s" is already in use by' % self.name, for dist in env[self.name]: print dist return try: if imp.find_module(self.package): print 'The package name "%s" is already in use' % self.package return except ImportError: pass if os.path.exists(self.name): print 'A directory called "%s" already exists. Exiting.' % self.name return command = create_distro.CreateDistroCommand("create") cmd_args = ['--template=moksha.master'] if self.livewidget: cmd_args.append('--template=moksha.livewidget') if self.stream: cmd_args.append('--template=moksha.stream') if self.consumer: cmd_args.append('--template=moksha.consumer') if self.controller: cmd_args.append('--template=moksha.controller') #if self.dry_run: # cmd_args.append("--simulate") # cmd_args.append("-q") cmd_args.append(self.name) cmd_args.append("livewidget=%s" % self.livewidget) cmd_args.append("consumer=%s" % self.consumer) cmd_args.append("controller=%s" % self.controller) cmd_args.append("stream=%s" % self.stream) cmd_args.append("package=%s" % self.package) cmd_args.append("widget_name=%s" % self.package.title() + 'Widget') cmd_args.append("stream_name=%s" % self.package.title() + 'Stream') cmd_args.append("consumer_name=%s" % self.package.title() + 'Consumer') cmd_args.append("controller_name=%s" % self.package.title() + 'Controller') command.run(cmd_args)
for name in files: # Don't descend into the package itself looking for other setup.py files if (os.path.join(dirpath, name).count("/") == 9): if name == 'setup.py': # The name of the package dirpathCpy = dirpath.lower() packageName = dirpathCpy[dirpathCpy.rfind('/') + 1:len(dirpathCpy)] splicedstring = dirpathCpy[:dirpathCpy.rfind('/')] # The name of the directory that contains the package packageDir = splicedstring[splicedstring.rfind('/') + 1:len(splicedstring)] packageNamesub = dirpathCpy[dirpathCpy.rfind('/') + 1:len(dirpathCpy)] # Normalize the names by replacing non-alphanumerics with '-' packageDir = pkg_resources.safe_name(packageDir) packageNamesub = pkg_resources.safe_name(packageNamesub) # Check for deviations from the standard naming scheme Numpy/Numpy-1.2, # where Numpy is the package directory and Numpy-1.2 is the package name if packageDir not in packageNamesub: weirdCases.append(os.path.join(dirpath, name)) break counter = counter + 1 # Should return '' if there is no requires.txt. req_string = getReq.get_from_require(os.path.join(dirpath, '')) req_string = req_string.lower() if req_string: req_list = req_string.split() else: try:
def write(name, version, doc, entry_map, src_files, distributions, modules, dst_dir, logger, observer=None, compress=True): """ Write egg in the manner of :mod:`setuptools`, with some differences: - Writes directly to the zip file, avoiding some intermediate copies. - Doesn't compile any Python modules. name: string Must be an alphanumeric string. version: string Must be an alphanumeric string. doc: string Used for the `Summary` and `Description` entries in the egg's metadata. entry_map: dict A :mod:`pkg_resources` :class:`EntryPoint` map: a dictionary mapping group names to dictionaries mapping entry point names to :class:`EntryPoint` objects. src_files: list List of non-Python files to include. distributions: list List of Distributions this egg depends on. It is used for the `Requires` entry in the egg's metadata. modules: list List of module names not found in a distribution that this egg depends on. It is used for the `Requires` entry in the egg's metadata and is also recorded in the 'openmdao_orphans.txt' resource. dst_dir: string The directory to write the egg to. logger: Logger Used for recording progress, etc. observer: callable Will be called via an :class:`EggObserver` intermediary. Returns the egg's filename. """ observer = eggobserver.EggObserver(observer, logger) egg_name = egg_filename(name, version) egg_path = os.path.join(dst_dir, egg_name) distributions = sorted(distributions, key=lambda dist: dist.project_name) modules = sorted(modules) sources = [] files = [] size = 0 # Approximate (uncompressed) size. Used to set allowZip64 flag. # Collect src_files. for path in src_files: path = os.path.join(name, path) files.append(path) size += os.path.getsize(path) # Collect Python modules. for dirpath, dirnames, filenames in os.walk('.', followlinks=True): dirs = copy.copy(dirnames) for path in dirs: if not os.path.exists(os.path.join(dirpath, path, '__init__.py')): dirnames.remove(path) for path in filenames: if path.endswith('.py'): path = os.path.join(dirpath[2:], path) # Skip leading './' # No reason for a file to appear twice in our file list. 
if path not in files: files.append(path) size += os.path.getsize(path) sources.append(path) # Package info -> EGG-INFO/PKG-INFO pkg_info = [] pkg_info.append('Metadata-Version: 1.1') pkg_info.append('Name: %s' % pkg_resources.safe_name(name)) pkg_info.append('Version: %s' % pkg_resources.safe_version(version)) pkg_info.append('Summary: %s' % doc.strip().split('\n')[0]) pkg_info.append('Description: %s' % doc.strip()) pkg_info.append('Author-email: UNKNOWN') pkg_info.append('License: UNKNOWN') pkg_info.append('Platform: UNKNOWN') for dist in distributions: pkg_info.append('Requires: %s (%s)' % (dist.project_name, dist.version)) for module in modules: pkg_info.append('Requires: %s' % module) pkg_info = '\n'.join(pkg_info) + '\n' sources.append(name + '.egg-info/PKG-INFO') size += len(pkg_info) # Dependency links -> EGG-INFO/dependency_links.txt dependency_links = '\n' sources.append(name + '.egg-info/dependency_links.txt') size += len(dependency_links) # Entry points -> EGG-INFO/entry_points.txt entry_points = [] for entry_group in sorted(entry_map.keys()): entry_points.append('[%s]' % entry_group) for entry_name in sorted(entry_map[entry_group].keys()): entry_points.append('%s' % entry_map[entry_group][entry_name]) entry_points.append('') entry_points = '\n'.join(entry_points) + '\n' sources.append(name + '.egg-info/entry_points.txt') size += len(entry_points) # Unsafe -> EGG-INFO/not-zip-safe not_zip_safe = '\n' sources.append(name + '.egg-info/not-zip-safe') size += len(not_zip_safe) # Requirements -> EGG-INFO/requires.txt requirements = [str(dist.as_requirement()) for dist in distributions] requirements = '\n'.join(requirements) + '\n' sources.append(name + '.egg-info/requires.txt') size += len(requirements) # Modules not part of a distribution -> EGG-INFO/openmdao_orphans.txt orphans = '\n'.join(modules) + '\n' sources.append(name + '.egg-info/openmdao_orphans.txt') size += len(orphans) # Top-level names -> EGG-INFO/top_level.txt top_level = '%s\n' % name sources.append(name + '.egg-info/top_level.txt') size += len(top_level) # Manifest -> EGG-INFO/SOURCES.txt sources.append(name + '.egg-info/SOURCES.txt') sources = '\n'.join(sorted(sources)) + '\n' size += len(sources) # Open zipfile. logger.debug('Creating %s', egg_path) zip64 = size > zipfile.ZIP64_LIMIT compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED egg = zipfile.ZipFile(egg_path, 'w', compression, zip64) stats = { 'completed_files': 0., 'total_files': float(8 + len(files)), 'completed_bytes': 0., 'total_bytes': float(size) } # Write egg info. _write_info(egg, 'PKG-INFO', pkg_info, observer, stats) _write_info(egg, 'dependency_links.txt', dependency_links, observer, stats) _write_info(egg, 'entry_points.txt', entry_points, observer, stats) _write_info(egg, 'not-zip-safe', not_zip_safe, observer, stats) _write_info(egg, 'requires.txt', requirements, observer, stats) _write_info(egg, 'openmdao_orphans.txt', orphans, observer, stats) _write_info(egg, 'top_level.txt', top_level, observer, stats) _write_info(egg, 'SOURCES.txt', sources, observer, stats) # Write collected files. for path in sorted(files): _write_file(egg, path, observer, stats) observer.complete(egg_name) egg.close() if os.path.getsize(egg_path) > zipfile.ZIP64_LIMIT: logger.warning('Egg zipfile requires Zip64 support to unzip.') return egg_name
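# The egg_filename() helper called at the top of write() is not shown; under
# setuptools' naming convention it would look roughly like this sketch
# (assuming the running interpreter's major.minor is the target version):
import sys
import pkg_resources

def egg_filename(name, version):
    name = pkg_resources.to_filename(pkg_resources.safe_name(name))
    version = pkg_resources.to_filename(pkg_resources.safe_version(version))
    return "%s-%s-py%d.%d.egg" % (name, version, *sys.version_info[:2])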
def normalize_package(name): return safe_name(name).lower()
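# Note: safe_name(name).lower(), as used in normalize_package() above, is a
# weaker normalization than PEP 503's canonicalize_name: safe_name keeps
# dots, whereas canonicalize_name folds '-', '_' and '.' together.
import pkg_resources
from packaging.utils import canonicalize_name

assert pkg_resources.safe_name("Zope.Interface").lower() == "zope.interface"
assert canonicalize_name("Zope.Interface") == "zope-interface"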