def make_wheel_metadata_file(
    name,  # type: str
    version,  # type: str
    value,  # type: Defaulted[Optional[AnyStr]]
    tags,  # type: Sequence[Tuple[str, str, str]]
    updates,  # type: Defaulted[Dict[str, HeaderValue]]
):
    # type: (...) -> Optional[File]
    """Build the ``WHEEL`` metadata file for a test wheel.

    ``value=None`` suppresses the file entirely; an explicit ``value`` is
    used verbatim; otherwise a default header set is generated, with
    ``updates`` merged in when given.
    """
    if value is None:
        return None

    path = dist_info_path(name, version, "WHEEL")

    # A caller-supplied value wins over the generated defaults.
    if value is not _default:
        return File(path, ensure_binary(value))

    headers = CaseInsensitiveDict({
        "Wheel-Version": "1.0",
        "Generator": "pip-test-suite",
        "Root-Is-Purelib": "true",
        "Tag": ["-".join(parts) for parts in tags],
    })
    if updates is not _default:
        headers.update(updates)

    return File(path, ensure_binary(message_from_dict(headers).as_string()))
def make_metadata_file(
    name,  # type: str
    version,  # type: str
    value,  # type: Defaulted[Optional[AnyStr]]
    updates,  # type: Defaulted[Dict[str, HeaderValue]]
    body,  # type: Defaulted[AnyStr]
):
    # type: (...) -> Optional[File]
    """Build the ``METADATA`` file for a test wheel.

    ``value=None`` suppresses the file; an explicit ``value`` is used
    verbatim; otherwise default headers are generated, ``updates`` merged
    in, and ``body`` (when given) set as the message payload.
    """
    # Fixed: the type comment previously said "() -> File" although the
    # function takes arguments and may return None.
    if value is None:
        return None

    path = dist_info_path(name, version, "METADATA")

    if value is not _default:
        return File(path, ensure_binary(value))

    metadata = CaseInsensitiveDict({
        "Metadata-Version": "2.1",
        "Name": name,
        "Version": version,
    })
    if updates is not _default:
        metadata.update(updates)

    message = message_from_dict(metadata)
    if body is not _default:
        message.set_payload(body)

    # Fixed: serialize the message we just built. The previous code called
    # message_from_dict(metadata) a second time here, silently discarding
    # the payload set via set_payload(body).
    return File(path, ensure_binary(message.as_string()))
def save(self, pypi_version, current_time):
    # type: (str, datetime.datetime) -> None
    """Best-effort persistence of the pip self-check state to disk.

    Silently does nothing when there is no state-file path or when the
    state directory is not owned by the current user.
    """
    if not self.statefile_path:
        return

    state_dir = os.path.dirname(self.statefile_path)

    # Refuse to write into a directory owned by someone else.
    if not check_path_owner(state_dir):
        return

    # Ownership confirmed; make sure the directory tree exists.
    ensure_dir(state_dir)

    # The key records which pip wrote the file.
    payload = json.dumps(
        {
            "key": self.key,
            "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
            "pypi_version": pypi_version,
        },
        sort_keys=True,
        separators=(",", ":"),
    )

    with adjacent_tmp_file(self.statefile_path) as f:
        f.write(ensure_binary(payload))

    try:
        # The state file is prefix-specific, so an unconditional
        # overwrite is safe.
        replace(f.name, self.statefile_path)
    except OSError:
        # Best effort only; losing the cache is acceptable.
        pass
def record_file_maker_wrapper(
    name,  # type: str
    version,  # type: str
    files,  # type: List[File]
    record,  # type: Defaulted[Optional[AnyStr]]
    record_callback,  # type: Defaulted[RecordCallback]
):
    # type: (...) -> Iterable[File]
    """Yield *files* unchanged, then a RECORD file describing them.

    ``record=None`` suppresses the RECORD file; an explicit ``record`` is
    used verbatim; ``record_callback`` may rewrite the generated rows.
    """
    rows = []  # type: List[Record]
    for f in files:
        rows.append(Record(f.name, digest(f.contents), str(len(f.contents))))
        yield f

    # None means "emit no RECORD file at all".
    if record is None:
        return

    record_path = dist_info_path(name, version, "RECORD")

    # A caller-supplied RECORD body is used as-is.
    if record is not _default:
        yield File(record_path, ensure_binary(record))
        return

    # RECORD conventionally lists itself with empty hash/size fields.
    rows.append(Record(record_path, "", ""))

    if record_callback is not _default:
        rows = record_callback(rows)

    with StringIO(newline="") as buf:
        writer = csv.writer(buf)
        for row in rows:
            writer.writerow(map(ensure_text, row))
        payload = buf.getvalue().encode("utf-8")

    yield File(record_path, payload)
def make_data_files(name, version, files):
    # type: (str, str, Dict[str, AnyStr]) -> List[File]
    """Place each (filename, contents) pair under the wheel's .data directory."""
    data_dir = f"{name}-{version}.data"
    result = []
    # Renamed loop variable avoids shadowing the `name` parameter.
    for fname, contents in files.items():
        result.append(File(f"{data_dir}/{fname}", ensure_binary(contents)))
    return result
def make_metadata_files(name, version, files):
    # type: (str, str, Dict[str, AnyStr]) -> List[File]
    """Place each (filename, contents) pair inside the wheel's .dist-info directory."""
    return [
        File(dist_info_path(name, version, fname), ensure_binary(contents))
        for fname, contents in files.items()
    ]
def make_entry_points_file(
    name,  # type: str
    version,  # type: str
    entry_points,  # type: Defaulted[Dict[str, List[str]]]
    console_scripts,  # type: Defaulted[List[str]]
):
    # type: (...) -> Optional[File]
    """Build ``entry_points.txt`` from section data and/or console scripts.

    Returns None when neither argument was supplied. ``console_scripts``
    overrides any ``console_scripts`` section in ``entry_points``.
    """
    if entry_points is _default and console_scripts is _default:
        return None

    # Copy so the caller's dict is never mutated.
    sections = {} if entry_points is _default else deepcopy(entry_points)
    if console_scripts is not _default:
        sections["console_scripts"] = console_scripts

    lines = []
    for section, entries in sections.items():
        lines.append(f"[{section}]")
        lines.extend(entries)

    return File(
        dist_info_path(name, version, "entry_points.txt"),
        ensure_binary("\n".join(lines)),
    )
def test_dict_metadata_works():
    """In-memory DictMetadata exposes the same name/version/extras/requires
    as metadata parsed from disk."""
    name = "simple"
    version = "0.1.0"
    req_a = "a==1.0"
    req_b = "b==1.1; extra == 'also_b'"
    all_requires = [req_a, req_b, "c==1.2; extra == 'also_c'"]
    extras = ["also_b", "also_c"]
    requires_python = ">=3"

    # Message supports repeated headers, so each assignment appends.
    metadata = Message()
    metadata["Name"] = name
    metadata["Version"] = version
    for req in all_requires:
        metadata["Requires-Dist"] = req
    for extra in extras:
        metadata["Provides-Extra"] = extra
    metadata["Requires-Python"] = requires_python

    inner_metadata = DictMetadata({
        "METADATA": ensure_binary(metadata.as_string())
    })
    dist = DistInfoDistribution(
        location="<in-memory>", metadata=inner_metadata, project_name=name
    )

    assert dist.project_name == name
    assert dist.version == version
    assert set(dist.extras) == set(extras)
    assert dist.requires([]) == [Requirement.parse(req_a)]
    assert dist.requires(["also_b"]) == [
        Requirement.parse(req_a),
        Requirement.parse(req_b),
    ]
    assert get_metadata(dist).as_string() == metadata.as_string()
    assert get_requires_python(dist) == requires_python
def make_data_files(name, version, files):
    # type: (str, str, Dict[str, AnyStr]) -> List[File]
    """Place each (filename, contents) pair under the wheel's .data directory.

    Python 2 compatible variant: uses str.format and six's iteritems.
    """
    data_dir = "{}-{}.data".format(name, version)
    result = []
    # Renamed loop variable avoids shadowing the `name` parameter.
    for fname, contents in iteritems(files):
        result.append(
            File("{}/{}".format(data_dir, fname), ensure_binary(contents))
        )
    return result
def set_alpn_protocols(self, protocols):
    """Record the ALPN protocols to apply to the context later.

    Raises NotImplementedError when the platform's Security framework
    lacks ALPN support (pre-macOS 10.12).
    """
    if not hasattr(Security, "SSLSetALPNProtocols"):
        raise NotImplementedError(
            "SecureTransport supports ALPN only in macOS 10.12+")
    # Protocol names must be bytes for the Security framework call.
    self._alpn_protocols = list(map(six.ensure_binary, protocols))
def create_basic_sdist_for_package(
    script, name, version, extra_files=None
):
    """Build a minimal sdist (.tar.gz) for *name*/*version*.

    Files are written into the script's temp path, archived into the
    scratch path, and the temp path is reset afterwards. Returns the
    path to the created archive.
    """
    templates = {
        "setup.py": """
            from setuptools import find_packages, setup
            setup(name={name!r}, version={version!r})
        """,
    }

    # Some useful shorthands
    archive_name = "{name}-{version}.tar.gz".format(
        name=name, version=version
    )

    # Render templates: keys may contain {name}; values are dedented,
    # formatted and stripped.
    files = {
        key.format(name=name): textwrap.dedent(value).format(
            name=name, version=version
        ).strip()
        for key, value in templates.items()
    }

    # Extra files are added verbatim, after formatting.
    if extra_files:
        files.update(extra_files)

    for fname, contents in files.items():
        path = script.temp_path / fname
        path.parent.mkdir(exist_ok=True, parents=True)
        path.write_bytes(ensure_binary(contents))

    # The base_dir cast is required to make `shutil.make_archive()` use
    # Unicode paths on Python 2, making it able to properly archive
    # files with non-ASCII names.
    retval = script.scratch_path / archive_name
    generated = shutil.make_archive(
        retval,
        'gztar',
        root_dir=script.temp_path,
        base_dir=text_type(os.curdir),
    )
    shutil.move(generated, retval)

    # Reset the temp path for the next caller.
    shutil.rmtree(script.temp_path)
    script.temp_path.mkdir()

    return retval
def _get_statefile_name(key):
    # type: (Union[str, Text]) -> str
    """Return the SHA-224 hex digest of *key*, used as the state-file name."""
    return hashlib.sha224(ensure_binary(key)).hexdigest()
def _get_statefile_name(key):
    # type: (str) -> str
    """Derive a filesystem-safe state-file name by hashing *key*."""
    digest = hashlib.sha224(ensure_binary(key)).hexdigest()
    return digest
def create_basic_wheel_for_package(script, name, version, depends=None,
                                   extras=None, extra_files=None):
    """Build a minimal py2.py3 universal wheel for *name*/*version*.

    ``depends`` become Requires-Dist entries; ``extras`` (a mapping of
    extra name -> requirement list) become Provides-Extra plus conditional
    Requires-Dist entries; ``extra_files`` are added verbatim. Returns the
    path of the created .whl in the scratch path.
    """
    if depends is None:
        depends = []
    if extras is None:
        extras = {}

    templates = {
        "{name}/__init__.py": """
            __version__ = {version!r}
            def hello():
                return "Hello From {name}"
        """,
        "{dist_info}/DESCRIPTION": """
            UNKNOWN
        """,
        "{dist_info}/WHEEL": """
            Wheel-Version: 1.0
            Generator: pip-test-suite
            Root-Is-Purelib: true
            Tag: py2-none-any
            Tag: py3-none-any
        """,
        "{dist_info}/METADATA": """
            Metadata-Version: 2.0
            Name: {name}
            Version: {version}
            Summary: UNKNOWN
            Home-page: UNKNOWN
            Author: UNKNOWN
            Author-email: UNKNOWN
            License: UNKNOWN
            Platform: UNKNOWN
            {requires_dist}

            UNKNOWN
        """,
        "{dist_info}/top_level.txt": """
            {name}
        """,
        # Have an empty RECORD because we don't want to be checking hashes.
        "{dist_info}/RECORD": "",
    }

    # Some useful shorthands
    archive_name = "{name}-{version}-py2.py3-none-any.whl".format(
        name=name, version=version)
    dist_info = "{name}-{version}.dist-info".format(
        name=name, version=version)

    requires_dist = "\n".join(
        ["Requires-Dist: {}".format(pkg) for pkg in depends]
        + ["Provides-Extra: {}".format(extra) for extra in extras.keys()]
        + [
            "Requires-Dist: {}; extra == \"{}\"".format(pkg, extra)
            for extra in extras
            for pkg in extras[extra]
        ]
    )

    # Render templates: keys may contain {name}/{dist_info}; values are
    # dedented, formatted and stripped.
    files = {
        key.format(name=name, dist_info=dist_info):
            textwrap.dedent(value).format(
                name=name, version=version, requires_dist=requires_dist
            ).strip()
        for key, value in templates.items()
    }

    # Extra files are added verbatim, after formatting.
    if extra_files:
        files.update(extra_files)

    for fname, contents in files.items():
        path = script.temp_path / fname
        path.parent.mkdir(exist_ok=True, parents=True)
        path.write_bytes(ensure_binary(contents))

    # The base_dir cast is required to make `shutil.make_archive()` use
    # Unicode paths on Python 2, making it able to properly archive
    # files with non-ASCII names.
    retval = script.scratch_path / archive_name
    generated = shutil.make_archive(
        retval,
        'zip',
        root_dir=script.temp_path,
        base_dir=text_type(os.curdir),
    )
    shutil.move(generated, retval)

    # Reset the temp path for the next caller.
    shutil.rmtree(script.temp_path)
    script.temp_path.mkdir()

    return retval
def make_files(files):
    # type: (Dict[str, AnyStr]) -> List[File]
    """Wrap each (filename, contents) pair as a File with binary contents."""
    return [
        File(fname, ensure_binary(data))
        for fname, data in files.items()
    ]
self._client_cert = certfile
self._client_key = keyfile
self._client_cert_passphrase = password

# Resolved an unmerged git conflict that was left in this file
# (<<<<<<< HEAD ... ======= ... >>>>>>> 74c0619). The HEAD side added
# the set_alpn_protocols method; the other side was empty, so keeping
# HEAD preserves the ALPN feature and removes the syntax error.
def set_alpn_protocols(self, protocols):
    """
    Sets the ALPN protocols that will later be set on the context.

    Raises a NotImplementedError if ALPN is not supported.
    """
    if not hasattr(Security, "SSLSetALPNProtocols"):
        raise NotImplementedError(
            "SecureTransport supports ALPN only in macOS 10.12+"
        )
    self._alpn_protocols = [six.ensure_binary(p) for p in protocols]

def wrap_socket(
    self,
    sock,
    server_side=False,
    do_handshake_on_connect=True,
    suppress_ragged_eofs=True,
    server_hostname=None,
):
    # So, what do we do here? Firstly, we assert some properties. This is a
    # stripped down shim, so there is some functionality we don't support.
    # See PEP 543 for the real deal.
    assert not server_side