def test_spawn(self):
    tmpdir = self.mkdtemp()

    # creating something executable
    # through the shell that returns 1
    if os.name == 'posix':
        exe = os.path.join(tmpdir, 'foo.sh')
        self.write_file(exe, '#!/bin/sh\nexit 1')
        os.chmod(exe, 0777)
    else:
        exe = os.path.join(tmpdir, 'foo.bat')
        self.write_file(exe, 'exit 1')
        os.chmod(exe, 0777)

    self.assertRaises(DistutilsExecError, spawn, [exe])

    # now something that works
    if os.name == 'posix':
        exe = os.path.join(tmpdir, 'foo.sh')
        self.write_file(exe, '#!/bin/sh\nexit 0')
        os.chmod(exe, 0777)
    else:
        exe = os.path.join(tmpdir, 'foo.bat')
        self.write_file(exe, 'exit 0')
        os.chmod(exe, 0777)

    spawn([exe])  # should work without any error
def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
    # XXX see if we want to keep an external call here
    if verbose:
        zipoptions = "-r"
    else:
        zipoptions = "-rq"
    from distutils2.errors import DistutilsExecError
    from distutils2.spawn import spawn
    try:
        spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
    except DistutilsExecError:
        # XXX really should distinguish between "couldn't find
        # external 'zip' command" and "zip failed".
        raise ExecError("unable to create zip file '%s': "
                        "could neither import the 'zipfile' module nor "
                        "find a standalone zip utility" % zip_filename)
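# Hedged usage sketch (not part of the module above): how an archive helper
# might fall back to _call_external_zip when the 'zipfile' module cannot be
# imported. The 'make_zipfile' name and its signature are assumptions used
# for illustration only.
import os

def make_zipfile(base_name, base_dir, verbose=False, dry_run=False):
    # hypothetical helper: archive everything under 'base_dir' into
    # base_name + ".zip", preferring the zipfile module when available
    zip_filename = base_name + ".zip"
    try:
        import zipfile
    except ImportError:
        # no zipfile module: shell out to an external 'zip' binary
        _call_external_zip(base_dir, zip_filename, verbose, dry_run)
        return zip_filename

    if not dry_run:
        zf = zipfile.ZipFile(zip_filename, "w",
                             compression=zipfile.ZIP_DEFLATED)
        try:
            for dirpath, dirnames, filenames in os.walk(base_dir):
                for name in filenames:
                    path = os.path.normpath(os.path.join(dirpath, name))
                    if os.path.isfile(path):
                        zf.write(path, path)
        finally:
            zf.close()
    return zip_filename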
def spawn(self, cmd, search_path=1, level=1):
    """Spawn an external command respecting dry-run flag."""
    from distutils2.spawn import spawn
    spawn(cmd, search_path, dry_run=self.dry_run)
def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
                  owner=None, group=None, logger=None):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "compress", "bzip2", or None.
    ("compress" will be deprecated in Python 3.2)

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built.  If not provided, the current owner and
    group will be used.

    The output tar file will be named 'base_name' + ".tar", possibly plus
    the appropriate compression extension (".gz", ".bz2" or ".Z").

    Returns the output filename.
    """
    tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', None: '', 'compress': ''}
    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'compress': '.Z'}

    # flags for compression program, each element of list will be an argument
    if compress is not None and compress not in compress_ext:
        raise ValueError("bad value for 'compress': must be None, 'gzip', "
                         "'bzip2' or 'compress'")

    archive_name = base_name + '.tar'
    if compress != 'compress':
        archive_name += compress_ext.get(compress, '')

    archive_dir = os.path.dirname(archive_name)
    if not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # creating the tarball
    from distutils2._backport import tarfile
    if logger is not None:
        logger.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()

    # compression using `compress`
    # XXX this block will be removed in Python 3.2
    if compress == 'compress':
        warn("'compress' will be deprecated.", PendingDeprecationWarning)
        # the option varies depending on the platform
        compressed_name = archive_name + compress_ext[compress]
        if sys.platform == 'win32':
            cmd = [compress, archive_name, compressed_name]
        else:
            cmd = [compress, '-f', archive_name]
        from distutils2.spawn import spawn
        spawn(cmd, dry_run=dry_run)
        return compressed_name

    return archive_name
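# Hedged usage sketch (not part of the module above): calling _make_tarball to
# build a gzip-compressed tarball of a source tree.  It assumes the module
# context above (os, sys, warn, _get_uid, _get_gid); the 'project-1.0' paths
# and the logger name are illustrative assumptions only.
import logging

def _example_make_tarball():
    logger = logging.getLogger("archive-example")
    # produces 'dist/project-1.0.tar.gz' and returns that filename;
    # pass compress=None for a plain .tar, or owner/group to override
    # the uid/gid recorded in the archive members
    return _make_tarball("dist/project-1.0", "project-1.0",
                         compress="gzip", owner=None, group=None,
                         logger=logger)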
def upload_file(self, command, pyversion, filename):
    # Makes sure the repository URL is compliant
    schema, netloc, url, params, query, fragments = \
        urlparse.urlparse(self.repository)
    if params or query or fragments:
        raise AssertionError("Incompatible url %s" % self.repository)

    if schema not in ('http', 'https'):
        raise AssertionError("unsupported schema " + schema)

    # Sign if requested
    if self.sign:
        gpg_args = ["gpg", "--detach-sign", "-a", filename]
        if self.identity:
            gpg_args[2:2] = ["--local-user", self.identity]
        spawn(gpg_args, dry_run=self.dry_run)

    # Fill in the data - send all the meta-data in case we need to
    # register a new release
    content = open(filename, 'rb').read()
    meta = self.distribution.metadata
    data = {
        # action
        ':action': 'file_upload',
        'protcol_version': '1',

        # identify release
        'name': meta['Name'],
        'version': meta['Version'],

        # file content
        'content': (os.path.basename(filename), content),
        'filetype': command,
        'pyversion': pyversion,
        'md5_digest': md5(content).hexdigest(),

        # additional meta-data
        # XXX Implement 1.1
        'metadata_version': '1.0',
        'summary': meta['Summary'],
        'home_page': meta['Home-page'],
        'author': meta['Author'],
        'author_email': meta['Author-email'],
        'license': meta['License'],
        'description': meta['Description'],
        'keywords': meta['Keywords'],
        'platform': meta['Platform'],
        'classifiers': meta['Classifier'],
        'download_url': meta['Download-URL'],
        #'provides': meta['Provides'],
        #'requires': meta['Requires'],
        #'obsoletes': meta['Obsoletes'],
        }

    comment = ''
    if command == 'bdist_rpm':
        dist, version, id = platform.dist()
        if dist:
            comment = 'built for %s %s' % (dist, version)
    elif command == 'bdist_dumb':
        comment = 'built for %s' % platform.platform(terse=1)
    data['comment'] = comment

    if self.sign:
        data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                 open(filename + ".asc").read())

    # set up the authentication
    auth = "Basic " + standard_b64encode(self.username + ":" +
                                         self.password)

    # Build up the MIME payload for the POST data
    boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
    sep_boundary = '\n--' + boundary
    end_boundary = sep_boundary + '--'
    body = StringIO.StringIO()
    for key, value in data.items():
        # handle multiple entries for the same name
        if not isinstance(value, list):
            value = [value]
        for value in value:
            if isinstance(value, tuple):
                fn = ';filename="%s"' % value[0]
                value = value[1]
            else:
                fn = ""
            body.write(sep_boundary)
            body.write('\nContent-Disposition: form-data; name="%s"' % key)
            body.write(fn)
            body.write("\n\n")
            body.write(value)
            if value and value[-1] == '\r':
                body.write('\n')  # write an extra newline (lurve Macs)
    body.write(end_boundary)
    body.write("\n")
    body = body.getvalue()

    self.announce("Submitting %s to %s" % (filename, self.repository),
                  log.INFO)

    # build the Request
    headers = {'Content-type':
                   'multipart/form-data; boundary=%s' % boundary,
               'Content-length': str(len(body)),
               'Authorization': auth}

    request = Request(self.repository, data=body,
                      headers=headers)
    # send the data
    try:
        result = urlopen(request)
        status = result.getcode()
        reason = result.msg
    except socket.error, e:
        self.announce(str(e), log.ERROR)
        return
def byte_compile(py_files, optimize=0, force=0, prefix=None, base_dir=None,
                 verbose=1, dry_run=0, direct=None):
    """Byte-compile a collection of Python source files to either .pyc or
    .pyo files in the same directory.

    'py_files' is a list of files to compile; any files that don't end in
    ".py" are silently skipped.  'optimize' must be one of the following:
      0 - don't optimize (generate .pyc)
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")

    If 'force' is true, all files are recompiled regardless of timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'base_dir'.  'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped).  You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would affect
    the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it.  Normally, you should let
    'byte_compile()' figure out whether to use direct compilation or not
    (see the source for details).  The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.
    """
    # nothing is done if sys.dont_write_bytecode is True
    if hasattr(sys, 'dont_write_bytecode') and sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')

    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in.  We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0.  If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing.  Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = (__debug__ and optimize == 0)

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        try:
            from tempfile import mkstemp
            (script_fd, script_name) = mkstemp(".py")
        except ImportError:
            from tempfile import mktemp
            (script_fd, script_name) = None, mktemp(".py")
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w")
            else:
                script = open(script_name, "w")

            script.write("""\
from distutils.util import byte_compile
files = [
""")

            # XXX would be nice to write absolute filenames, just for
            # safety's sake (script should be more robust in the face of
            # chdir'ing before running it).  But this requires abspath'ing
            # 'prefix' as well, and that breaks the hack in build_lib's
            # 'byte_compile()' method that carefully tacks on a trailing
            # slash (os.sep really) to make sure the prefix here is "just
            # right".  This whole prefix business is rather delicate -- the
            # problem is that it's really a directory, but I'm treating it
            # as a dumb string, so trailing slashes and so forth matter.
            #py_files = map(os.path.abspath, py_files)
            #if prefix:
            #    prefix = os.path.abspath(prefix)

            script.write(",\n".join(map(repr, py_files)) + "]\n")
            script.write("""
byte_compile(files, optimize=%r, force=%r,
             prefix=%r, base_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
""" % (optimize, force, prefix, base_dir, verbose))

            script.close()

        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, "-O")
        elif optimize == 2:
            cmd.insert(1, "-OO")
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name,), "removing %s" % script_name,
                dry_run=dry_run)

    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now.  Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion.  Hey, it works!
    else:
        from py_compile import compile

        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue

            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            cfile = file + (__debug__ and "c" or "o")
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError("invalid prefix: filename %r doesn't "
                                     "start with %r" % (file, prefix))
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)

            cfile_base = os.path.basename(cfile)
            if direct:
                if force or newer(file, cfile):
                    log.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug("skipping byte-compilation of %s to %s",
                              file, cfile_base)
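# Hedged usage sketch (not part of the module above): byte-compiling installed
# modules while recording a different source path in the bytecode files.  It
# assumes the module context above (log, spawn, execute, newer, os, sys); the
# build and install paths are illustrative assumptions only.
def _example_byte_compile():
    files = ["build/lib/pkg/__init__.py", "build/lib/pkg/util.py"]
    # Strip the 'build/lib/' prefix and prepend the final install location,
    # so tracebacks point at the installed path rather than the temporary
    # build tree.
    byte_compile(files, optimize=0, force=1,
                 prefix="build/lib/",
                 base_dir="/usr/lib/python2.7/site-packages",
                 verbose=1, dry_run=0)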
def spawn(self, cmd):
    """Run an external command, honoring the dry-run flag."""
    spawn(cmd, dry_run=self.dry_run)