def test_spawn(self):
        tmpdir = self.mkdtemp()

        # creating something executable
        # through the shell that returns 1
        if os.name == 'posix':
            exe = os.path.join(tmpdir, 'foo.sh')
            self.write_file(exe, '#!/bin/sh\nexit 1')
            os.chmod(exe, 0777)
        else:
            exe = os.path.join(tmpdir, 'foo.bat')
            self.write_file(exe, 'exit 1')

        os.chmod(exe, 0777)
        self.assertRaises(DistutilsExecError, spawn, [exe])

        # now something that works
        if os.name == 'posix':
            exe = os.path.join(tmpdir, 'foo.sh')
            self.write_file(exe, '#!/bin/sh\nexit 0')
            os.chmod(exe, 0777)
        else:
            exe = os.path.join(tmpdir, 'foo.bat')
            self.write_file(exe, 'exit 0')

        os.chmod(exe, 0777)
        spawn([exe])  # should work without any error
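For reference, a minimal stand-alone sketch of the call pattern this test exercises (an illustrative addition, assuming a POSIX system where the `true` and `false` utilities are on PATH):

from distutils.spawn import spawn
from distutils.errors import DistutilsExecError

spawn(['true'])              # exit status 0: returns normally
try:
    spawn(['false'])         # exit status 1: raises DistutilsExecError
except DistutilsExecError as exc:
    print('spawn failed as expected: %s' % exc)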
Example #2
    def test_tarfile_vs_tar(self):
        root_dir, base_dir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        tarball = make_archive(base_name, 'gztar', root_dir, base_dir)

        # check if the compressed tarball was created
        self.assertEqual(tarball, base_name + '.tar.gz')
        self.assertTrue(os.path.isfile(tarball))

        # now create another tarball using `tar`
        tarball2 = os.path.join(root_dir, 'archive2.tar')
        tar_cmd = ['tar', '-cf', 'archive2.tar', base_dir]
        with support.change_cwd(root_dir), captured_stdout():
            spawn(tar_cmd)

        self.assertTrue(os.path.isfile(tarball2))
        # let's compare both tarballs
        self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2))

        # trying an uncompressed one
        tarball = make_archive(base_name, 'tar', root_dir, base_dir)
        self.assertEqual(tarball, base_name + '.tar')
        self.assertTrue(os.path.isfile(tarball))

        # now for a dry_run
        tarball = make_archive(base_name, 'tar', root_dir, base_dir,
                               dry_run=True)
        self.assertEqual(tarball, base_name + '.tar')
        self.assertTrue(os.path.isfile(tarball))
Example #3
def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.  'compress' must be "gzip" (the default), "compress",
    "bzip2", or None.  Both "tar" and the compression utility named by
    'compress' must be on the default program search path, so this is
    probably Unix-specific.  The output tar file will be named 'base_dir' +
    ".tar", possibly plus the appropriate compression extension (".gz",
    ".bz2" or ".Z").  Return the output filename.
    """
    # XXX GNU tar 1.13 has a nifty option to add a prefix directory.
    # It's pretty new, though, so we certainly can't require it --
    # but it would be nice to take advantage of it to skip the
    # "create a tree of hardlinks" step!  (Would also be nice to
    # detect GNU tar to use its 'z' option and save a step.)

    compress_ext = {"gzip": ".gz", "bzip2": ".bz2", "compress": ".Z"}

    # flags for compression program, each element of list will be an argument
    compress_flags = {"gzip": ["-f9"], "compress": ["-f"], "bzip2": ["-f9"]}

    if compress is not None and compress not in compress_ext.keys():
        raise ValueError, \
              "bad value for 'compress': must be None, 'gzip', 'bzip2', or 'compress'"

    archive_name = base_name + ".tar"
    mkpath(os.path.dirname(archive_name), dry_run=dry_run)
    cmd = ["tar", "-cf", archive_name, base_dir]
    spawn(cmd, dry_run=dry_run)

    if compress:
        spawn([compress] + compress_flags[compress] + [archive_name], dry_run=dry_run)
        return archive_name + compress_ext[compress]
    else:
        return archive_name
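For comparison, a minimal usage sketch based on the standard library's own distutils.archive_util.make_tarball rather than the spawn-based variant above; the staging directory and archive name are made up for illustration:

import os
import tempfile
from distutils.archive_util import make_tarball

staging = tempfile.mkdtemp(prefix='example_pkg_')
open(os.path.join(staging, 'README.txt'), 'w').close()

# Creates '<tmp>/example_archive.tar.gz' and returns its path.
archive = make_tarball(os.path.join(tempfile.gettempdir(), 'example_archive'),
                       staging)
print(archive)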
Example #4
   def build_man_page(self):
      """Build man pages for all installable programs."""
        
      self.announce("building man pages")

      descriptions = {}
      descriptions['synopsis'] = """simple frontend to the Synopsis framework, a multi-language source code introspection tool that
provides a variety of representations for the parsed code, to
enable further processing such as documentation extraction,
reverse engineering, and source-to-source translation."""
      
      descriptions['sxr-server'] = """the Synopsis Cross-Reference http server. Allows users
to query and browse cross-referenced source code."""
      

      help2man = find_executable('help2man')
      if not help2man:
         self.warn("cannot build man pages")
         return
      gzip = find_executable('gzip')

      section = 1
      man_dir = 'share/man/man%d'%section
      mkpath(man_dir, 0777, self.verbose, self.dry_run)

      for s in ['synopsis', 'sxr-server']:
         command = [help2man, '-N', '-n', descriptions[s]]
         executable = os.path.join('scripts', s)
         output = '%s/%s.%d'%(man_dir, s, section)
         command += ['-o', output, executable]
         spawn(command)
         if gzip:
            spawn(['gzip', '-f', output])
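The help2man/gzip guards above rely on distutils.spawn.find_executable; a minimal sketch of that lookup (the tool names are only examples):

from distutils.spawn import find_executable

for tool in ('help2man', 'gzip', 'no-such-tool'):
    # Returns the absolute path of the program if it is on PATH, else None.
    print('%s -> %s' % (tool, find_executable(tool)))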
Example #5
 def build_win32_deps(self):
     ccase = ClearcaseConfig()
     if not os.path.exists(self.build_temp):
         os.makedirs(self.build_temp)
     lib_exe = _win32_find_exe('lib.exe')
     for k, v in ccase.winconfs.items():
         dll_pname = os.path.join(ccase.get_libdirs()[0], '%s.dll' % k)
         def_pname = os.path.join(self.build_temp, '%s.def' % v['prefix'])
         f = open(def_pname, 'w')
         f.write('LIBRARY %s\n' % k)
         f.write('EXPORTS\n')
         for func in v['funcs']:
             ordinal = _win32_get_ordinal(dll_pname, func)
             if ordinal == -1:
                 raise SetupError('Cannot get ordinal on %s()' % func)
             if func == 'cmdsyn_proc_table':
                 f.write('\t%s=_%s\tDATA\n' % (func, func))
             else:
                 f.write('\t%s=_%s\n' % (func, func))
         f.close()
         spawn.spawn([lib_exe,
                      '/MACHINE:I386',
                      '/DEF:%s' % def_pname,
                      '/OUT:%s.lib' % os.path.join(self.build_temp, v['prefix']),
                     ])
Example #6
File: setup.py  Project: ofanoyi/khmer
    def run(self):
        """Run extension builder."""
        if "%x" % sys.maxsize != '7fffffffffffffff':
            raise DistutilsPlatformError("%s require 64-bit operating system" %
                                         SETUP_METADATA["packages"])

        if "z" not in self.libraries:
            zcmd = ['bash', '-c', 'cd ' + ZLIBDIR + ' && ( test Makefile -nt'
                    ' configure || bash ./configure --static ) && make -f '
                    'Makefile.pic PIC']
            spawn(cmd=zcmd, dry_run=self.dry_run)
            self.extensions[0].extra_objects.extend(
                path_join("third-party", "zlib", bn + ".lo") for bn in [
                    "adler32", "compress", "crc32", "deflate", "gzclose",
                    "gzlib", "gzread", "gzwrite", "infback", "inffast",
                    "inflate", "inftrees", "trees", "uncompr", "zutil"])
        if "bz2" not in self.libraries:
            bz2cmd = ['bash', '-c', 'cd ' + BZIP2DIR + ' && make -f '
                      'Makefile-libbz2_so all']
            spawn(cmd=bz2cmd, dry_run=self.dry_run)
            self.extensions[0].extra_objects.extend(
                path_join("third-party", "bzip2", bn + ".o") for bn in [
                    "blocksort", "huffman", "crctable", "randtable",
                    "compress", "decompress", "bzlib"])
        _build_ext.run(self)
Example #7
    def run(self):
        """Run this command, i.e. do the actual document generation."""

        tempdir = os.path.abspath(os.path.join(self.build_temp, 'doc'))
        srcdir = os.getcwd()
        self.mkpath(tempdir)

        generator = self.generator
        args = self.args or ''

        if not generator:
            generator = find_executable('epydoc')
            if generator:
                args = '-o share/doc/qmtest/html/manual'

        if not generator:
            generator = find_executable('happydoc')
            if generator:
                args = '-d share/doc/qmtest/html/manual'

        if not generator:
            self.warn("could not find either of epydoc or happydoc in PATH")
        else:
            self.announce("building reference manual")
            spawn([generator] + args.split() + ['qm'])
Example #8
    def MakePackageZIP(self, flist):
        files = " ".join(flist)

        if self.flagVerbose:
            print "compressing %s..." % self.parOutput

        cwd = os.getcwd()
        os.chdir(self.parOutputPath)
        cmd = "zip -9 -j "
        if not self.flagVerbose:
            cmd += "-q "
        cmd += self.parOutput + " " + files

        from distutils.spawn import spawn
        from distutils.errors import DistutilsExecError
        try:
            spawn(cmd.split())
            success = True
        except DistutilsExecError:
            success = False
            
        os.chdir(cwd)
    
        if not success:
            print "Unable to execute zip program. Make sure it is in the path."
            print "You can download it at http://www.cdrom.com/pub/infozip/"
            self.retCode = 1
Example #9
 def compress_assets(self):
     if not self.compress:
         for dir, files in self.distribution.data_files:
             for f in files:
                 if f.find("-min.") >= 0:
                     print("removing", f)
                     os.remove(f)
     else:
         rxfileref = re.compile(r"(/[-A-Za-z0-9_]+?)(?:-min)?(\.(html|js|css))")
         for dir, files in self.distribution.data_files:
             files = [f for f in files if f.find("-min.") < 0]
             if not files:
                 continue
             elif dir == 'data/javascript':
                 spawn(['java', '-jar', os.environ["YUICOMPRESSOR"], '--type', 'js',
                        '-o', (len(files) > 1 and '.js$:-min.js')
                               or files[0].replace(".js", "-min.js")]
                       + files)
             elif dir == 'data/css':
                 spawn(['java', '-jar', os.environ["YUICOMPRESSOR"], '--type', 'css',
                        '-o', (len(files) > 1 and '.css$:-min.css')
                               or files[0].replace(".css", "-min.css")]
                       + files)
             elif dir == 'data/templates':
                 for f in files:
                     if f.endswith(".html"):
                         print("minifying", f)
                         minified = open(f).read()
                         minified = re.sub(re.compile(r"\n\s*([<>])", re.S), r"\1", minified)
                         minified = re.sub(re.compile(r"\n\s*", re.S), " ", minified)
                         minified = re.sub(r"<!-- (.*?) -->", "", minified)
                         minified = re.sub(rxfileref, r"\1-min\2", minified)
                         open(f.replace(".html", "-min.html"), "w").write(minified)
Example #10
    def merge_desktop(self, src, dest):
        log.info("Merging desktop file %s" % src)
        if self.lint:
            dest = '/dev/null'

        cmd = ['intltool-merge', '-d', 'po', src, dest]
        spawn(cmd)
Example #11
def pytest_configure(config):
    os.chdir(os.path.dirname(__file__))
    cmd = [sys.executable, "setup.py", "-q", "build_ext", "-q"]
    spawn(cmd, search_path=0)

    from tests import build_test_extensions
    build_test_extensions()
Example #12
File: setup.py  Project: dmwm/overview
  def compress_assets(self):
    if not self.compress:
      for dir, files in self.distribution.data_files:
        for f in files:
          if f.find("-min.") >= 0:
	    print "removing", f
            os.remove(f)
    else:
      rxfileref = re.compile(r"(/[-A-Za-z0-9_]+?)(?:-min)?(\.(js|css))")
      for dir, files in self.distribution.data_files:
        files = [f for f in files if f.find("-min.") < 0]
        jsfiles = [f for f in files if f.endswith(".js")]
        cssfiles = [f for f in files if f.endswith(".css")]
        htmlfiles = [f for f in files if f.endswith(".html")]
        if jsfiles:
          spawn(['java', '-jar', os.environ["YUICOMPRESSOR"], '--type', 'js',
                 '-o', (len(jsfiles) > 1 and '.js$:-min.js')
                        or jsfiles[0].replace(".js", "-min.js")]
                + jsfiles)
        if cssfiles:
          spawn(['java', '-jar', os.environ["YUICOMPRESSOR"], '--type', 'css',
                 '-o', (len(cssfiles) > 1 and '.css$:-min.css')
                        or cssfiles[0].replace(".css", "-min.css")]
                + cssfiles)
        for f in htmlfiles:
	  print "minifying", f
          minified = open(f).read()
          minified = re.sub(re.compile(r"\n\s*([<>])", re.S), r"\1", minified)
          minified = re.sub(re.compile(r"\n\s*", re.S), " ", minified)
          minified = re.sub(r"<!-- (.*?) -->", "", minified)
          minified = re.sub(rxfileref, r"\1-min\2", minified)
          open(f.replace(".html", "-min.html"), "w").write(minified)
Example #13
File: install_lib.py  Project: Macowe/jep
    def link_native_lib(self, jep_dir, jep_lib):
        # we'll put the jep_dir as -Djava.library.path in the jep script
        if is_windows():
            jep_dll = os.path.join(jep_dir, 'jep.dll')
            # Remove the old DLL if it exists to avoid a file move error.
            if os.path.exists(jep_dll):
                os.remove(jep_dll)
            # Do not use 'spawn' as that will run as a non-administrative user
            # that may no longer have access to the destination directory.
            self.move_file(os.path.join(jep_dir, jep_lib), jep_dll)

        elif is_osx():
            # Apple says to put the file at /Library/Java/Extensions/libjep.jnilib,
            # which is good for a permanent install but not so good when using
            # virtualenv or testing
            spawn(['ln',
                   '-sf',
                   '{0}'.format(jep_lib),
                   '{0}'.format(os.path.join(jep_dir, 'libjep.jnilib')),])

        else:
            # otherwise, distutils outputs 'jep.so' which needs to be linked
            # to libjep.so. The JVM will not find the library without.
            spawn(['ln',
                   '-sf',
                   '{0}'.format(jep_lib),
                   '{0}'.format(os.path.join(jep_dir, 'libjep.so')),
                   ])
Example #14
    def test_spawn(self):
        tmpdir = self.mkdtemp()

        # creating something executable
        # through the shell that returns 1
        if sys.platform != 'win32':
            exe = os.path.join(tmpdir, 'foo.sh')
            self.write_file(exe, '#!%s\nexit 1' % unix_shell)
        else:
            exe = os.path.join(tmpdir, 'foo.bat')
            self.write_file(exe, 'exit 1')

        os.chmod(exe, 0o777)
        self.assertRaises(DistutilsExecError, spawn, [exe])

        # now something that works
        if sys.platform != 'win32':
            exe = os.path.join(tmpdir, 'foo.sh')
            self.write_file(exe, '#!%s\nexit 0' % unix_shell)
        else:
            exe = os.path.join(tmpdir, 'foo.bat')
            self.write_file(exe, 'exit 0')

        os.chmod(exe, 0o777)
        spawn([exe])  # should work without any error
Example #15
def make_zipfile(base_name, base_dir, verbose = 0, dry_run = 0):
    try:
        import zipfile
    except ImportError:
        zipfile = None

    zip_filename = base_name + '.zip'
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
    if zipfile is None:
        if verbose:
            zipoptions = '-r'
        else:
            zipoptions = '-rq'
        try:
            spawn(['zip',
             zipoptions,
             zip_filename,
             base_dir], dry_run=dry_run)
        except DistutilsExecError:
            raise DistutilsExecError, \
                  ("unable to create zip file '%s': could neither import the 'zipfile' "
                   "module nor find a standalone zip utility" % zip_filename)

    else:
        log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
        if not dry_run:
            zip = zipfile.ZipFile(zip_filename, 'w', compression=zipfile.ZIP_DEFLATED)
            for dirpath, dirnames, filenames in os.walk(base_dir):
                for name in filenames:
                    path = os.path.normpath(os.path.join(dirpath, name))
                    if os.path.isfile(path):
                        zip.write(path, path)
                        log.info("adding '%s'" % path)

            zip.close()
    return zip_filename
Example #16
    def build_extension(self, ext, copy=True):

        self.announce("building '%s' in %s"%(ext[1], ext[0]))

        if os.name == 'nt': 
            # same as in config.py here: even on 'nt' we have to
            # use posix paths because we run in a cygwin shell at this point
            path = self.build_temp.replace('\\', '/') + '/' + ext[0]
            temp_target = self.build_temp.replace('\\', '/') + '/' + ext[0]
        else:
            path = os.path.join(self.build_temp, ext[0])
            temp_target = os.path.join(self.build_temp, ext[0])
        
        make = os.environ.get('MAKE', 'make')

        command = '%s -C "%s" %s'%(make, path, ext[1])
        spawn(['sh', '-c', command], self.verbose, self.dry_run)

        #The extension may not be compiled. For now just skip it.
        if copy and os.path.isfile(os.path.join(temp_target, ext[1])):
            
            if self.inplace: build_path = ext[0]
            else: build_path = os.path.join(self.build_lib, ext[0])            
            mkpath (build_path, 0777, self.verbose, self.dry_run)
            copy_file(os.path.join(path, ext[1]),
                      os.path.join(build_path, ext[1]),
                      1, 1, 0, None, self.verbose, self.dry_run)
Example #17
    def check_serial(self, qmtest, options):
        """Perform serial tests."""

        cmd = [qmtest] + options + \
              ['-D', 'tests', 'run', '-c',
               norm('qmtest_path=' + qmtest)]
        spawn(cmd)
Example #18
    def run(self):
        # call build sphinx to build docs
        self.run_command("build_sphinx")
        cmd = self.get_finalized_command("build_sphinx")
        source_dir = cmd.builder_target_dir

        # copy to directory with appropriate name
        dist = self.distribution
        arc_name = "%s-docs-%s" % (dist.get_name(), dist.get_version())
        tmp_dir = os.path.join(self.build_dir, arc_name)
        if os.path.exists(tmp_dir):
            dir_util.remove_tree(tmp_dir, dry_run=self.dry_run)
        self.copy_tree(source_dir, tmp_dir, preserve_symlinks=True)

        # make archive from dir
        arc_base = os.path.join(self.dist_dir, arc_name)
        self.arc_filename = self.make_archive(arc_base, self.format,
                                              self.build_dir)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", self.arc_filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # cleanup
        if not self.keep_temp:
            dir_util.remove_tree(tmp_dir, dry_run=self.dry_run)
Example #19
    def compile(self, src, dest):
        log.info("Compiling %s" % src)
        if self.lint:
            dest = '/dev/null'

        cmd = ['msgfmt', '--check', '--statistics', '-o', dest, src]
        spawn(cmd)
Example #20
 def buildDocsFromSource(self):
     srcdir = '../../..'
     docdir = os.path.join(srcdir, 'Doc')
     htmldir = os.path.join(docdir, 'html')
     spawn(('make','--directory', docdir, 'html'), 1, self.verbose, self.dry_run)
     self.mkpath(self.build_html)
     copy_tree(htmldir, self.build_html)
Example #21
def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):
    """Create a zip file from all the files under 'base_dir'.

    The output zip file will be named 'base_name' + ".zip".  Uses either the
    "zipfile" Python module (if available) or the InfoZIP "zip" utility
    (if installed and found on the default search path).  If neither tool is
    available, raises DistutilsExecError.  Returns the name of the output zip
    file.
    """
    zip_filename = base_name + ".zip"
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)

    # If zipfile module is not available, try spawning an external
    # 'zip' command.
    if zipfile is None:
        if verbose:
            zipoptions = "-r"
        else:
            zipoptions = "-rq"

        try:
            spawn(["zip", zipoptions, zip_filename, base_dir],
                  dry_run=dry_run)
        except DistutilsExecError:
            # XXX really should distinguish between "couldn't find
            # external 'zip' command" and "zip failed".
            raise DistutilsExecError(("unable to create zip file '%s': "
                   "could neither import the 'zipfile' module nor "
                   "find a standalone zip utility") % zip_filename)

    else:
        log.info("creating '%s' and adding '%s' to it",
                 zip_filename, base_dir)

        if not dry_run:
            try:
                zip = zipfile.ZipFile(zip_filename, "w",
                                      compression=zipfile.ZIP_DEFLATED)
            except RuntimeError:
                zip = zipfile.ZipFile(zip_filename, "w",
                                      compression=zipfile.ZIP_STORED)

            with zip:
                if base_dir != os.curdir:
                    path = os.path.normpath(os.path.join(base_dir, ''))
                    zip.write(path, path)
                    log.info("adding '%s'", path)
                for dirpath, dirnames, filenames in os.walk(base_dir):
                    for name in dirnames:
                        path = os.path.normpath(os.path.join(dirpath, name, ''))
                        zip.write(path, path)
                        log.info("adding '%s'", path)
                    for name in filenames:
                        path = os.path.normpath(os.path.join(dirpath, name))
                        if os.path.isfile(path):
                            zip.write(path, path)
                            log.info("adding '%s'", path)

    return zip_filename
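A minimal usage sketch of the same API via the standard library's distutils.archive_util.make_zipfile (the staging directory and archive name are hypothetical); no external 'zip' binary is needed as long as the zipfile module imports:

import os
import tempfile
from distutils.archive_util import make_zipfile

staging = tempfile.mkdtemp(prefix='zip_example_')
open(os.path.join(staging, 'data.txt'), 'w').close()

# Returns '<tmp>/zip_example_archive.zip'.
print(make_zipfile(os.path.join(tempfile.gettempdir(), 'zip_example_archive'),
                   staging))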
Example #22
    def run(self):
        if sys.platform == 'win32':
            # try to guess the path of the gettext utilities
            os.environ['PATH'] = os.environ['PATH'] + \
                ';c:\\Program Files\\GnuWin32\\bin'
        if not spawn.find_executable('msgfmt'):
            msg = '** Error: Building Bauble requires the gettext utilities ' \
                  'be installed.  If they are installed please ensure that ' \
                  'the msgfmt command is in your PATH'
            print msg
            sys.exit(1)

        _build.run(self)

        # create build/share directory
        dir_util.mkpath(os.path.join(self.build_base, 'share'))

        dest_tmpl = os.path.join(self.build_base, locale_path, '%s',
                                 'LC_MESSAGES')
        matches = glob.glob('po/*.po')
        from bauble.i18n import TEXT_DOMAIN
        for po in matches:
            # create an .mo in build/share/locale/$LANG/LC_MESSAGES
            loc, ext = os.path.splitext(os.path.basename(po))
            localedir = dest_tmpl % loc

            mo = '%s/%s.mo' % (localedir, TEXT_DOMAIN)
            if not os.path.exists(localedir):
                dir_util.mkpath(localedir)
            if not os.path.exists(mo) or dep_util.newer(po, mo):
                spawn.spawn(['msgfmt', po, '-o', mo])

        # copy .desktop and icons
        if sys.platform in ('linux3', 'linux2'):
            app_dir = os.path.join(self.build_base, 'share', 'applications')
            dir_util.mkpath(app_dir)
            file_util.copy_file('data/bauble.desktop', app_dir)

            icon_sizes = [16, 22, 24, 32, 48, 64]
            icon_root = os.path.join(
                self.build_base, 'share', 'icons', 'hicolor')

            # copy scalable icon
            scalable_dir = os.path.join(icon_root, 'scalable', 'apps')
            dir_util.mkpath(scalable_dir)
            file_util.copy_file('data/bauble.svg', scalable_dir)

            pixmaps_dir = os.path.join(self.build_base, 'share', 'pixmaps')
            dir_util.mkpath(pixmaps_dir)
            file_util.copy_file('data/bauble.svg', pixmaps_dir)

            # copy .png icons
            dimension = lambda s: '%sx%s' % (s, s)
            for size in icon_sizes:
                img = 'data/bauble-%s.png' % size
                dest = os.path.join(icon_root, '%s/apps/bauble.png'
                                    % dimension(size))
                dir_util.mkpath(os.path.split(dest)[0])
                file_util.copy_file(img, dest)
Example #23
File: java.py  Project: ndjensen/jep
 def build(self, *jclasses):
     jep = [x for x in list(*jclasses) if not x.startswith('src{0}jep{0}test{0}'.format(os.sep))]
     tests = [x for x in list(*jclasses) if x.startswith('src{0}jep{0}test{0}'.format(os.sep))]
     spawn([self.javac, '-deprecation', '-d', build_java.outdir, '-classpath', 'src'] + jep)
     spawn([self.javac, '-deprecation', '-d', build_java.testoutdir, '-classpath', '{0}{1}src'.format(build_java.outdir, os.pathsep)] + tests)
     # Copy the source files over to the build directory to make src.jar's.
     self.copySrc('jep', jep)
     self.copySrc('jep.test', tests)
Example #24
def design_docs_setup():
  """Pushes the CouchDB design docs to the database.

  These documents are required.

  """
  os.chdir(os.path.join(os.path.dirname(__file__), "couchdb_design"))
  spawn(['couchapp', 'push', '.', config.get('database', 'couchapp_dest')])
Example #25
File: setup.py  Project: krrr/Hazama
 def run(self):
     spawn([sys.executable, pjoin('utils', 'setupfreeze.py'), 'build_exe'])
     # remove duplicate python DLL
     try:
         dll_path = glob(pjoin('build', 'python*.dll'))[0]
         os.remove(pjoin('build', 'lib', os.path.basename(dll_path)))
     except IndexError:
         pass
Example #26
def bower_setup():
  """Fetches Bower dependencies.

  Dependencies are specified in bower.json.

  """
  os.chdir(os.path.dirname(__file__))
  spawn(['bower', 'install'])
Example #27
 def run(self):
     prog = find_executable('epydoc')
     pkg_dirs = [change_root(self.build_lib, pkg) for pkg in self.distribution.packages]
     cmd = [prog, '-v', '--%s' % self.action, '--docformat', 'restructuredtext', '-o', self.docdir]
     #if self.verbose: cmd.append('-v')
     cmd.extend(pkg_dirs)
     self.mkpath(self.docdir)
     spawn(cmd)
Example #28
def have_libvirt_lxc():
    try:
        spawn([get_pkgcfg(),
               "--atleast-version=%s" % MIN_LIBVIRT_LXC,
             "libvirt"])
        return True
    except DistutilsExecError:
        return False
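The same probe pattern in a self-contained form (a sketch only; it assumes pkg-config is installed and the queried package name exists, so adjust both to taste):

from distutils.spawn import spawn
from distutils.errors import DistutilsExecError

def have_pkg(name, min_version):
    # pkg-config exits non-zero if the package is absent or too old,
    # which spawn() turns into a DistutilsExecError.
    try:
        spawn(['pkg-config', '--atleast-version=%s' % min_version, name])
        return True
    except DistutilsExecError:
        return False

print(have_pkg('zlib', '1.2'))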
Example #29
    def run(self):
        if sys.platform == "win32":
            # try to guess the path of the gettext utilities
            os.environ["PATH"] = os.environ["PATH"] + ";c:\\Program Files\\GnuWin32\\bin"
        if not spawn.find_executable("msgfmt"):
            msg = (
                "** Error: Building Bauble requires the gettext utilities "
                "be installed.  If they are installed please ensure that "
                "the msgfmt command is in your PATH"
            )
            print msg
            sys.exit(1)

        _build.run(self)

        # create build/share directory
        dir_util.mkpath(os.path.join(self.build_base, "share"))

        dest_tmpl = os.path.join(self.build_base, locale_path, "%s", "LC_MESSAGES")
        matches = glob.glob("po/*.po")
        from bauble.i18n import TEXT_DOMAIN

        for po in matches:
            # create an .mo in build/share/locale/$LANG/LC_MESSAGES
            loc, ext = os.path.splitext(os.path.basename(po))
            localedir = dest_tmpl % loc
            # mo = '%s/bauble-1.mo' % localedir
            mo = "%s/%s.mo" % (localedir, TEXT_DOMAIN)
            if not os.path.exists(localedir):
                dir_util.mkpath(localedir)
            if not os.path.exists(mo) or dep_util.newer(po, mo):
                spawn.spawn(["msgfmt", po, "-o", mo])

        # copy .desktop and icons
        if sys.platform == "linux2":
            app_dir = os.path.join(self.build_base, "share/applications")
            dir_util.mkpath(app_dir)
            file_util.copy_file("data/bauble.desktop", app_dir)

            icon_sizes = [16, 22, 24, 32, 48, 64]  # , 128]
            icon_root = os.path.join(self.build_base, "share/icons/hicolor")

            # copy scalable icon
            scalable_dir = os.path.join(icon_root, "scalable/apps")
            dir_util.mkpath(scalable_dir)
            file_util.copy_file("data/bauble.svg", scalable_dir)

            pixmaps_dir = os.path.join(self.build_base, "share/pixmaps")
            dir_util.mkpath(pixmaps_dir)
            file_util.copy_file("data/bauble.svg", pixmaps_dir)

            # copy .png icons
            dimension = lambda s: "%sx%s" % (s, s)
            for size in icon_sizes:
                img = "data/bauble-%s.png" % size
                dest = os.path.join(icon_root, "%s/apps/bauble.png" % dimension(size))
                dir_util.mkpath(os.path.split(dest)[0])
                file_util.copy_file(img, dest)
Example #30
File: setup.py  Project: RazerM/llvmlite
 def run(self):
     build_ext.run(self)
     cmd = [sys.executable, os.path.join(here_dir, 'ffi', 'build.py')]
     spawn(cmd, dry_run=self.dry_run)
     # HACK: this makes sure the library file (which is large) is only
     # included in binary builds, not source builds.
     self.distribution.package_data = {
         "llvmlite.binding": get_library_files(),
     }
Example #31
    def upload_file(self, command, pyversion, filename):
        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args, dry_run=self.dry_run)

        # Fill in the data
        f = open(filename, 'rb')
        content = f.read()
        f.close()
        basename = os.path.basename(filename)
        comment = ''
        if command == 'bdist_egg' and self.distribution.has_ext_modules():
            comment = "built on %s" % platform.platform(terse=1)
        data = {
            ':action': 'file_upload',
            'protocol_version': '1',
            'name': self.distribution.get_name(),
            'version': self.distribution.get_version(),
            'content': (basename, content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),
        }
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            asc_file = open(filename + ".asc")
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     asc_file.read())
            asc_file.close()

        # set up the authentication
        auth = "Basic " + base64.encodestring(self.username + ":" +
                                              self.password).strip()

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in list(data.items()):
            # handle multiple entries for the same name
            if type(value) != type([]):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""
                value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"' % key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository),
                      log.INFO)

        # build the Request
        # We can't use urllib2 since we need to send the Basic
        # auth right with the first request
        schema, netloc, url, params, query, fragments = \
            urllib.parse.urlparse(self.repository)
        assert not params and not query and not fragments
        if schema == 'http':
            http = http.client.HTTPConnection(netloc)
        elif schema == 'https':
            http = http.client.HTTPSConnection(netloc)
        else:
            raise AssertionError("unsupported schema " + schema)

        data = ''
        loglevel = log.INFO
        try:
            http.connect()
            http.putrequest("POST", url)
            http.putheader('Content-type',
                           'multipart/form-data; boundary=%s' % boundary)
            http.putheader('Content-length', str(len(body)))
            http.putheader('Authorization', auth)
            http.endheaders()
            http.send(body)
        except socket.error as e:
            self.announce(str(e), log.ERROR)
            return

        r = http.getresponse()
        if r.status == 200:
            self.announce('Server response (%s): %s' % (r.status, r.reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                          log.ERROR)
        if self.show_response:
            print('-' * 75, r.read(), '-' * 75)
Example #32
 def spawn (self, cmd, search_path=1, level=1):
     """Spawn an external command respecting verbose and dry-run flags."""
     from distutils.spawn import spawn
     spawn(cmd, search_path,
           self.verbose >= level,
           self.dry_run)
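A minimal sketch of a command class that goes through this wrapper (the class name and echoed text are made up; assumes a POSIX system with 'echo' on PATH), so --verbose and --dry-run are honoured by the external call:

from distutils.core import Command
from distutils.dist import Distribution

class run_echo(Command):
    """Toy command: shells out via the inherited spawn() wrapper."""
    description = "run 'echo' through Command.spawn()"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        self.spawn(['echo', 'hello from run_echo'])

dist = Distribution()
cmd = run_echo(dist)
cmd.ensure_finalized()
cmd.run()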
Example #33
def make_tarball(base_name,
                 base_dir,
                 compress="gzip",
                 verbose=0,
                 dry_run=0,
                 owner=None,
                 group=None):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "compress", "bzip2", or None.
    (compress will be deprecated in Python 3.2)

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_dir' +  ".tar", possibly plus
    the appropriate compression extension (".gz", ".bz2" or ".Z").

    Returns the output filename.
    """
    tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', None: '', 'compress': ''}
    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'compress': '.Z'}

    # flags for compression program, each element of list will be an argument
    if compress is not None and compress not in compress_ext.keys():
        raise ValueError, \
              ("bad value for 'compress': must be None, 'gzip', 'bzip2' "
               "or 'compress'")

    archive_name = base_name + '.tar'
    if compress != 'compress':
        archive_name += compress_ext.get(compress, '')

    mkpath(os.path.dirname(archive_name), dry_run=dry_run)

    # creating the tarball
    import tarfile  # late import so Python build itself doesn't break

    log.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()

    # compression using `compress`
    if compress == 'compress':
        warn("'compress' will be deprecated.", PendingDeprecationWarning)
        # the option varies depending on the platform
        compressed_name = archive_name + compress_ext[compress]
        if sys.platform == 'win32':
            cmd = [compress, archive_name, compressed_name]
        else:
            cmd = [compress, '-f', archive_name]
        spawn(cmd, dry_run=dry_run)
        return compressed_name

    return archive_name
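A minimal sketch of the owner/group feature using the standard library call (the 'root'/'root' names are illustrative and assume a POSIX system where they exist in the passwd and group databases):

import os
import tempfile
from distutils.archive_util import make_tarball

tree = tempfile.mkdtemp(prefix='owned_pkg_')
open(os.path.join(tree, 'file.txt'), 'w').close()

# Archive members are recorded as owned by root:root regardless of
# who runs the build; no root privileges are needed to do this.
print(make_tarball(os.path.join(tempfile.gettempdir(), 'owned_archive'),
                   tree, owner='root', group='root'))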
Example #34
    def copy_tree(self,
                  infile,
                  outfile,
                  preserve_mode=1,
                  preserve_times=1,
                  preserve_symlinks=0,
                  level=1):
        """Copy the build directory tree, respecting dry-run and force flags.
        Special treatment of libtool files.
        """
        if not self.dry_run and not os.path.isdir(infile):
            raise DistutilsFileError("cannot copy tree '%s': not a directory" %
                                     infile)
        try:
            names = os.listdir(infile)
        except OSError as e:
            if self.dry_run:
                names = []
            else:
                raise DistutilsFileError("error listing files in '%s': %s" %
                                         (infile, e.strerror))

        if not self.dry_run:
            mkpath(outfile)

        outputs = []

        for n in names:
            src_name = os.path.join(infile, n)
            dst_name = os.path.join(outfile, n)

            if n.startswith('.nfs'):
                # skip NFS rename files
                continue
            if n in ('.libs', '_libs'):
                # skip libtool directories
                continue

            if preserve_symlinks and os.path.islink(src_name):
                link_dest = os.readlink(src_name)
                log.info("linking %s -> %s", dst_name, link_dest)
                if not self.dry_run:
                    os.symlink(link_dest, dst_name)
                outputs.append(dst_name)

            elif os.path.isdir(src_name):
                outputs.extend(
                    self.copy_tree(src_name, dst_name, preserve_mode,
                                   preserve_times, preserve_symlinks))

            elif n.endswith('.la'):
                spawn(self.cmd_libtool_install + [src_name, dst_name],
                      dry_run=self.dry_run)

            else:
                copy_file(src_name,
                          dst_name,
                          preserve_mode,
                          preserve_times,
                          not self.force,
                          dry_run=self.dry_run)
                outputs.append(dst_name)

        return outputs
Example #35
    def upload_file(self, command, pyversion, filename):
        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args, dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        content = open(filename, 'rb').read()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protocol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename), content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version': '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
        }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename + ".asc").read())

        # set up the authentication
        user_pass = (self.username + ":" + self.password).encode('ascii')
        # The exact encoding of the authentication string is debated.
        # Anyway PyPI only accepts ascii for both username or password.
        auth = "Basic " + base64.encodestring(user_pass).strip().decode(
            'ascii')

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = b'\n--' + boundary.encode('ascii')
        end_boundary = sep_boundary + b'--'
        body = io.BytesIO()
        for key, value in data.items():
            title = '\nContent-Disposition: form-data; name="%s"' % key
            # handle multiple entries for the same name
            if type(value) != type([]):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    title += '; filename="%s"' % value[0]
                    value = value[1]
                else:
                    value = str(value).encode('utf-8')
                body.write(sep_boundary)
                body.write(title.encode('utf-8'))
                body.write(b"\n\n")
                body.write(value)
                if value and value[-1:] == b'\r':
                    body.write(b'\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write(b"\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository),
                      log.INFO)

        # build the Request
        # We can't use urllib since we need to send the Basic
        # auth right with the first request
        # TODO(jhylton): Can we fix urllib?
        schema, netloc, url, params, query, fragments = \
            urllib.parse.urlparse(self.repository)
        assert not params and not query and not fragments
        if schema == 'http':
            http = httpclient.HTTPConnection(netloc)
        elif schema == 'https':
            http = httpclient.HTTPSConnection(netloc)
        else:
            raise AssertionError("unsupported schema " + schema)

        data = ''
        loglevel = log.INFO
        try:
            http.connect()
            http.putrequest("POST", url)
            http.putheader('Content-type',
                           'multipart/form-data; boundary=%s' % boundary)
            http.putheader('Content-length', str(len(body)))
            http.putheader('Authorization', auth)
            http.endheaders()
            http.send(body)
        except socket.error as e:
            self.announce(str(e), log.ERROR)
            return

        r = http.getresponse()
        if r.status == 200:
            self.announce('Server response (%s): %s' % (r.status, r.reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                          log.ERROR)
        if self.show_response:
            print('-' * 75, r.read(), '-' * 75)
Example #36
 def generate_docs(self):
   if not self.skip_docs:
     os.environ["PYTHONPATH"] = "%s/../WMCore/src/python/:%s" % (os.getcwd(), os.environ["PYTHONPATH"])
     os.environ["PYTHONPATH"] = "%s/build/lib:%s" % (os.getcwd(), os.environ["PYTHONPATH"])
     spawn(['make', '-C', 'doc', 'html', 'PROJECT=%s' % 'crabserver' ])
Example #37
def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):
    """Create a zip file from all the files under 'base_dir'.

    The output zip file will be named 'base_name' + ".zip".  Uses either the
    "zipfile" Python module (if available) or the InfoZIP "zip" utility
    (if installed and found on the default search path).  If neither tool is
    available, raises DistutilsExecError.  Returns the name of the output zip
    file.
    """
    zip_filename = base_name + ".zip"
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)

    # If zipfile module is not available, try spawning an external
    # 'zip' command.
    if zipfile is None:
        if verbose:
            zipoptions = "-r"
        else:
            zipoptions = "-rq"

        try:
            spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
        except DistutilsExecError:
            # XXX really should distinguish between "couldn't find
            # external 'zip' command" and "zip failed".
            raise DistutilsExecError(
                (
                    "unable to create zip file '%s': "
                    "could neither import the 'zipfile' module nor "
                    "find a standalone zip utility"
                )
                % zip_filename
            )

    else:
        log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

        if not dry_run:
            try:
                zip = zipfile.ZipFile(
                    zip_filename, "w", compression=zipfile.ZIP_DEFLATED
                )
            except RuntimeError:
                zip = zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_STORED)

            with zip:
                if base_dir != os.curdir:
                    path = os.path.normpath(os.path.join(base_dir, ""))
                    zip.write(path, path)
                    log.info("adding '%s'", path)
                for dirpath, dirnames, filenames in os.walk(base_dir):
                    for name in dirnames:
                        path = os.path.normpath(os.path.join(dirpath, name, ""))
                        zip.write(path, path)
                        log.info("adding '%s'", path)
                    for name in filenames:
                        path = os.path.normpath(os.path.join(dirpath, name))
                        if os.path.isfile(path):
                            zip.write(path, path)
                            log.info("adding '%s'", path)

    return zip_filename
Example #38
def byte_compile(py_files, optimize=0, force=0, prefix=None, base_dir=None,
    verbose=1, dry_run=0, direct=None):
    """Byte-compile a collection of Python source files to .pyc
    files in a __pycache__ subdirectory.  'py_files' is a list
    of files to compile; any files that don't end in ".py" are silently
    skipped.  'optimize' must be one of the following:
      0 - don't optimize
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of
    timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'basedir'.  'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped).  You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would
    affect the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it.  Normally, you should let
    'byte_compile()' figure out to use direct compilation or not (see
    the source for details).  The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.
    """
    import subprocess
    if sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')
    if direct is None:
        direct = __debug__ and optimize == 0
    if not direct:
        try:
            from tempfile import mkstemp
            script_fd, script_name = mkstemp('.py')
        except ImportError:
            from tempfile import mktemp
            script_fd, script_name = None, mktemp('.py')
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, 'w')
            else:
                script = open(script_name, 'w')
            script.write('from distutils.util import byte_compile\nfiles = [\n'
                )
            script.write(',\n'.join(map(repr, py_files)) + ']\n')
            script.write(
                """
byte_compile(files, optimize=%r, force=%r,
             prefix=%r, base_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
"""
                 % (optimize, force, prefix, base_dir, verbose))
            script.close()
        cmd = [sys.executable]
        cmd.extend(subprocess._optim_args_from_interpreter_flags())
        cmd.append(script_name)
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name,), 'removing %s' % script_name,
            dry_run=dry_run)
    else:
        from py_compile import compile
        for file in py_files:
            if file[-3:] != '.py':
                continue
            if optimize >= 0:
                opt = '' if optimize == 0 else optimize
                cfile = importlib.util.cache_from_source(file, optimization=opt
                    )
            else:
                cfile = importlib.util.cache_from_source(file)
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError(
                        "invalid prefix: filename %r doesn't start with %r" %
                        (file, prefix))
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)
            cfile_base = os.path.basename(cfile)
            if direct:
                if force or newer(file, cfile):
                    log.info('byte-compiling %s to %s', file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug('skipping byte-compilation of %s to %s', file,
                        cfile_base)
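A minimal usage sketch for byte_compile() (standard library distutils.util version; the source file written here is a throwaway): it compiles the listed sources to bytecode (into __pycache__ on Python 3) unless dry_run is set:

import os
import tempfile
from distutils.util import byte_compile

src = os.path.join(tempfile.mkdtemp(), 'hello.py')
with open(src, 'w') as f:
    f.write('print("hello")\n')

# force=1 recompiles even if an up-to-date bytecode file already exists.
byte_compile([src], optimize=0, force=1, verbose=1)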
Example #39
File: setup.py  Project: yangkf1985/numba
 def run(self):
     spawn(['make', '-C', 'docs', 'html'])
Example #40
 def spawn(self, cmd, search_path=1, verbose=0, dry_run=0):
     """
     Perform any CUDA specific customizations before actually launching
     compile/link etc. commands.
     """
     if (sys.platform == 'darwin' and len(cmd) >= 2 and cmd[0] == 'nvcc' and
             cmd[1] == '--shared' and cmd.count('-arch') > 0):
         # Versions of distutils on OSX earlier than 2.7.9 inject
         # '-arch x86_64' which we need to strip while using nvcc for
         # linking
         while True:
             try:
                 index = cmd.index('-arch')
                 del cmd[index:index+2]
             except ValueError:
                 break
     elif self.compiler.compiler_type == 'msvc':
         # There are several things we need to do to change the commands
         # issued by MSVCCompiler into one that works with nvcc. In the end,
         # it might have been easier to write our own CCompiler class for
         # nvcc, as we're only interested in creating a shared library to
         # load with ctypes, not in creating an importable Python extension.
         # - First, we replace the cl.exe or link.exe call with an nvcc
         #   call. In case we're running Anaconda, we search cl.exe in the
         #   original search path we captured further above -- Anaconda
         #   inserts a MSVC version into PATH that is too old for nvcc.
         cmd[:1] = ['nvcc', '--compiler-bindir',
                    os.path.dirname(find_executable("cl.exe", PATH))
                    or cmd[0]]
         # - Secondly, we fix a bunch of command line arguments.
         for idx, c in enumerate(cmd):
             # create .dll instead of .pyd files
             #if '.pyd' in c: cmd[idx] = c = c.replace('.pyd', '.dll')  #20160601, by MrX
             # replace /c by -c
             if c == '/c': cmd[idx] = '-c'
             # replace /DLL by --shared
             elif c == '/DLL': cmd[idx] = '--shared'
             # remove --compiler-options=-fPIC
             elif '-fPIC' in c: del cmd[idx]
             # replace /Tc... by ...
             elif c.startswith('/Tc'): cmd[idx] = c[3:]
             # replace /Fo... by -o ...
             elif c.startswith('/Fo'): cmd[idx:idx+1] = ['-o', c[3:]]
             # replace /LIBPATH:... by -L...
             elif c.startswith('/LIBPATH:'): cmd[idx] = '-L' + c[9:]
             # replace /OUT:... by -o ...
             elif c.startswith('/OUT:'): cmd[idx:idx+1] = ['-o', c[5:]]
             # remove /EXPORT:initlibcudamat or /EXPORT:initlibcudalearn
             elif c.startswith('/EXPORT:'): del cmd[idx]
             # replace cublas.lib by -lcublas
             elif c == 'cublas.lib': cmd[idx] = '-lcublas'
         # - Finally, we pass on all arguments starting with a '/' to the
         #   compiler or linker, and have nvcc handle all other arguments
         if '--shared' in cmd:
             pass_on = '--linker-options='
             # we only need MSVCRT for a .dll, remove CMT if it sneaks in:
             cmd.append('/NODEFAULTLIB:libcmt.lib')
         else:
             pass_on = '--compiler-options='
         cmd = ([c for c in cmd if c[0] != '/'] +
                [pass_on + ','.join(c for c in cmd if c[0] == '/')])
         # For the future: Apart from the wrongly set PATH by Anaconda, it
         # would suffice to run the following for compilation on Windows:
         # nvcc -c -O -o <file>.obj <file>.cu
         # And the following for linking:
         # nvcc --shared -o <file>.dll <file1>.obj <file2>.obj -lcublas
         # This could be done by a NVCCCompiler class for all platforms.
     spawn(cmd, search_path, verbose, dry_run)
Example #41
--- ./setup.py.orig	2010-05-09 14:01:38.000000000 -0400
+++ ./setup.py	2010-12-21 17:34:04.929916874 -0500
@@ -8,7 +8,7 @@
 from distutils.core import setup, Extension
 from distutils.command.build import build    # nidsMaker
 from distutils.spawn import spawn            # nidsMaker.run()
-import os, os.path
+import os, os.path, shutil
 
 pathjoin = os.path.join
 
@@ -30,12 +30,14 @@
     def buildNids(self):
         # extremely crude package builder
         try:
-            os.stat(self.NIDSDIR)
+            os.stat(self.NIDSDIR + '/.done')
             return None           # assume already built
         except OSError:
             pass
 
-        spawn(['tar', '-zxf', self.NIDSTAR], search_path = 1)
+        spawn(['touch', self.NIDSDIR + '/.done'], search_path = 1)
+        shutil.copyfile("config.guess",pathjoin(self.NIDSDIR,"config.guess"))
+        shutil.copyfile("config.sub",pathjoin(self.NIDSDIR,"config.sub"))
         os.chdir(self.NIDSDIR)
         spawn([pathjoin('.','configure'), 'CFLAGS=-fPIC'])
         spawn(['make'], search_path = 1)
Example #42
def byte_compile(py_files,
                 optimize=0,
                 force=0,
                 prefix=None,
                 base_dir=None,
                 verbose=1,
                 dry_run=0,
                 direct=None):
    """Byte-compile a collection of Python source files to either .pyc
    or .pyo files in the same directory.  'py_files' is a list of files
    to compile; any files that don't end in ".py" are silently skipped.
    'optimize' must be one of the following:
      0 - don't optimize (generate .pyc)
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of
    timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'base_dir'.  'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped).  You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would
    affect the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it.  Normally, you should let
    'byte_compile()' figure out whether to use direct compilation or not (see
    the source for details).  The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.
    """
    # nothing is done if sys.dont_write_bytecode is True
    if sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')

    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in.  We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0.  If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing.  Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = (__debug__ and optimize == 0)

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        try:
            from tempfile import mkstemp
            (script_fd, script_name) = mkstemp(".py")
        except ImportError:
            from tempfile import mktemp
            (script_fd, script_name) = None, mktemp(".py")
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w")
            else:
                script = open(script_name, "w")

            script.write("""\
from distutils.util import byte_compile
files = [
""")

            # XXX would be nice to write absolute filenames, just for
            # safety's sake (script should be more robust in the face of
            # chdir'ing before running it).  But this requires abspath'ing
            # 'prefix' as well, and that breaks the hack in build_lib's
            # 'byte_compile()' method that carefully tacks on a trailing
            # slash (os.sep really) to make sure the prefix here is "just
            # right".  This whole prefix business is rather delicate -- the
            # problem is that it's really a directory, but I'm treating it
            # as a dumb string, so trailing slashes and so forth matter.

            #py_files = map(os.path.abspath, py_files)
            #if prefix:
            #    prefix = os.path.abspath(prefix)

            script.write(string.join(map(repr, py_files), ",\n") + "]\n")
            script.write("""
byte_compile(files, optimize=%r, force=%r,
             prefix=%r, base_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
""" % (optimize, force, prefix, base_dir, verbose))

            script.close()

        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, "-O")
        elif optimize == 2:
            cmd.insert(1, "-OO")
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name, ),
                "removing %s" % script_name,
                dry_run=dry_run)

    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now.  Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion.  Hey, it works!
    else:
        from py_compile import compile

        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue

            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            cfile = file + (__debug__ and "c" or "o")
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError, \
                          ("invalid prefix: filename %r doesn't start with %r"
                           % (file, prefix))
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)

            cfile_base = os.path.basename(cfile)
            if direct:
                if force or newer(file, cfile):
                    log.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug("skipping byte-compilation of %s to %s", file,
                              cfile_base)
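A minimal usage sketch, not part of the original module (paths are hypothetical; byte_compile is assumed importable, e.g. from distutils.util), showing how 'prefix' and 'base_dir' rewrite the source name recorded in the bytecode file:

import os
import tempfile
from distutils.util import byte_compile

tmp = tempfile.mkdtemp()
src = os.path.join(tmp, 'mod.py')
open(src, 'w').write('x = 1\n')

# strip the build directory from the recorded name, then prepend the eventual
# install directory, so tracebacks point at the installed file
byte_compile([src],
             optimize=0,
             prefix=tmp + os.sep,          # stripped from each source name
             base_dir='/usr/lib/python',   # prepended after stripping
             verbose=1, dry_run=0)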
Example #43
0
 def run(self):
     cwd = os.getcwd()
     run_cmake()
     os.chdir(cwd)
     ds.spawn(['./copy_cores.sh'])
     _build.build.run(self)
Example #44
0
def run_swig(files,
             dir,
             gendir,
             package,
             USE_SWIG,
             force,
             swig_args,
             swig_deps=[],
             add_under=False):
    """Run SWIG the way I want it done"""

    if USE_SWIG and not os.path.exists(os.path.join(dir, gendir)):
        os.mkdir(os.path.join(dir, gendir))

    sources = []

    if add_under: pre = '_'
    else: pre = ''

    for file in files:
        basefile = os.path.splitext(file)[0]
        i_file = os.path.join(dir, file)
        py_file = os.path.join(dir, gendir, pre + basefile + '.py')
        cpp_file = os.path.join(dir, gendir, pre + basefile + '_wrap.cpp')

        if add_under:
            interface = ['-interface', '_' + basefile + '_']
        else:
            interface = []

        sources.append(cpp_file)

        if not cleaning and USE_SWIG:
            for dep in swig_deps:
                # this may fail for external builds, but it's not
                # a fatal error, so keep going.
                try:
                    if newer(dep, py_file) or newer(dep, cpp_file):
                        force = 1
                        break
                except:
                    pass

            if force or newer(i_file, py_file) or newer(i_file, cpp_file):
                ## we need forward slashes here, even on win32
                #cpp_file = opj(cpp_file) #'/'.join(cpp_file.split('\\'))
                #i_file = opj(i_file)     #'/'.join(i_file.split('\\'))

                if BUILD_RENAMERS:
                    xmltemp = tempfile.mktemp('.xml')

                    # First run swig to produce the XML file, adding
                    # an extra -D that prevents the old rename
                    # directives from being used
                    cmd = [ swig_cmd ] + swig_args + \
                          [ '-DBUILDING_RENAMERS', '-xmlout', xmltemp ] + \
                          ['-I'+dir, '-o', cpp_file, i_file]
                    msg(' '.join(cmd))
                    spawn(cmd)

                    # Next run build_renamers to process the XML
                    myRenamer = BuildRenamers()
                    myRenamer.run(dir, pre + basefile, xmltemp)
                    os.remove(xmltemp)

                # Then run swig for real
                cmd = [ swig_cmd ] + swig_args + interface + \
                      ['-I'+dir, '-o', cpp_file, i_file]
                msg(' '.join(cmd))
                spawn(cmd)

        # copy the generated python file to the package directory
        copy_file(py_file, package, update=not force, verbose=0)
        CLEANUP.append(opj(package, os.path.basename(py_file)))

    return sources
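The timestamp checks above are the heart of run_swig: any declared dependency newer than a generated file forces a rebuild, and a missing dependency is ignored. A self-contained sketch of that pattern (the helper name, file arguments and the swig command line are illustrative assumptions, not the build script's actual interface):

from distutils.dep_util import newer
from distutils.errors import DistutilsFileError
from distutils.spawn import spawn

def maybe_regenerate(i_file, outputs, deps=(), force=0):
    for dep in deps:
        try:
            # a dependency newer than any output forces regeneration
            if any(newer(dep, out) for out in outputs):
                force = 1
                break
        except DistutilsFileError:
            pass  # missing dependency: not fatal, keep going (as above)
    if force or any(newer(i_file, out) for out in outputs):
        # regenerate only when needed; '-o' names the generated wrapper
        spawn(['swig', '-python', '-c++', '-o', outputs[0], i_file])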
Example #45
0
 def merge(self, po_file, key_file):
     log.info("Updating %s" % po_file)
     cmd = ['msgmerge', '-N', '--backup=none', '-U', po_file, key_file]
     spawn(cmd)
Example #46
0
 def spawn(self, cmd):
     spawn(cmd, dry_run=self.dry_run)
Example #47
0
def build_library_files(dry_run, pic=False):
    cmd = [sys.executable, os.path.join(here_dir, 'ffi', 'build.py')]
    if pic:
        os.environ['CXXFLAGS'] = os.environ.get('CXXFLAGS', '') + ' -fPIC'
    spawn(cmd, dry_run=dry_run)
Example #48
0
 def spawn(self, cmd, search_path=1, level=1):
     """Spawn an external command respecting dry-run flag."""
     from distutils.spawn import spawn
     spawn(cmd, search_path, dry_run=self.dry_run)
Example #49
0
def byte_compile(py_files,
                 optimize=0,
                 force=0,
                 prefix=None,
                 base_dir=None,
                 verbose=1,
                 dry_run=0,
                 direct=None):
    if sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')
    if direct is None:
        direct = __debug__ and optimize == 0
    if not direct:
        try:
            from tempfile import mkstemp
            script_fd, script_name = mkstemp('.py')
        except ImportError:
            from tempfile import mktemp
            script_fd, script_name = None, mktemp('.py')

        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, 'w')
            else:
                script = open(script_name, 'w')
            script.write(
                'from distutils.util import byte_compile\nfiles = [\n')
            script.write(string.join(map(repr, py_files), ',\n') + ']\n')
            script.write(
                '\nbyte_compile(files, optimize=%r, force=%r,\n             prefix=%r, base_dir=%r,\n             verbose=%r, dry_run=0,\n             direct=1)\n'
                % (optimize, force, prefix, base_dir, verbose))
            script.close()
        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, '-O')
        elif optimize == 2:
            cmd.insert(1, '-OO')
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name, ),
                'removing %s' % script_name,
                dry_run=dry_run)
    else:
        from py_compile import compile
        for file in py_files:
            if file[-3:] != '.py':
                continue
            cfile = file + (__debug__ and 'c' or 'o')
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError, "invalid prefix: filename %r doesn't start with %r" % (
                        file, prefix)
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)
            cfile_base = os.path.basename(cfile)
            if direct:
                if force or newer(file, cfile):
                    log.info('byte-compiling %s to %s', file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug('skipping byte-compilation of %s to %s', file,
                              cfile_base)

    return
Example #50
0
        sys.exit(1)
    keyfile = os.path.join(args.keydir, os.path.basename(args.keydir) + ".key")
    if not os.path.exists(keyfile):
        print("Cannot find private key: %s" % (keyfile))
        sys.exit(1)

    with open(args.filename, 'rb') as fh:
        subordinate = fh.read()
        fit_io = StringIO.StringIO(subordinate)
        dtb = pyfdt.FdtBlobParse(fit_io)
        fdt = dtb.to_fdt()

    sub_image = fdt.resolve_path('/images/fdt@1/signature@1/key-name-hint')
    if sub_image is None:
        print("This subordinate store does not contain a signature node")
        sys.exit(1)
    requested_key_name = os.path.basename(args.keydir)
    sub_image.strings = [requested_key_name]
    
    subordinate_source = fdt.to_dts()
    with tempfile.NamedTemporaryFile() as tmp:
        tmp.write(subordinate_source)
        tmp.flush()

        print(" ".join(
            [args.mkimage, "-f", tmp.name, "-k", args.keydir, "-r", args.output]))
        spawn.spawn(
            [args.mkimage, "-f", tmp.name, "-k", args.keydir, "-r", args.output])

    print("Wrote signed subordinate certificate store: %s" % (args.output))
Example #51
0
def run_process(args):
    spawn(args)
Example #52
0
 def configure_omim(self):
     with chdir(OMIM_ROOT):
         spawn(['./configure.sh'])
Example #53
0
    def upload_file(self, command, pyversion, filename):
        schema, netloc, url, params, query, fragments = urlparse.urlparse(
            self.repository)
        if params or query or fragments:
            raise AssertionError('Incompatible url %s' % self.repository)
        if schema not in ('http', 'https'):
            raise AssertionError('unsupported schema ' + schema)
        if self.sign:
            gpg_args = ['gpg', '--detach-sign', '-a', filename]
            if self.identity:
                gpg_args[2:2] = ['--local-user', self.identity]
            spawn(gpg_args, dry_run=self.dry_run)
        f = open(filename, 'rb')
        try:
            content = f.read()
        finally:
            f.close()

        meta = self.distribution.metadata
        data = {
            ':action': 'file_upload',
            'protcol_version': '1',
            'name': meta.get_name(),
            'version': meta.get_version(),
            'content': (os.path.basename(filename), content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),
            'metadata_version': '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes()
        }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment
        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + '.asc',
                                     open(filename + '.asc').read())
        auth = 'Basic ' + standard_b64encode(self.username + ':' +
                                             self.password)
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if isinstance(value, tuple):
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ''
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"' % key)
                body.write(fn)
                body.write('\n\n')
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')

        body.write(end_boundary)
        body.write('\n')
        body = body.getvalue()
        self.announce('Submitting %s to %s' % (filename, self.repository),
                      log.INFO)
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s' % boundary,
            'Content-length': str(len(body)),
            'Authorization': auth
        }
        request = Request(self.repository, data=body, headers=headers)
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
            if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
                self.announce(msg, log.INFO)
        except socket.error as e:
            self.announce(str(e), log.ERROR)
            return
        except HTTPError as e:
            status = e.code
            reason = e.msg

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (status, reason),
                          log.ERROR)
Example #54
0
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args, dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename, 'rb')
        try:
            content = f.read()
        finally:
            f.close()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protcol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename), content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version': '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
        }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename + ".asc").read())

        # set up the authentication
        auth = "Basic " + standard_b64encode(self.username + ":" +
                                             self.password)

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if isinstance(value, tuple):
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""

                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"' % key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository),
                      log.INFO)

        # build the Request
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s' % boundary,
            'Content-length': str(len(body)),
            'Authorization': auth
        }

        request = Request(self.repository, data=body, headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
            if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
                self.announce(msg, log.INFO)
        except socket.error as e:
            self.announce(str(e), log.ERROR)
            raise
Example #55
0
 def spawn(self, cmd, **kwargs):
     spawn(cmd, dry_run=self.dry_run, **kwargs)
Example #56
0
 def run(self):
     spawn([
         sys.executable, 'devscripts/make_lazy_extractors.py',
         'yt_dlp/extractor/lazy_extractors.py'
     ],
           dry_run=self.dry_run)
Example #57
0
def build_library_files(dry_run):
    cmd = [sys.executable, os.path.join(here_dir, 'ffi', 'build.py')]
    spawn(cmd, dry_run=dry_run)
Example #58
0
def check_minimum_libvirt_version():
    spawn([
        get_pkgcfg(), "--print-errors",
        "--atleast-version=%s" % MIN_LIBVIRT, "libvirt"
    ])
Example #59
0
 def run(self):
     for po_file in Utils.find_files_of_type('po', '*.po'):
         cmd = ['msguniq', po_file, '-o', po_file]
         spawn(cmd)
Example #60
0
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args, dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename, 'rb')
        try:
            content = f.read()
        finally:
            f.close()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protocol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename), content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': hashlib.md5(content).hexdigest(),

            # additional meta-data
            'metadata_version': '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
        }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename + ".asc", "rb").read())

        # set up the authentication
        user_pass = (self.username + ":" + self.password).encode('ascii')
        # The exact encoding of the authentication string is debated.
        # Anyway PyPI only accepts ascii for both username or password.
        auth = "Basic " + standard_b64encode(user_pass).decode('ascii')

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = b'\r\n--' + boundary.encode('ascii')
        end_boundary = sep_boundary + b'--\r\n'
        body = io.BytesIO()
        for key, value in data.items():
            title = '\r\nContent-Disposition: form-data; name="%s"' % key
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    title += '; filename="%s"' % value[0]
                    value = value[1]
                else:
                    value = str(value).encode('utf-8')
                body.write(sep_boundary)
                body.write(title.encode('utf-8'))
                body.write(b"\r\n\r\n")
                body.write(value)
        body.write(end_boundary)
        body = body.getvalue()

        msg = "Submitting %s to %s" % (filename, self.repository)
        self.announce(msg, log.INFO)

        # build the Request
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s' % boundary,
            'Content-length': str(len(body)),
            'Authorization': auth,
        }

        request = Request(self.repository, data=body, headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
        except HTTPError as e:
            status = e.code
            reason = e.msg
        except OSError as e:
            self.announce(str(e), log.ERROR)
            raise

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
            if self.show_response:
                text = self._read_pypi_response(result)
                msg = '\n'.join(('-' * 75, text, '-' * 75))
                self.announce(msg, log.INFO)
        else:
            msg = 'Upload failed (%s): %s' % (status, reason)
            self.announce(msg, log.ERROR)
            raise DistutilsError(msg)
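All three upload_file variants above assemble the multipart/form-data body by hand. A condensed, self-contained sketch of that assembly, following the byte-oriented variant directly above (the helper name build_multipart is not from the original code):

import io

def build_multipart(data, boundary):
    # one CRLF-delimited part per field; (filename, content) tuples become
    # file parts; the body ends with the closing boundary marker
    sep = b'\r\n--' + boundary.encode('ascii')
    body = io.BytesIO()
    for key, value in data.items():
        values = value if isinstance(value, list) else [value]
        for v in values:
            title = '\r\nContent-Disposition: form-data; name="%s"' % key
            if isinstance(v, tuple):
                title += '; filename="%s"' % v[0]
                v = v[1]
            if not isinstance(v, bytes):
                v = str(v).encode('utf-8')
            body.write(sep)
            body.write(title.encode('utf-8'))
            body.write(b'\r\n\r\n')
            body.write(v)
    body.write(sep + b'--\r\n')
    return body.getvalue()

The returned bytes are ready to be posted with the 'Content-type: multipart/form-data; boundary=...' and 'Content-length' headers built in the code above.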