Example #1
def compareDirectories(new, old, relpath = ""):
    log("XXX new " + new + " old "+old + " relpath "+relpath)
    newroot, newdirs, newfiles = next(os.walk(new))
    oldroot, olddirs, oldfiles = next(os.walk(old))
    log("new dirs: " + ",".join(newdirs))
    all_dirs = list(set(newdirs) | set(olddirs))
    log("all dirs: " + ",".join(all_dirs))
    common_dirs = list(set(newdirs) & set(olddirs))
    log("common dirs: "+ ",".join(common_dirs))
    missing_new_dirs = list(set(newdirs) - set(common_dirs))
    log("missing new dirs: "+ ",".join(missing_new_dirs))
    compareFiles(newroot,oldroot,newfiles, oldfiles, relpath)    
    for missing_dir in missing_new_dirs:
        full_path = os.path.join(diff_root,relpath)
        full_path = os.path.join(full_path,missing_dir)
        log("full " + full_path)
        mkpath(full_path)
        src = os.path.join(newroot,missing_dir)
        log("missing dir src "+src)
        copy_tree(src, full_path)
    for common_dir in common_dirs:
        next_new_dir = os.path.join(newroot,common_dir)
        next_old_dir = os.path.join(oldroot,common_dir)
        newrelpath = os.path.join(relpath, common_dir)
        log("newrelpath "+newrelpath)
        compareDirectories(next_new_dir, next_old_dir, newrelpath)        
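A minimal driver for the recursive diff above might look like the sketch below; diff_root, log(), and compareFiles() are module-level names the function assumes, so the stand-ins here are illustrative only.

# Hypothetical setup; the real module defines these names itself.
import os
from distutils.dir_util import mkpath, copy_tree

diff_root = "/tmp/diff_output"   # destination for new and changed content

def log(msg):
    print(msg)

mkpath(diff_root)
compareDirectories("/path/to/new", "/path/to/old")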
Example #2
    def createMoFile(self):
        try:
            mkpath(self.LOCALEDIR)
            copy_file(self.POFILEPATH, self.LOCALEDIR)
            cwd = os.getcwd()
            os.chdir(self.LOCALEDIR)

            if self.USE_MSGFMT_BINARY:
                # The msgfmt binary that ships as part of GNU gettext tools
                # is more robust than the Python version and includes
                # error checking capabilities.
                moFile = self.POFILE[:-2] + "mo"
                exp = ["msgfmt", "-c", "--check-accelerators", "-o%s" % moFile,
                       self.POFILE]

            else:
                # The msgfmt gettext binary is not installed by default on
                # Windows and OS X. The Python version of msgfmt is included
                # however with Chandler.
                msgfmt = os.path.join(self.CHANDLERHOME, "tools", "msgfmt.py")
                exp = [self.PYTHON,  msgfmt, self.POFILE]

            result = build_lib.runCommand(exp, timeout=TIMEOUT, logger=ignore, ignorepreexec=True)
            os.chdir(cwd)

            if result != 0:
                raise Exception(' '.join(exp) + ' failed with error code %d' % result)

        except Exception, e:
            self.raiseError("Unable to create mo file from '%s': %s." % (self.POFILEPATH, e))
Example #3
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True,
                 mode='w'):
    """Create a zip file from all the files under 'base_dir'.  The output
    zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
    Python module (if available) or the InfoZIP "zip" utility (if installed
    and found on the default search path).  If neither tool is available,
    raises DistutilsExecError.  Returns the name of the output zip file.
    """
    import zipfile

    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

    def visit(z, dirname, names):
        for name in names:
            path = os.path.normpath(os.path.join(dirname, name))
            if os.path.isfile(path):
                p = path[len(base_dir) + 1:]
                if not dry_run:
                    z.write(path, p)
                log.debug("adding '%s'" % p)

    compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    if not dry_run:
        z = zipfile.ZipFile(zip_filename, mode, compression=compression)
        for dirname, dirs, files in os.walk(base_dir):
            visit(z, dirname, files)
        z.close()
    else:
        for dirname, dirs, files in os.walk(base_dir):
            visit(None, dirname, files)
    return zip_filename
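A short usage sketch (paths hypothetical; relies on the module-level mkpath and log imports the function already uses):

# Dry run: logs what would be added without writing anything.
make_zipfile("dist/site.zip", "site", dry_run=1)
# Real run; compress=True (the default) selects ZIP_DEFLATED.
archive = make_zipfile("dist/site.zip", "site")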
Example #4
def export_site(build_dir, static_files_dir, template_dir, locale_dir, locale_domain, template_vars):
    if os.path.exists(build_dir):
        shutil.rmtree(build_dir)
    print "copying static files"
    shutil.copytree(static_files_dir, build_dir)
    print "generating locale redirectors"
    redirector = LocaleRedirectorServer(template_dir)
    for dirpath, dirnames, filenames in os.walk(template_dir):
        files = [os.path.join(dirpath, filename)[len(template_dir) + 1 :] for filename in filenames]
        for relpath in files:
            print "  %s" % (relpath)
            abspath = os.path.join(template_dir, relpath)
            env = {"PATH_INFO": "/%s" % relpath}
            if relpath.endswith("index.html"):
                env["PATH_INFO"] = env["PATH_INFO"][: -len("index.html")]
            mimetype, contents = redirector.handle_file_as_jinja2_template(env, template_dir, abspath)
            dest_path = os.path.join(build_dir, relpath)
            mkpath(os.path.dirname(dest_path))
            open(dest_path, "w").write(contents)
    server = LocalizedTemplateServer(template_dir, locale_dir, locale_domain, template_vars)
    locales = find_locales(locale_dir, locale_domain, NULL_LOCALE)
    for hyphenated_locale in locales:
        print "processing localization '%s'" % hyphenated_locale
        env = {}
        server.maybe_apply_translation(env, unhyphenate(hyphenated_locale))
        for dirpath, dirnames, filenames in os.walk(template_dir):
            files = [os.path.join(dirpath, filename)[len(template_dir) + 1 :] for filename in filenames]
            for relpath in files:
                print "  %s/%s" % (hyphenated_locale, relpath)
                abspath = os.path.join(template_dir, relpath)
                mimetype, contents = server.handle_file_as_jinja2_template(env, template_dir, abspath)
                dest_path = os.path.join(build_dir, hyphenated_locale, relpath)
                mkpath(os.path.dirname(dest_path))
                open(dest_path, "w").write(contents)
    print "done.\n\nyour new static site is located at:\n%s" % build_dir
Example #5
def copydir_run_2to3(src, dest, template=None, fixer_names=None,
                     options=None, explicit=None):
    """Recursively copy a directory, only copying new and changed files,
    running run_2to3 over all newly copied Python modules afterward.

    If you give a template string, it's parsed like a MANIFEST.in.
    """
    from distutils.dir_util import mkpath
    from distutils.file_util import copy_file
    from distutils.filelist import FileList
    filelist = FileList()
    curdir = os.getcwd()
    os.chdir(src)
    try:
        filelist.findall()
    finally:
        os.chdir(curdir)
    filelist.files[:] = filelist.allfiles
    if template:
        for line in template.splitlines():
            line = line.strip()
            if not line: continue
            filelist.process_template_line(line)
    copied = []
    for filename in filelist.files:
        outname = os.path.join(dest, filename)
        mkpath(os.path.dirname(outname))
        res = copy_file(os.path.join(src, filename), outname, update=1)
        if res[1]: copied.append(outname)
    run_2to3([fn for fn in copied if fn.lower().endswith('.py')],
             fixer_names=fixer_names, options=options, explicit=explicit)
    return copied
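A hedged usage sketch; per the docstring, the template string is parsed like a MANIFEST.in (the paths and package name here are hypothetical):

# Copy Python sources and the README, then run 2to3 over whatever
# copy_file actually updated.
template = """
recursive-include mypkg *.py
include README.txt
"""
copied = copydir_run_2to3('src', 'build/py3', template=template)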
Example #6
def copy_static_scaffold(output_dir):
    source_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), 'atramhasis', 'static'))
    dest_dir = os.path.join(os.path.dirname(__file__), 'atramhasis', 'scaffolds', output_dir, '+package+', 'static')
    dir_util.copy_tree(os.path.join(source_dir, 'css'), os.path.join(dest_dir, 'css'), update=True)
    dir_util.copy_tree(os.path.join(source_dir, 'img'), os.path.join(dest_dir, 'img'), update=True)
    dir_util.copy_tree(os.path.join(source_dir, 'js'), os.path.join(dest_dir, 'js'), update=True)
    dir_util.copy_tree(os.path.join(source_dir, 'scss', 'atramhasis'), os.path.join(dest_dir, 'scss', 'atramhasis'),
                       update=True)
    dir_util.mkpath(os.path.join(dest_dir, 'admin'))
    file_util.copy_file(
        os.path.join(source_dir, 'admin', '.bowerrc'),
        os.path.join(dest_dir, 'admin', '.bowerrc'),
        update=True
    )
    file_util.copy_file(
        os.path.join(source_dir, 'admin', 'bower.json'),
        os.path.join(dest_dir, 'admin', 'bower.json'),
        update=True
    )
    file_util.copy_file(
        os.path.join(source_dir, 'admin', 'Gruntfile.js'),
        os.path.join(dest_dir, 'admin', 'Gruntfile.js'),
        update=True
    )
    file_util.copy_file(
        os.path.join(source_dir, 'admin', 'package.json'),
        os.path.join(dest_dir, 'admin', 'package.json'),
        update=True
    )
Example #7
  def constructPreBuiltHTMITFakeroot(self):
    """
    Construct fakeroot from prebuilt htm-it

    :returns: SHA of the products repo in the fakeroot
    :rtype: tuple
    """

    config = self.config
    logger = self.logger
    productsDirectory = self.productsDirectory
    logger.debug("Creating %s", productsDirectory)
    mkpath(productsDirectory)
    copy_tree(config.productsDir, productsDirectory)
    iteration = git.getCommitCount(productsDirectory, logger=logger)

    with changeToWorkingDir(productsDirectory):
      actualSHA = git.getCurrentSha(logger=logger)

    # Set extra python path
    self.setPythonPath()

    # Clean HTM-IT Scripts
    self.cleanScripts()

    # Purge anything not whitelisted
    self.purgeBlacklistedStuff()

    return (iteration, actualSHA)
Example #8
def generate_default_shinken_file():
    # The default file must have good values for the directories:
    # etc, var and where to push scripts that launch the app.
    templatefile = "bin/default/shinken.in"
    build_base = 'build'
    outfile = os.path.join(build_base, "bin/default/shinken")

    #print('generating %s from %s', outfile, templatefile)

    mkpath(os.path.dirname(outfile))
    
    bin_path = default_paths['bin']

    # Read the template file
    f = open(templatefile)
    buf = f.read()
    f.close()
    # substitute
    buf = buf.replace("$ETC$", default_paths['etc'])
    buf = buf.replace("$VAR$", default_paths['var'])
    buf = buf.replace("$RUN$", default_paths['run'])
    buf = buf.replace("$LOG$", default_paths['log'])
    buf = buf.replace("$SCRIPTS_BIN$", bin_path)
    # write out the new file
    f = open(outfile, "w")
    f.write(buf)
    f.close()
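For context, the default_paths mapping this script substitutes from is assumed to look roughly like the following (values are illustrative, not Shinken's actual defaults):

# Hypothetical mapping consumed by generate_default_shinken_file().
default_paths = {
    'bin': '/usr/bin',
    'etc': '/etc/shinken',
    'var': '/var/lib/shinken',
    'run': '/var/run/shinken',
    'log': '/var/log/shinken',
}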
Example #9
 def distutils_extension(self, tmpdir='build', verbose=True):
     from distutils.dir_util import mkpath
     from .recompiler import recompile
     #
     if not hasattr(self, '_assigned_source'):
         if hasattr(self, 'verifier'):     # fallback, 'tmpdir' ignored
             return self.verifier.get_extension()
         raise ValueError("set_source() must be called before"
                          " distutils_extension()")
     module_name, source, source_extension, kwds = self._assigned_source
     if source is None:
         raise TypeError("distutils_extension() is only for C extension "
                         "modules, not for dlopen()-style pure Python "
                         "modules")
     mkpath(tmpdir)
     ext, updated = recompile(self, module_name,
                              source, tmpdir=tmpdir, extradir=tmpdir,
                              source_extension=source_extension,
                              call_c_compiler=False, **kwds)
     if verbose:
         if updated:
             sys.stderr.write("regenerated: %r\n" % (ext.sources[0],))
         else:
             sys.stderr.write("not modified: %r\n" % (ext.sources[0],))
     return ext
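Typical use follows cffi's out-of-line API: declare the C source with set_source(), then pass the returned Extension to setup(). A sketch (module name hypothetical):

import cffi

ffi = cffi.FFI()
ffi.cdef("double cos(double x);")
ffi.set_source("_math_cffi", "#include <math.h>", libraries=["m"])
ext = ffi.distutils_extension(tmpdir='build')
# setup(ext_modules=[ext], ...) would then build it.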
Example #10
def save_state(state, data_dir):
    state_file = state_file_path(data_dir)
    new_state = "{0}".format(state)
    print('Saving migration state "{0}" to {1}'.format(new_state, state_file))
    mkpath(os.path.dirname(state_file))
    with open(state_file, "w") as f:
        f.write(new_state)
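state_file_path() is a helper defined elsewhere; a minimal sketch of how the pair might fit together (the path layout is an assumption):

import os
from distutils.dir_util import mkpath

def state_file_path(data_dir):          # hypothetical companion helper
    return os.path.join(data_dir, "migration", "state")

save_state("step-2-complete", "/var/lib/myapp")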
Example #11
def the_end(site_post):
    for site_id, li in site_post.iteritems():
        for i in Q.SiteHost.equal_to("site", DB.Site.create_without_data(site_id)).find():
            host = i.get('host')
            path = join(CONFIG.SITEMAP_PATH, host) 
            mkpath(join(path, "sitemap"))
            sitemap(path, host, li)
Example #12
def bootstrap():
    # Set PETSC_DIR and PETSC_ARCH
    PETSC_DIR  = os.path.abspath(os.getcwd())
    PETSC_ARCH = get_platform() + '-python'
    os.environ['PETSC_DIR']  = PETSC_DIR
    os.environ['PETSC_ARCH'] = PETSC_ARCH
    sys.path.insert(0, os.path.join(PETSC_DIR, 'config'))
    # Generate package __init__.py file
    from distutils.dir_util import mkpath
    pkgdir = os.path.join('config', 'pypi')
    if not os.path.exists(pkgdir): mkpath(pkgdir)
    pkgfile = os.path.join(pkgdir, '__init__.py')
    fh = open(pkgfile, 'wt')
    fh.write(init_py)
    fh.close()
    # Simple-minded lookup for MPI and mpi4py
    mpi4py = mpicc = None
    try:
        import mpi4py
        conf = mpi4py.get_config()
        mpicc = conf.get('mpicc')
    except ImportError: # mpi4py is not installed
        mpi4py = None
        mpicc = os.environ.get('MPICC') or find_executable('mpicc')
    except AttributeError: # mpi4py is too old
        pass
    if ('setuptools' in sys.modules):
        metadata['zip_safe'] = False
        if not mpi4py and mpicc:
            metadata['install_requires']= ['mpi4py>=1.2.2']
Example #13
def build_data_filename(job, filetype):
    """
    Generates an absolute path to a file for the job and type.
    """
    path = '/tmp/%s/data%s' % (job, EXTENSIONS[filetype])
    mkpath(os.path.dirname(path))
    return os.path.abspath(path)
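EXTENSIONS is assumed to map a file type to its suffix; a usage sketch:

# Hypothetical mapping and call; creates /tmp/job42/ on demand and
# returns '/tmp/job42/data.csv'.
EXTENSIONS = {'csv': '.csv', 'parquet': '.parquet'}
path = build_data_filename('job42', 'csv')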
Example #14
 def test_mkpath_with_custom_mode(self):
     umask = os.umask(2)
     os.umask(umask)
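     # 448 == 0o700 and 365 == 0o555; the created mode is masked by ~umask.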
     mkpath(self.target, 448)
     self.assertEqual(stat.S_IMODE(os.stat(self.target).st_mode), 448 & ~umask)
     mkpath(self.target2, 365)
     self.assertEqual(stat.S_IMODE(os.stat(self.target2).st_mode), 365 & ~umask)
Example #15
def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.  'compress' must be "gzip" (the default), "compress",
    "bzip2", or None.  Both "tar" and the compression utility named by
    'compress' must be on the default program search path, so this is
    probably Unix-specific.  The output tar file will be named 'base_dir' +
    ".tar", possibly plus the appropriate compression extension (".gz",
    ".bz2" or ".Z").  Return the output filename.
    """
    # XXX GNU tar 1.13 has a nifty option to add a prefix directory.
    # It's pretty new, though, so we certainly can't require it --
    # but it would be nice to take advantage of it to skip the
    # "create a tree of hardlinks" step!  (Would also be nice to
    # detect GNU tar to use its 'z' option and save a step.)

    compress_ext = {"gzip": ".gz", "bzip2": ".bz2", "compress": ".Z"}

    # flags for compression program, each element of list will be an argument
    compress_flags = {"gzip": ["-f9"], "compress": ["-f"], "bzip2": ["-f9"]}

    if compress is not None and compress not in compress_ext.keys():
        raise ValueError, "bad value for 'compress': must be None, 'gzip', 'bzip2', or 'compress'"

    archive_name = base_name + ".tar"
    mkpath(os.path.dirname(archive_name), dry_run=dry_run)
    cmd = ["tar", "-cf", archive_name, base_dir]
    spawn(cmd, dry_run=dry_run)

    if compress:
        spawn([compress] + compress_flags[compress] + [archive_name], dry_run=dry_run)
        return archive_name + compress_ext[compress]
    else:
        return archive_name
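Usage sketch (requires tar and the chosen compressor on the PATH, per the docstring):

name = make_tarball("dist/backup", "data")               # -> 'dist/backup.tar.gz'
plain = make_tarball("dist/raw", "data", compress=None)  # -> 'dist/raw.tar'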
Example #16
    def build_extension(self, ext, copy=True):

        self.announce("building '%s' in %s"%(ext[1], ext[0]))

        if os.name == 'nt': 
            # same as in config.py here: even on 'nt' we have to
            # use posix paths because we run in a cygwin shell at this point
            path = self.build_temp.replace('\\', '/') + '/' + ext[0]
            temp_target = self.build_temp.replace('\\', '/') + '/' + ext[0]
        else:
            path = os.path.join(self.build_temp, ext[0])
            temp_target = os.path.join(self.build_temp, ext[0])
        
        make = os.environ.get('MAKE', 'make')

        command = '%s -C "%s" %s'%(make, path, ext[1])
        spawn(['sh', '-c', command], self.verbose, self.dry_run)

        #The extension may not be compiled. For now just skip it.
        if copy and os.path.isfile(os.path.join(temp_target, ext[1])):
            
            if self.inplace: build_path = ext[0]
            else: build_path = os.path.join(self.build_lib, ext[0])            
            mkpath(build_path, 0777, self.verbose, self.dry_run)
            copy_file(os.path.join(path, ext[1]),
                      os.path.join(build_path, ext[1]),
                      1, 1, 0, None, self.verbose, self.dry_run)
Example #17
def make_zipfile(base_name, base_dir, verbose = 0, dry_run = 0):
    try:
        import zipfile
    except ImportError:
        zipfile = None

    zip_filename = base_name + '.zip'
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
    if zipfile is None:
        if verbose:
            zipoptions = '-r'
        else:
            zipoptions = '-rq'
        try:
            spawn(['zip',
             zipoptions,
             zip_filename,
             base_dir], dry_run=dry_run)
        except DistutilsExecError:
            raise DistutilsExecError, "unable to create zip file '%s': could neither import the 'zipfile' module nor find a standalone zip utility" % zip_filename

    else:
        log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
        if not dry_run:
            zip = zipfile.ZipFile(zip_filename, 'w', compression=zipfile.ZIP_DEFLATED)
            for dirpath, dirnames, filenames in os.walk(base_dir):
                for name in filenames:
                    path = os.path.normpath(os.path.join(dirpath, name))
                    if os.path.isfile(path):
                        zip.write(path, path)
                        log.info("adding '%s'" % path)

            zip.close()
    return zip_filename
Example #18
    def _move(self, instance=None, **kwargs):
        if hasattr(instance, 'get_upload_to'):
            src = getattr(instance, self.attname)
            if src:
                src = str(src)
                m = re.match(r"%s/(.*)" % self.upload_to, src)
                if m:
                    upload_path = instance.get_upload_to(self.attname)
                    dst = "%s%s" % (
                        upload_path, 
                        m.groups()[0]
                        )
                    basedir = os.path.join(
                      settings.MEDIA_ROOT, 
                      os.path.dirname(dst)
                    )
                    fromdir = os.path.join(
                      settings.MEDIA_ROOT, 
                      src
                    )
                    mkpath(basedir)
                    shutil.move(fromdir, 
                      os.path.join(basedir, 
                                   m.groups()[0])
                    )
                    setattr(instance, self.attname, dst)
                    instance.save()

                    if self.signal:
                        self.signal(instance)
Example #19
    def __init__(self, *args, **kwargs):
        super(Flask, self).__init__(*args, **kwargs)

        self.request_class = TrexRequest
        # Add trex/templates to the jinja2 search path
        self.jinja_loader.searchpath.append(os.path.join(os.path.dirname(__file__), 'templates'))

        self.settings.readfp(codecs.open(os.path.join(self.root_path, '..', 'trex', 'base.ini'), 'r', 'utf8'))
        self.settings.readfp(codecs.open(os.path.join(self.root_path, 'default.ini'), 'r', 'utf8'))
        self.check_default_config()
        self.settings.readfp(codecs.open(os.path.join(self.root_path, 'local.ini'), 'r', 'utf8'))
        self.check_local_config()

        # Set up logging directory target. Later this can be configured
        self.log_directory = os.path.abspath(os.path.join(self.root_path, '..', 'logs'))
        mkpath(self.log_directory)

        self.init_jinja()
        self.exception_reporter = FlaskExceptionReporter(self)

        FlaskCDN(self)

        trex.support.model.settings = self.settings

        self.init_application()

        if self.settings.getboolean('server', 'opcode_audit'):
            from trex.support.audit import TrexAudit
            TrexAudit(self)
Example #20
    def __init__(self):
        self.config_path = os.path.join(
            os.path.expanduser("~"), '.config', 'congabonga')
        dir_util.mkpath(self.config_path)

        self.ui = CongaUi()
        self.ui.connect('tab-changed', self.on_ui_tab_changed)
        self.ui.connect('player-seek', self.on_ui_player_seek)
        self.ui.connect('player-pause', self.on_ui_player_pause)
        self.ui.connect('player-next', self.on_ui_player_next)
        self.ui.connect('player-previous', self.on_ui_player_previous)
        self.ui.connect('player-play', self.on_ui_player_play)
        self.ui.connect('playlist-activated', self.on_ui_playlist_activated)
        self.ui.connect('artist-activated', self.on_ui_artist_activated)
        self.ui.connect('album-activated', self.on_ui_album_activated)
        self.ui.connect('search', self.on_ui_search)
        self.ui.connect('library-switch-toggled', self.on_ui_library_switch_toggled)

        self.library = Library(self.config_path)
        self.library.connect('updated', self.on_library_updated)
        self.library.connect('plugins-loaded', self.on_library_plugins_loaded)
        self.library.load()

        self.player = CongaPlayer()
        self.player.connect('playing', self.on_player_playing)
        self.player.connect('stopped', self.on_player_stopped)
        self.player.connect('next', self.on_player_next)
        self.player.connect('position-updated', self.on_player_position_updated)
Example #21
    def _build_dependencies(self, transform_dir):
        target_dir = os.path.join(self.venv_dir,
                                  'lib/python2.7/site-packages/biokbase')
        mkpath(target_dir)
        touch(os.path.join(target_dir, '__init__.py'))
        shutil.copytree(
            os.path.join(transform_dir, "lib/biokbase/Transform"),
            os.path.join(self.venv_dir,
                         "lib/python2.7/site-packages/biokbase/Transform"))

        print("Pulling git dependencies")
        for dep in KBASE_DEPENDENCIES:
            print("Checking out " + GITHUB_URL + dep)
            repo = os.path.split(dep)[1]
            gitdir = os.path.join(self.working_dir, repo)
            git.Git().clone(GITHUB_URL + dep, gitdir)
            self._copy_deps(gitdir)
            shutil.rmtree(os.path.join(gitdir))

        scripts_dir = os.path.join(transform_dir, "plugins/scripts/")
        bin_dir = os.path.join(self.venv_dir, "bin/")
        for root, _, files in os.walk(scripts_dir):
            for file_ in files:
                filepath = os.path.join(root, file_)
                print("copy from {0} {1}".format(filepath, bin_dir))
                shutil.copy(filepath, bin_dir)
                self._make_executable_for_all(os.path.join(bin_dir, file_))
                self._make_wrapper(bin_dir, file_)
Example #22
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None, mode="w"):
    """Create a zip file from all the files under 'base_dir'.  The output
    zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
    Python module (if available) or the InfoZIP "zip" utility (if installed
    and found on the default search path).  If neither tool is available,
    raises DistutilsExecError.  Returns the name of the output zip file.
    """
    import zipfile

    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

    def visit(z, dirname, names):
        for name in names:
            path = os.path.normpath(os.path.join(dirname, name))
            if os.path.isfile(path):
                p = path[len(base_dir) + 1 :]
                if not dry_run:
                    z.write(path, p)
                log.debug("adding '%s'" % p)

    if compress is None:
        compress = sys.version >= "2.4"  # avoid 2.3 zipimport bug when 64 bits

    compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
    if not dry_run:
        z = zipfile.ZipFile(zip_filename, mode, compression=compression)
        os.path.walk(base_dir, visit, z)
        z.close()
    else:
        os.path.walk(base_dir, visit, None)
    return zip_filename
Example #23
def copy_data_tree(src, dst, exclude_pattern=['(RCS|CVS|\.svn)', '.*\~']):
    """
    Copy an entire directory tree 'src' to a new location 'dst'.

    :param exclude_pattern: a list of pattern to exclude.
    """
    names = os.listdir(src)
    mkpath(dst)
    outfiles = []

    for p in exclude_pattern:
        names = [x for x in names if not (re.match(p, x))]

    for n in names:
        src_name = os.path.join(src, n)
        dst_name = os.path.join(dst, n)

        if os.path.isdir(src_name):
            ret = copy_data_tree(src_name, dst_name, exclude_pattern)
            outfiles += ret
        else:
            shutil.copy2(src_name, dst_name)
            outfiles.append(dst_name)

    return outfiles
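A usage sketch; each pattern is applied with re.match against the entry names in every directory visited:

# Skip the default VCS/backup patterns plus (hypothetically) dotfiles.
files = copy_data_tree('docs', 'build/docs',
                       exclude_pattern=['(RCS|CVS|\.svn)', '.*\~', '\..*'])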
Example #24
    def make_lib_archive(self, zip_filename, base_dir, verbose=0,
                         dry_run=0):
        # Like distutils "make_archive", except we can specify the
        # compression to use - default is ZIP_STORED to keep the
        # runtime performance up.
        # Also, we don't append '.zip' to the filename.
        from distutils.dir_util import mkpath
        mkpath(os.path.dirname(zip_filename), dry_run=dry_run)

        if self.compressed:
            compression = zipfile.ZIP_DEFLATED
        else:
            compression = zipfile.ZIP_STORED
        if not dry_run:
            z = zipfile.ZipFile(zip_filename, "w",
                                compression=compression)
            save_cwd = os.getcwd()
            os.chdir(base_dir)
            for dirpath, dirnames, filenames in os.walk('.'):
                for fn in filenames:
                    path = os.path.normpath(os.path.join(dirpath, fn))
                    if os.path.isfile(path):
                        z.write(path, path)
            os.chdir(save_cwd)
            z.close()

        return zip_filename
Example #25
    def configure_libffi(self):
        if sys.platform == "win32":
            return
        if LIBFFI_SOURCES is None:
            return
        src_dir = os.path.abspath(LIBFFI_SOURCES)

        # Building libffi in a path containing spaces doesn't work:
        self.build_temp = self.build_temp.replace(" ", "")

        build_dir = os.path.join(self.build_temp, 'libffi')

        if not self.force and self.fix_extension(build_dir):
            return

        mkpath(build_dir)
        config_args = []

        # Pass empty CFLAGS because we'll just append the resulting CFLAGS
        # to Python's; -g or -O2 is to be avoided.
        cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
              % (build_dir, src_dir, " ".join(config_args))

        print 'Configuring static FFI library:'
        print cmd
        res = os.system(cmd)
        if res:
            print "Failed"
            sys.exit(res)

        assert self.fix_extension(build_dir), "Could not find libffi after building it"
Example #26
    def run_old(self):
        """Run the distutils command"""
        # check whether we can execute rpmbuild
        if not self.dry_run:
            try:
                devnull = open(os.devnull, 'w')
                subprocess.Popen(['rpmbuild', '--version'],
                                 stdin=devnull, stdout=devnull)
            except OSError:
                raise DistutilsError("Could not execute rpmbuild. Make sure "
                                     "it is installed and in your PATH")

        mkpath(self.dist_dir)

        # build command: to get the build_base
        cmdbuild = self.get_finalized_command("build")
        cmdbuild.verbose = self.verbose 
        self.build_base = cmdbuild.build_base
        self._populate_rpm_topdir(self.rpm_base)

        for name, rpm_spec in self.rpm_specs.items():
            self._prepare_distribution(name)
            self._create_rpm(rpm_name=name, spec=rpm_spec)

        if not self.keep_temp:
            remove_tree(self.build_base, dry_run=self.dry_run)
Example #27
   def build_man_page(self):
      """Build man pages for all installable programs."""
        
      self.announce("building man pages")

      descriptions = {}
      descriptions['synopsis'] = """simple frontend to the Synopsis framework, a multi-language source code introspection tool that
provides a variety of representations for the parsed code, to
enable further processing such as documentation extraction,
reverse engineering, and source-to-source translation."""
      
      descriptions['sxr-server'] = """the Synopsis Cross-Reference http server. Allows users
to query and browse cross-referenced source code."""
      

      help2man = find_executable('help2man')
      if not help2man:
         self.warn("cannot build man pages")
         return
      gzip = find_executable('gzip')

      section = 1
      man_dir = 'share/man/man%d'%section
      mkpath(man_dir, 0777, self.verbose, self.dry_run)

      for s in ['synopsis', 'sxr-server']:
         command = [help2man, '-N', '-n', descriptions[s]]
         executable = os.path.join('scripts', s)
         output = '%s/%s.%d'%(man_dir, s, section)
         command += ['-o', output, executable]
         spawn(command)
         if gzip:
            spawn(['gzip', '-f', output])
Example #28
def copy_doc(path, name, pkgdir, exts=TEXT_EXTS, language=None, dry_run=0,
        copy_tree=copy_tree, copy_file=copy_file, mkpath=mkpath):
    if path is None:
        return
    is_string = hasattr(path, 'getvalue')
    if is_string:
        ext = '.txt'
    else:
        ext = os.path.splitext(path)[1].lower()
        if ext == '':
            ext = '.txt'
    if ext not in exts:
        raise ValueError('Invalid extension for %s' % (path,))
    destdir = os.path.join(pkgdir, 'Contents', 'Resources')
    if language is not None:
        destdir = os.path.join(destdir, language + '.lproj')
    mkpath(destdir)
    dest = os.path.join(destdir, name + ext)
    if is_string:
        if not dry_run:
            f = open(dest, 'w')
            f.write(path.getvalue())
            f.close()
    elif ext == '.rtfd':
        copy_tree(path, dest)
    else:
        copy_file(path, dest)
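Usage sketch: path may be a filename or a file-like object exposing getvalue(); a string buffer is written out as a .txt resource (assuming '.txt' is in TEXT_EXTS). The bundle name and language here are hypothetical:

from io import StringIO
copy_doc(StringIO(u"Read me first."), 'ReadMe', 'dist/Foo.plugin',
         language='en')
# -> dist/Foo.plugin/Contents/Resources/en.lproj/ReadMe.txt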
Example #29
def create_config():
        logger.info("Creating default config file %s"%config_file())
        cnf=Config(os.path.join(config_folder(),HISTORY_FILE), URL_NOT_SET,os.path.join(config_folder(),TORRENTS_DIR))
        dir_util.mkpath(config_folder())
        with open(os.path.join(config_folder(),CONFIG_FILE),'w') as f:
                yaml.dump(cnf,f)
        return cnf
Example #30
def mkdir_if_not_exists(dir):
    """Makes the specified directory if it doesn't exist.
    :dir: The directory name to be created.
    :returns: None
    """
    if not os.path.isdir(dir):
        mkpath(dir)
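Note that mkpath() itself already tolerates existing directories, so the guard only avoids the call; usage is simply:

mkdir_if_not_exists('/tmp/output/run1')   # creates intermediate dirs as needed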
Example #31
parser.add_argument('--image_dir', default='keypoint_test_a_images_20180103')
parser.add_argument('--json_file',
                    default='keypoint_test_a_annotations_20180103.json')
parser.add_argument('--confidence', type=float, default=0.5)

args = parser.parse_args()

# set variable paths to images and json file
image_dir = os.path.join(args.data_dir, args.image_dir)

json_file = os.path.join(args.data_dir, args.json_file)
annos = json.load(open(json_file, 'r'))
print('annos', len(annos))

target_annotation_dir = os.path.join(args.out_dir)
mkpath(target_annotation_dir)

for idx, anno in enumerate(annos):
    # Print status.
    if (idx + 1) % 1000 == 0 or (idx + 1) == len(annos):
        print(str(idx + 1) + ' / ' + str(len(annos)) + "test")

    # read images
    image_path = image_dir + "/" + anno['image_id'] + '.jpg'
    image = cv2.imread(image_dir + "/" + anno['image_id'] + '.jpg',
                       cv2.IMREAD_COLOR)
    print(image_path)
    out_file = target_annotation_dir + "/" + anno['image_id'] + '.txt'
    f = open(out_file, 'w')

    max_im_shrink = (0x7fffffff / 200.0 /
                     (image.shape[0] * image.shape[1])) ** 0.5
Example #32
 def generate_mod(py_file):
     log.info("generating cffi module %r" % py_file)
     mkpath(os.path.dirname(py_file))
     updated = recompiler.make_py_source(ffi, module_name, py_file)
     if not updated:
         log.info("already up-to-date")
Example #33
def build(skip):
    """Build custom Python2 from source."""
    # Step 1: change into our build directory
    os.chdir(PYTHON_BUILD_DIR)
    # Don't compile Python if the skip option is passed
    if not skip:
        # Step 1.5: Add extra modules
        setup_dist = os.path.join(PYTHON_BUILD_DIR, 'Modules/Setup.dist')
        with open(setup_dist, "a") as f:
            log.debug("Adding additional modules to be included...")
            f.write("_socket socketmodule.c timemodule.c\n")
            f.write("_ssl _ssl.c -DUSE_SSL "
                    "-I{0}/include -I{0}/include/openssl -L{0}/lib "
                    "-lssl -lcrypto".format(OPENSSL_INSTALL_PATH))
        # Step 2: Run the Configure setup of Python to set correct paths
        os.chdir(PYTHON_BUILD_DIR)
        if os.path.isdir(PYTHON2_INSTALL):
            shutil.rmtree(PYTHON2_INSTALL, ignore_errors=True)
        mkpath(PYTHON2_INSTALL)
        log.info("Configuring Python...")
        cmd = [
            './configure',
            '--prefix={}'.format(PYTHON2_INSTALL),
            #    'CPPFLAGS=-I{}/include'.format(OPENSSL_INSTALL_PATH),
            #    'LDFLAGS=-L{}/lib'.format(OPENSSL_INSTALL_PATH),
            '--enable-shared',
            '--enable-toolbox-glue',
            '--with-ensurepip=install',
            '--enable-ipv6',
            '--with-threads',
            '--datarootdir={}/share'.format(PYTHON2_INSTALL),
            '--datadir={}/share'.format(PYTHON2_INSTALL),
        ]
        runner.Popen(cmd, stdout=sys.stdout)
        # Step 3: compile Python. this will take a while.

        # FIXME: We need to check return codes.
        log.info("Compiling Python. This will take a while time...")
        log.detail("Running Python make routine...")
        cmd = ['/usr/bin/make']
        runner.Popen(cmd, stdout=sys.stdout)
        sys.stdout.flush()  # does this help?

        log.debug("Create some temp files thats")
        log.detail("Running Python make install routine...")
        cmd = ['/usr/bin/make', 'install']
        runner.Popen(cmd, stdout=sys.stdout)
        sys.stdout.flush()  # does this help?
        # Step 4: Install pip + requirements
        os.chdir(os.path.join(PYTHON2_INSTALL, 'bin'))
        # Update pip to latest
        log.info("Upgrading pip...")
        cmd = ['./pip', 'install', '--upgrade', 'pip']
        runner.Popen(cmd, stdout=sys.stdout)
        # Install all pip modules from requirements.txt
        log.info("Install requirements...")
        requirements = os.path.join(CURRENT_DIR, 'requirements.txt')
        cmd = ['./python2.7', '-m', 'pip', 'install', '-r', requirements]
        runner.Popen(cmd, stdout=sys.stdout)
    else:
        log.info("Python compile skipped due to -skip option")
Example #34
    def build_library(self,
                      library,
                      pkg_config_name,
                      local_source=None,
                      supports_non_srcdir_builds=True):
        log.info("checking if library '%s' is installed", library)
        try:
            build_args = self.pkgconfig(pkg_config_name)
            log.info("found '%s' installed, using it", library)
        except CalledProcessError:

            # If local_source is not specified, then immediately fail.
            if local_source is None:
                raise DistutilsExecError("library '%s' is not installed"
                                         % library)

            log.info("building library '%s' from source", library)

            # Determine which compilers we are to use, and what flags.
            # This is based on what distutils.sysconfig.customize_compiler()
            # does, but that function has a problem that it doesn't produce
            # necessary (e.g. architecture) flags for C++ compilers.
            cc, cxx, opt, cflags = get_config_vars('CC', 'CXX', 'OPT',
                                                   'CFLAGS')
            cxxflags = cflags

            if 'CC' in os.environ:
                cc = os.environ['CC']
            if 'CXX' in os.environ:
                cxx = os.environ['CXX']
            if 'CFLAGS' in os.environ:
                cflags = opt + ' ' + os.environ['CFLAGS']
            if 'CXXFLAGS' in os.environ:
                cxxflags = opt + ' ' + os.environ['CXXFLAGS']

            # Use a subdirectory of build_temp as the build directory.
            build_temp = os.path.realpath(
                os.path.join(self.build_temp, library))

            # Destination for headers and libraries is build_clib.
            build_clib = os.path.realpath(self.build_clib)

            # Create build directories if they do not yet exist.
            mkpath(build_temp)
            mkpath(build_clib)

            if not supports_non_srcdir_builds:
                self._stage_files_recursive(local_source, build_temp)

            # Run configure.
            cmd = [
                '/bin/sh',
                os.path.join(os.path.realpath(local_source),
                             'configure'), '--prefix=' + build_clib,
                '--disable-shared', '--with-pic', '--disable-maintainer-mode'
            ]

            log.info('%s', ' '.join(cmd))
            check_call(cmd,
                       cwd=build_temp,
                       env=dict(self._environ,
                                CC=cc,
                                CXX=cxx,
                                CFLAGS=cflags,
                                CXXFLAGS=cxxflags))

            # Run make install.
            cmd = ['make', 'install']
            log.info('%s', ' '.join(cmd))
            check_call(cmd, cwd=build_temp, env=self._environ)

            build_args = self.pkgconfig(pkg_config_name)

        return build_args
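A hedged sketch of a call site, assuming an autotools-style library bundled with the source tree (names illustrative):

# Use the system library when pkg-config finds it; otherwise configure
# and 'make install' the bundled copy into build_clib.
build_args = self.build_library('rdkafka', 'rdkafka',
                                local_source='vendor/librdkafka')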
Example #35
 def mkpath(self, name, mode=0777):
     mkpath(name, mode, dry_run=self.dry_run)
Example #36
            files_differ = True
            #remove original file
            os.unlink(original_file)
    else:
        files_differ = True

    if files_differ:
        #Files differ, so overwrite original with candidate
        os.rename(candidate_file, original_file)


#
# generate compound code
#

mkpath(os.path.join(ROOT_DIR, "include/obj"))
mkpath(os.path.join(ROOT_DIR, "include/gen"))

mkpath(os.path.join(ROOT_DIR, "src/obj"))
mkpath(os.path.join(ROOT_DIR, "src/gen"))

for n in compound_names:
    x = compound_types[n]

    # skip natively implemented types
    #    if x.niflibtype: continue
    if x.name in NATIVETYPES.keys(): continue
    if n[:3] == 'ns ': continue

    if not GENALLFILES and not x.cname in GENBLOCKS:
        continue
Example #37
def read_args_from_stdin_and_run():
    ''' Main executable function to train and evaluate classifier.

    Post Condition
    --------------
    AUC and other eval info printed to stdout.
    Trained classifier saved ???.
    '''
    if not sys.stdin.isatty():
        for line in sys.stdin.readlines():
            line = line.strip()
            sys.argv.extend(line.split(' '))
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--dataset_path',
        default='/tmp/',
        type=str,
        help="Path to folder containing:" +
        " *.npy files: X_train, y_train, P_train"
        " *.txt files: X_colnames.txt and y_colnames.txt")
    parser.add_argument(
        '--pretrained_clf_path',
        default='/tmp/',
        type=str,
        help="Path to folder to hold output from classifier. Includes:" +
        " perf_metric*.txt files: auc_train.txt & auc_test.txt" +
        " settings.txt: description of all settings to reproduce.")
    parser.add_argument(
        '--output_path',
        default=None,
        type=str,
        help='Place to write output files. Defaults to pretrained_clf_path if not specified.')
    parser.add_argument(
        '--split_names',
        default='test')
    parser.add_argument(
        '--split_nicknames',
        default=None)
    parser.add_argument(
        '--features_path',
        default='/tmp/',
        type=str,
        help="Path to folder with SSAMfeat*.npy files")
    parser.add_argument(
        '--target_arr_name',
        default='Y',
        type=str,
        )
    parser.add_argument(
        '--target_names',
        default='all',
        type=str,
        help='Name of response/intervention to test.' +
        ' To try specific interventions, write names separated by commas.' +
        ' To try all interventions, use special name "all"')
    parser.add_argument(
        '--top_k',
        default=3, type=int,
        help='Num of predictions to take to compute hitrate')
    parser.add_argument(
        '--oracle_dataset_path',
        default=None, type=str,
        help='Path to train arrays used as oracle knowledge for predictions')
    parser.add_argument(
        '--seed_bootstrap',
        default=42,
        type=int,
        help='Seed for bootstrap')
    parser.add_argument(
        '--n_bootstraps',
        default=5000,
        type=int,
        help='Number of samples for bootstrap conf. intervals')
    parser.add_argument(
        '--bootstrap_stratify_pos_and_neg',
        default=True,
        type=int,
        help='Whether to stratify examples or not')

    args, unk_list = parser.parse_known_args()
    arg_dict = vars(args)

    dataset_path = arg_dict['dataset_path']
    if arg_dict['output_path'] is None:
        output_path = arg_dict['pretrained_clf_path']
    else:
        output_path = arg_dict['output_path']
        if not os.path.exists(output_path):
            mkpath(output_path)

    if arg_dict['oracle_dataset_path'] is None:
        arg_dict['oracle_dataset_path'] = dataset_path

    # Set default seed for numpy
    # Might be used by cross-validation, etc.
    np.random.seed(8675309)

    clf_opt_list = list()
    # Read parsed args from plain-text file
    # so we can exactly reproduce pretrained classifier
    try:
        with open(os.path.join(arg_dict['pretrained_clf_path'], 'settings.txt'), 'r') as f:
            for line in f.readlines():
                line = line.strip()
                clf_opt_list.append(line.split(' = '))
        clf_opts = dict(clf_opt_list)
    except IOError as e:
        clf_opts = dict()
        clf_opts['feature_arr_names'] = 'X' # DUMMY!
        clf_opts['classifier_name'] = arg_dict['pretrained_clf_path']

    feat_path_list = [
        arg_dict['dataset_path'],
        arg_dict['features_path']]

    pprint('[run_classifier says:] Loading dataset ...')
    start_time = time.time()
    feature_arr_names = clf_opts['feature_arr_names'].split(',')
    pprint('feature_arr_names:')
    feat_colnames_by_arr = OrderedDict()
    for feat_arr_name in feature_arr_names:
        pprint(feat_arr_name)
        cur_feat_colnames = None
        for feat_path in feat_path_list:
            colname_fpath = os.path.join(
                feat_path,
                feat_arr_name + '_colnames.txt')
            if os.path.exists(colname_fpath):
                cur_feat_colnames = \
                    [unicode(feat_arr_name + ":") + s
                        for s in load_list_of_unicode_from_txt(colname_fpath)]
                break
        feat_colnames_by_arr[feat_arr_name] = cur_feat_colnames

    target_arr_name = arg_dict['target_arr_name']
    all_target_names = load_list_of_strings_from_txt(os.path.join(
        arg_dict['dataset_path'],
        target_arr_name + '_colnames.txt'))
    target_names = arg_dict['target_names']
    if target_names == 'all':
        target_names = all_target_names
        target_cols = np.arange(len(all_target_names)).tolist()
    else:
        target_names = target_names.split(',')
        target_cols = list()
        for name in target_names:
            assert name in all_target_names
            target_cols.append(all_target_names.index(name))

    datasets_by_split = OrderedDict()
    split_names = arg_dict['split_names'].split(',')
    if arg_dict['split_nicknames'] is None:
        split_nicknames = split_names
    else:
        split_nicknames = arg_dict['split_nicknames'].split(',')

    alias_by_split = OrderedDict()
    for nickname, split_name in zip(split_nicknames,split_names):
        datasets_by_split[nickname] = dict()
        split_dataset = datasets_by_split[nickname]
        alias_by_split[nickname] = split_name

        # Load Y
        dense_fpath = os.path.join(
            dataset_path,
            target_arr_name + "_%s.npy" % split_name)
        y = np.asarray(np.load(dense_fpath), order='C', dtype=np.int32)
        if y.ndim < 2:
            y = y[:,np.newaxis]
        assert y.ndim == 2
        assert y.shape[1] == len(all_target_names)
        split_dataset['y'] = y[:, target_cols]
        assert split_dataset['y'].shape[1] == len(target_cols)

        # Load X
        x_list = list()      
        for feat_arr_name in feature_arr_names:
            x_cur = None

            def fpath_generator():
                for feat_path in feat_path_list:
                    for sname in [nickname, split_name]:
                        dense_fpath = os.path.join(
                            feat_path, feat_arr_name + "_" + sname + ".npy")
                        sparse_fpath = os.path.join(
                            feat_path, feat_arr_name + "_csr_" + sname + ".npz")
                        yield dense_fpath, sparse_fpath
            ds_path_list = [pair for pair in fpath_generator()] 
            for ii, (dense_fpath, sparse_fpath) in enumerate(ds_path_list):
                try:
                    if os.path.exists(sparse_fpath):
                        x_cur = load_csr_matrix(sparse_fpath)
                        assert np.all(np.isfinite(x_cur.data))
                        break
                    else:
                        x_cur = np.asarray(
                            np.load(dense_fpath),
                            order='C', dtype=np.float64)
                        if x_cur.ndim < 2:
                            x_cur = np.atleast_2d(x_cur).T
                        assert np.all(np.isfinite(x_cur))
                        break
                except IOError as e:
                    if ii == len(ds_path_list) - 1:
                        # Couldn't find desired file in any feat_path
                        raise e
                    else:
                        # Try the next feat_path in the list
                        pass
            if x_cur is not None:
                if feat_colnames_by_arr[feat_arr_name] is not None:
                    feat_dim = len(feat_colnames_by_arr[feat_arr_name])
                    assert x_cur.shape[1] == feat_dim
                else:
                    # Add dummy colnames
                    feat_dim = x_cur.shape[1]
                    n_sig_digits = np.maximum(
                        3, int(np.ceil(np.log10(feat_dim))))
                    fmt_str = "%s_%0" + str(n_sig_digits) + "d"
                    feat_colnames_by_arr[feat_arr_name] = [
                        fmt_str % (feat_arr_name, fid)
                            for fid in range(feat_dim)]
                x_list.append(x_cur)

        if isinstance(x_list[0], np.ndarray):
            split_dataset['x'] = np.hstack(x_list)
        else:
            split_dataset['x'] = scipy.sparse.hstack(x_list, format='csr')

        assert split_dataset['x'].ndim == 2
        assert split_dataset['x'].shape[0] == split_dataset['y'].shape[0]
        assert (
            isinstance(split_dataset['x'], np.ndarray)
            or isinstance(split_dataset['x'], scipy.sparse.csr_matrix)
            )

        if split_name == split_names[0]:
            # Flatten feat colnames into single list
            feat_colnames = sum(feat_colnames_by_arr.values(), [])
            assert isinstance(feat_colnames, list)
            assert len(feat_colnames) == split_dataset['x'].shape[1]

            print('y colnames: %s' % ' '.join(target_names))
            if len(feat_colnames) > 10:
                print('x colnames: %s ... %s' % (' '.join(feat_colnames[:5]), ' '.join(feat_colnames[-5:])))
            else:
                print('x colnames: %s' % ' '.join(feat_colnames))

        print('---- %5s dataset summary' % split_name)
        print('%9d total examples' % y.shape[0])
        print('y : %d x %d targets' % split_dataset['y'].shape)
        print('x : %d x %d features' % split_dataset['x'].shape)

        for c in xrange(len(target_names)):
            y_c = split_dataset['y'][:,c]
            print('target %s : frac pos %.3f' % (target_names[c], np.mean(y_c)))
            print('    %6d pos examples' % np.sum(y_c == 1))
            print('    %6d neg examples' % np.sum(y_c == 0))

    elapsed_time = time.time() - start_time
    print('[run_classifier says:] dataset loaded after %.2f sec.' % elapsed_time)

    print('[eval_pretrained_classifier says:] eval multilabel')
    eval_pretrained_avg_clf(
        classifier_name=clf_opts['classifier_name'],
        classifier_path=arg_dict['pretrained_clf_path'],
        datasets_by_split=datasets_by_split,
        alias_by_split=alias_by_split,
        dataset_path=arg_dict['dataset_path'],
        oracle_dataset_path=arg_dict['oracle_dataset_path'],
        feat_colnames=feat_colnames,
        output_path=output_path,
        target_arr_name=target_arr_name,
        label_names=all_target_names,
        feature_arr_names=feature_arr_names,
        seed_bootstrap=arg_dict['seed_bootstrap'],
        n_bootstraps=arg_dict['n_bootstraps'],
        bootstrap_stratify_pos_and_neg=arg_dict['bootstrap_stratify_pos_and_neg'],
        )
    elapsed_time = time.time() - start_time
    print('[eval_pretrained_classifier says:] completed after %.2f sec' % (elapsed_time))
Example #38
def byte_compile(py_files,
                 optimize=0,
                 force=0,
                 target_dir=None,
                 verbose=1,
                 dry_run=0,
                 direct=None):

    if direct is None:
        direct = (__debug__ and optimize == 0)

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        from tempfile import mktemp
        from distutils.util import execute, spawn
        script_name = mktemp(".py")
        if verbose:
            print("writing byte-compilation script '%s'" % script_name)
        if not dry_run:
            script = open(script_name, "w")
            script.write("""
from pluginbuilder.util import byte_compile
from modulegraph.modulegraph import *
files = [
""")

            for f in py_files:
                script.write(repr(f) + ",\n")
            script.write("]\n")
            script.write("""
byte_compile(files, optimize=%r, force=%r,
             target_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
""" % (optimize, force, target_dir, verbose))

            script.close()

        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, "-O")
        elif optimize == 2:
            cmd.insert(1, "-OO")
        spawn(cmd, verbose=verbose, dry_run=dry_run)
        execute(os.remove, (script_name, ),
                "removing %s" % script_name,
                verbose=verbose,
                dry_run=dry_run)

    else:
        from py_compile import compile
        from distutils.dir_util import mkpath

        for mod in py_files:
            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            if mod.filename == mod.identifier:
                cfile = os.path.basename(mod.filename)
                dfile = cfile + (__debug__ and 'c' or 'o')
            else:
                cfile = mod.identifier.replace('.', os.sep)

                if mod.packagepath:
                    dfile = cfile + os.sep + '__init__.py' + (__debug__ and 'c'
                                                              or 'o')
                else:
                    dfile = cfile + '.py' + (__debug__ and 'c' or 'o')
            if target_dir:
                cfile = os.path.join(target_dir, dfile)

            if force or newer(mod.filename, cfile):
                if verbose:
                    print("byte-compiling %s to %s" % (mod.filename, dfile))
                if not dry_run:
                    mkpath(os.path.dirname(cfile))
                    suffix = os.path.splitext(mod.filename)[1]

                    if suffix in ('.py', '.pyw'):
                        zfile, pth = path_to_zip(mod.filename)
                        if zfile is None:
                            compile(mod.filename, cfile, dfile)
                        else:
                            fn = dfile + '.py'
                            open(fn, 'wb').write(get_zip_data(zfile, pth))
                            compile(mod.filename, cfile, dfile)
                            os.unlink(fn)

                    elif suffix in PY_SUFFIXES:
                        # Minor problem: This will happily copy a file
                        # <mod>.pyo to <mod>.pyc or <mod>.pyc to
                        # <mod>.pyo, but it does seem to work.
                        copy_file_data(mod.filename, cfile)
                    else:
                        raise RuntimeError \
                              ("Don't know how to handle %r" % mod.filename)
            else:
                if verbose:
                    print("skipping byte-compilation of %s to %s" % \
                          (mod.filename, dfile))
Example #39
    RedNose.score = 201  # above coverage
    nose_argv = ['', '-s', '-v', '--exe', '--rednose', '--nologcapture']

    if '--collect' in sys.argv:
        sys.argv.append('--collect-only')
    if '--collect-only' in sys.argv:  # this is a user trying simply to view the available tests. no need xunit.
        CTRexScenario.is_test_list = True
        xml_arg = ''
    else:
        xml_name = 'unit_test.xml'
        if CTRexScenario.setup_dir:
            CTRexScenario.setup_name = os.path.basename(
                CTRexScenario.setup_dir)
            xml_name = 'report_%s.xml' % CTRexScenario.setup_name
        xml_arg = '--xunit-file=%s/%s' % (CTRexScenario.report_dir, xml_name)
        mkpath(CTRexScenario.report_dir)

    sys_args = sys.argv[:]
    for i, arg in enumerate(sys.argv):
        if 'log-path' in arg:
            nose_argv += ['--nologcapture']
        else:
            for tests_type in CTRexScenario.test_types.keys():
                if tests_type in arg:
                    specific_tests = True
                    CTRexScenario.test_types[tests_type].append(
                        arg[arg.find(tests_type):])
                    sys_args.remove(arg)

    if not specific_tests:
        for key in ('--func', '--functional'):
Example #40
def copy_tree(src,
              dst,
              preserve_mode=1,
              preserve_times=1,
              preserve_symlinks=0,
              update=0,
              verbose=0,
              dry_run=0,
              condition=None):
    """
    Copy an entire directory tree 'src' to a new location 'dst'.  Both
    'src' and 'dst' must be directory names.  If 'src' is not a
    directory, raise DistutilsFileError.  If 'dst' does not exist, it is
    created with 'mkpath()'.  The end result of the copy is that every
    file in 'src' is copied to 'dst', and directories under 'src' are
    recursively copied to 'dst'.  Return the list of files that were
    copied or might have been copied, using their output name.  The
    return value is unaffected by 'update' or 'dry_run': it is simply
    the list of all files under 'src', with the names changed to be
    under 'dst'.

    'preserve_mode' and 'preserve_times' are the same as for
    'copy_file'; note that they only apply to regular files, not to
    directories.  If 'preserve_symlinks' is true, symlinks will be
    copied as symlinks (on platforms that support them!); otherwise
    (the default), the destination of the symlink will be copied.
    'update' and 'verbose' are the same as for 'copy_file'.
    """
    assert isinstance(src, str), repr(src)
    assert isinstance(dst, str), repr(dst)

    from distutils.dir_util import mkpath
    from distutils.file_util import copy_file
    from distutils.dep_util import newer
    from distutils.errors import DistutilsFileError
    from distutils import log

    if condition is None:
        condition = skipscm

    if not dry_run and not os_path_isdir(src):
        raise DistutilsFileError("cannot copy tree '%s': not a directory" %
                                 src)
    try:
        names = os_listdir(src)
    except os.error as exc:
        (errno, errstr) = exc.args
        if dry_run:
            names = []
        else:
            raise DistutilsFileError("error listing files in '%s': %s" %
                                     (src, errstr))

    if not dry_run:
        mkpath(dst)

    outputs = []

    for n in names:
        src_name = os.path.join(src, n)
        dst_name = os.path.join(dst, n)
        if (condition is not None) and (not condition(src_name)):
            continue

        if preserve_symlinks and os_path_islink(src_name):
            link_dest = os_readlink(src_name)
            log.info("linking %s -> %s", dst_name, link_dest)
            if not dry_run:
                if update and not newer(src, dst_name):
                    pass
                else:
                    if os_path_islink(dst_name):
                        os.remove(dst_name)
                    os.symlink(link_dest, dst_name)
            outputs.append(dst_name)

        elif os_path_isdir(src_name):
            outputs.extend(
                copy_tree(src_name,
                          dst_name,
                          preserve_mode,
                          preserve_times,
                          preserve_symlinks,
                          update,
                          dry_run=dry_run,
                          condition=condition))
        else:
            copy_file(src_name,
                      dst_name,
                      preserve_mode,
                      preserve_times,
                      update,
                      dry_run=dry_run)
            outputs.append(dst_name)

    return outputs
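A hypothetical usage of the variant above, assuming the os_* aliases it uses resolve to their os counterparts: mirror a tree, keep symlinks, and skip editor backup files via the condition callback.

def no_backups(path):
    # Condition callback: copy everything except editor backup files.
    return not path.endswith('~')

copied = copy_tree('src', 'staging',
                   preserve_symlinks=1,
                   condition=no_backups)
print('\n'.join(copied))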
Example #41
0
 def mkpath(self, fn):
     mkpath(fn, dry_run=self.dry_run, verbose=self.verbose)
Example #42
0
    def finalize_options(self):
        # type: () -> None
        """Append custom openssl include file and library linking options."""
        build_ext.build_ext.finalize_options(self)
        self.openssl_default = None
        self.set_undefined_options('build', ('openssl', 'openssl'))
        if self.openssl is None:
            self.openssl = self.openssl_default
        self.set_undefined_options('build', ('bundledlls', 'bundledlls'))

        self.libraries = ['ssl', 'crypto']
        if sys.platform == 'win32':
            self.libraries = ['ssleay32', 'libeay32']
            if self.openssl and openssl_version(self.openssl, 0x10100000, True):
                self.libraries = ['libssl', 'libcrypto']
                self.swig_opts.append('-D_WIN32')
                # Swig doesn't know the MSVC version, which causes errors in
                # e_os2.h when it tries to import stdint.h. Since Python 2.7
                # is intimately tied to MSVC 2008, defining this is harmless
                # for now; a better fix is planned.
                self.swig_opts.append('-D_MSC_VER=1500')


        if sys.version_info[:1] >= (3,):
            self.swig_opts.append('-py3')

        log.debug('self.include_dirs = %s', self.include_dirs)
        log.debug('self.library_dirs = %s', self.library_dirs)

        if self.openssl is not None:
            log.debug('self.openssl = %s', self.openssl)
            openssl_library_dir = os.path.join(self.openssl, 'lib')
            openssl_include_dir = os.path.join(self.openssl, 'include')

            self.library_dirs.append(openssl_library_dir)
            self.include_dirs.append(openssl_include_dir)

            log.debug('self.include_dirs = %s', self.include_dirs)
            log.debug('self.library_dirs = %s', self.library_dirs)

        if platform.system() == "Linux":
            # For RedHat-based distros, the '-D__{arch}__' option for
            # Swig needs to be normalized, particularly on i386.
            mach = platform.machine().lower()
            if mach in ('i386', 'i486', 'i586', 'i686'):
                arch = '__i386__'
            elif mach in ('ppc64', 'powerpc64', 'ppc64le', 'ppc64el'):
                arch = '__powerpc64__'
            elif mach in ('ppc', 'powerpc'):
                arch = '__powerpc__'
            else:
                arch = '__%s__' % mach
            self.swig_opts.append('-D%s' % arch)
            if mach in ('ppc64le', 'ppc64el'):
                self.swig_opts.append('-D_CALL_ELF=2')

        self.swig_opts.extend(['-I%s' % i for i in self.include_dirs])

        # Some Linux distributions have added a line like the following to
        # /usr/include/openssl/opensslconf.h:
        #
        #     #include "openssl-x86_64.h"
        #
        # This is fine with C compilers, because they are smart enough to
        # handle 'local inclusion' correctly.  Swig, on the other hand, is
        # not as smart, and needs to be told where to find this file...
        #
        # Note that this is a risky workaround, since it takes away the
        # namespace that OpenSSL uses.  If someone else has similarly
        # named header files in /usr/include, there will be clashes.
        if self.openssl is None:
            self.swig_opts.append('-I/usr/include/openssl')
        else:
            self.swig_opts.append('-I' + os.path.join(openssl_include_dir, 'openssl'))

        # swig seems to need the default header file directories
        self.swig_opts.extend(['-I%s' % i for i in _get_additional_includes()])
        self.swig_opts.append('-includeall')
        self.swig_opts.append('-modern')
        self.swig_opts.append('-builtin')

        # These two lines are a workaround for
        # http://bugs.python.org/issue2624 , hard-coding that we are only
        # building a single extension with a known path; a proper patch to
        # distutils would be in the run phase, when extension name and path are
        # known.
        self.swig_opts.extend(['-outdir',
                              os.path.join(os.getcwd(), 'M2Crypto')])
        self.include_dirs.append(os.path.join(os.getcwd(), 'SWIG'))

        if sys.platform == 'cygwin' and self.openssl is not None:
            # Cygwin SHOULD work (there's code in distutils), but
            # if one first starts a Windows command prompt, then bash,
            # the distutils code does not seem to work. If you start
            # Cygwin directly, then it would work even without this change.
            # Someday distutils will be fixed and this won't be needed.
            self.library_dirs += [os.path.join(self.openssl, 'bin')]

        mkpath(os.path.join(self.build_lib, 'M2Crypto'))
Example #43
0
 def write_rev_file():
     mkpath(root_dir)
     with open(rev_file_name, 'w') as rev_file:
         rev_file.write(rev_value)
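The closure above reads root_dir, rev_file_name and rev_value from its enclosing scope; a self-contained sketch of the same idea (parameter names are assumptions) might be:

import os
from distutils.dir_util import mkpath

def write_rev_file(root_dir, rev_file_name, rev_value):
    # Ensure the directory exists, then record the revision string.
    mkpath(root_dir)
    with open(os.path.join(root_dir, rev_file_name), 'w') as rev_file:
        rev_file.write(rev_value)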
Example #44
0
    def run(self):
        log.info("Loading checksums.")
        libfgb_pkgdir = "pkgs/libfgb"
        checksums_file = os.path.join(libfgb_pkgdir, "checksums.ini")
        with open(checksums_file, 'r') as f:
            # checksums.ini holds simple "key=value" lines (tarball, sha1).
            checksums = dict(s.split('=') for s in f.read().split('\n') if s)

        log.info("Checking if upstream tar file exists.")
        if not os.path.exists("upstream"):
            mkpath("upstream")
        libfgb_file = os.path.join("upstream", checksums["tarball"])
        if not os.path.exists(libfgb_file):
            log.warn(
                "File %s does not exist. Attempting to download from %s." %
                (libfgb_file, UPSTREAM_TAR_URL))
            # We use curl to avoid [SSL: CERTIFICATE_VERIFY_FAILED] errors
            os.system("curl --insecure --create-dirs -o %s %s" %
                      (libfgb_file, UPSTREAM_TAR_URL))
            if not os.path.exists(libfgb_file):
                log.error(
                    """Download failed. You may wish to download the file "%s" manually from "%s" and place it in the "upstream/" directory."""
                    % (checksums["tarball"], UPSTREAM_TAR_URL))
                sys.exit(1)
        if sha1sum(libfgb_file) != checksums["sha1"]:
            log.error("Checksum for file %s is different." % libfgb_file)
            sys.exit(1)

        log.info("Creating directories.")
        if os.path.exists("local"):
            remove_tree("local")
        mkpath("local/include")
        mkpath("local/lib")
        tmpdir = "local/var/tmp"
        mkpath(tmpdir)

        log.info("Extracting tar file.")
        import tarfile
        tar = tarfile.open(libfgb_file)
        tar.extractall(tmpdir)
        tar.close()
        libfgb_builddir = os.path.join(tmpdir, UPSTREAM_TAR_BASEDIR)
        if not os.path.exists(libfgb_builddir):
            log.error("Failed to extract files properly.")
            sys.exit(1)

        log.info("Applying patches.")
        os.chdir(libfgb_builddir)
        patches_dir = os.path.join(cwd, libfgb_pkgdir, "patches")
        patchfiles = [
            os.path.join(patches_dir, s)
            for s in sorted(os.listdir(patches_dir)) if s.endswith('.patch')
        ]
        for p in patchfiles:
            log.info("Applying %s" % p)
            if os.system("patch -p1 < '%s'" % p):
                log.error("Failed to apply patches.")
                sys.exit(1)
        os.chdir(cwd)

        log.info("Copying include and lib files.")
        if UNAME == "Darwin":
            FGB_LIBDIR = "macosx"
        elif UNAME == "Linux":
            FGB_LIBDIR = "x64"
        else:
            log.error(
                "Error installing libfgb: libfgb is not available for this platform."
            )
            sys.exit(1)
        for f in [
                "nv/int/protocol_maple.h", "nv/maple/C/call_fgb.h",
                "nv/maple/C/call_fgb_basic.h"
        ]:
            copy_file(os.path.join(libfgb_builddir, f), "local/include")
        copy_tree(os.path.join(libfgb_builddir, "nv/protocol"),
                  "local/include")
        copy_tree(os.path.join(libfgb_builddir, "nv/maple/C", FGB_LIBDIR),
                  "local/lib")
Example #45
0
        print ("OK.")    

    print ("Compiling BBarolo... ",end="")
    sys.stdout.flush()
    ret = subprocess.call("make -j%i lib"%mpr.cpu_count(), shell=True, stdout=f)
    if ret!=0: sys.exit("\nCompilation failed. Check %s for errors.\n"%logfile)
    print ("OK.")
    

if sys.argv[1]=='sdist':  
    # If we are creating the sdist package, make a tar with BB source
    try: remove_tree("pyBBarolo/BBarolo")
    except: pass
    try: remove_tree("pyBBarolo/BBarolo.tar.gz")
    except: pass
    mkpath("pyBBarolo/BBarolo")    
    subprocess.call("cp -r src/ pyBBarolo/BBarolo/src", shell=True,stdout=f)
    subprocess.call("cp -r config/ pyBBarolo/BBarolo/config", shell=True,stdout=f)
    subprocess.call("cp -r configure pyBBarolo/BBarolo/", shell=True,stdout=f)
    subprocess.call("cp -r Makefile.in pyBBarolo/BBarolo/", shell=True,stdout=f)
    subprocess.call("cp -r Makefile.in pyBBarolo/BBarolo/", shell=True,stdout=f)
    subprocess.call("rm -rf pyBBarolo/BBarolo/src/Build", shell=True,stdout=f)
    subprocess.call("cd pyBBarolo && tar -czvf BBarolo.tar.gz BBarolo", shell=True,stdout=f)
    remove_tree("pyBBarolo/BBarolo")
    
    # If we are creating the dist, the additional file is the tar just created
    package_data = {'pyBBarolo': ['*.tar.gz']}
    
else:
    print ("------ Installing pyBBarolo v%s ------"%version)
    
Example #46
0
def build_native_image_image():
    image_path = native_image_path(suite_native_image_root())
    mx.log('Building native-image executable ' + image_path)
    image_dir = dirname(image_path)
    mkpath(image_dir)
    native_image_on_jvm(['--tool:native-image', '-H:Path=' + image_dir])
Example #47
0
def copytree(src,
             dst,
             ignore_pattern_if_file_exists='*',
             preserve_mode=True,
             preserve_times=False,
             preserve_symlinks=False,
             update=False,
             verbose=1,
             dry_run=0):
    """
    A slightly modified version of `distutils.dir_util.copytree`, with the
    added parameter `ignore_pattern_if_file_exists`, which skips a file
    when it matches the given pattern and already exists in the destination
    directory.  By default, all existing destination files are skipped.
    """

    if not dry_run and not os.path.isdir(src):
        raise DistutilsFileError("cannot copy tree '%s': not a directory" %
                                 src)
    try:
        names = os.listdir(src)
    except OSError as e:
        if dry_run:
            names = []
        else:
            raise DistutilsFileError("error listing files in '%s': %s" %
                                     (src, e.strerror)) from e

    if not dry_run:
        mkpath(dst, verbose=verbose)

    outputs = []
    outputs_ignored = []

    for n in names:
        src_name = os.path.join(src, n)
        dst_name = os.path.join(dst, n)

        if n.startswith('.nfs'):
            # skip NFS rename files
            continue

        if preserve_symlinks and os.path.islink(src_name):
            link_dest = os.readlink(src_name)
            if verbose >= 1:
                log.info("linking %s -> %s", dst_name, link_dest)
            if not dry_run:
                os.symlink(link_dest, dst_name)
            outputs.append(dst_name)

        elif os.path.isdir(src_name):
            part_outputs, part_outputs_ignored = copytree(
                src_name,
                dst_name,
                ignore_pattern_if_file_exists,
                preserve_mode,
                preserve_times,
                preserve_symlinks,
                update,
                verbose=verbose,
                dry_run=dry_run)
            outputs.extend(part_outputs)
            outputs_ignored.extend(part_outputs_ignored)

        elif not (fnmatch.fnmatch(dst_name, ignore_pattern_if_file_exists)
                  and os.path.exists(dst_name)):
            copy_file(src_name,
                      dst_name,
                      preserve_mode,
                      preserve_times,
                      update,
                      verbose=verbose,
                      dry_run=dry_run)
            outputs.append(dst_name)

        else:
            outputs_ignored.append(dst_name)

    return outputs, outputs_ignored
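A hypothetical call to the variant above: refresh a build directory while leaving any already-present *.cfg files untouched.

copied, skipped = copytree('template', 'build',
                           ignore_pattern_if_file_exists='*.cfg')
print('copied %d files, left %d in place' % (len(copied), len(skipped)))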
Example #48
0
            replacements = self._file_replacements(output_fname)
            replacements['PERIPHERAL'] = p.to_cpp()

            template = string.Template(template_string)
            with open(output_fpath, 'w') as f:
                print "Writing " + output_fpath + "...",
                f.write(template.safe_substitute(replacements))
                print "done"

            peripheral_header_fnames.append(output_fname)
        return peripheral_header_fnames

    def generate_header_files(self, output_path=''):
        fnames = self._generate_peripheral_files(output_path)
        return self._generate_device_file(fnames, output_path)


parser = argparse.ArgumentParser()
parser.add_argument("svd_file", help="SVD input file")
parser.add_argument("out_dir",
                    help="Directory for generated files",
                    default='')

args = parser.parse_args()

dev = Device.from_svdfile(args.svd_file, svd_defaults)

if (args.out_dir != ''):
    mkpath(args.out_dir)
dev.generate_header_files(args.out_dir)
Example #49
0
def draw_results(images,
                 heats_ground,
                 heats_result,
                 pafs_ground,
                 pafs_result,
                 masks,
                 name=''):
    """Save results for debugging.

    Parameters
    -----------
    images : a list of RGB images
    heats_ground : a list of keypoint heat maps or None
    heats_result : a list of keypoint heat maps or None
    pafs_ground : a list of paf vector maps or None
    pafs_result : a list of paf vector maps or None
    masks : a list of mask for people
    """
    # interval = len(images)
    for i in range(len(images)):
        if heats_ground is not None:
            heat_ground = heats_ground[i]
        if heats_result is not None:
            heat_result = heats_result[i]
        if pafs_ground is not None:
            paf_ground = pafs_ground[i]
        if pafs_result is not None:
            paf_result = pafs_result[i]
        if masks is not None:
            mask = masks[:, :, :, 0]
            mask = mask.reshape(hout, wout, 1)
            mask1 = np.repeat(mask, n_pos, 2)
            mask2 = np.repeat(mask, n_pos * 2, 2)

        image = images[i]

        fig = plt.figure(figsize=(8, 8))
        a = fig.add_subplot(2, 3, 1)
        plt.imshow(image)

        if pafs_ground is not None:
            a = fig.add_subplot(2, 3, 2)
            a.set_title('Vectormap_ground')
            vectormap = paf_ground * mask2
            tmp2 = vectormap.transpose((2, 0, 1))
            tmp2_odd = np.amax(np.absolute(tmp2[::2, :, :]), axis=0)
            tmp2_even = np.amax(np.absolute(tmp2[1::2, :, :]), axis=0)

            # tmp2_odd = tmp2_odd * 255
            # tmp2_odd = tmp2_odd.astype(np.int)
            plt.imshow(tmp2_odd, alpha=0.3)

            # tmp2_even = tmp2_even * 255
            # tmp2_even = tmp2_even.astype(np.int)
            plt.colorbar()
            plt.imshow(tmp2_even, alpha=0.3)

        if pafs_result is not None:
            a = fig.add_subplot(2, 3, 3)
            a.set_title('Vectormap result')
            if masks is not None:
                vectormap = paf_result * mask2
            else:
                vectormap = paf_result
            tmp2 = vectormap.transpose((2, 0, 1))
            tmp2_odd = np.amax(np.absolute(tmp2[::2, :, :]), axis=0)
            tmp2_even = np.amax(np.absolute(tmp2[1::2, :, :]), axis=0)
            plt.imshow(tmp2_odd, alpha=0.3)

            plt.colorbar()
            plt.imshow(tmp2_even, alpha=0.3)

        if heats_result is not None:
            a = fig.add_subplot(2, 3, 4)
            a.set_title('Heatmap result')
            if masks is not None:
                heatmap = heat_result * mask1
            else:
                heatmap = heat_result
            tmp = heatmap
            tmp = np.amax(heatmap[:, :, :-1], axis=2)

            plt.colorbar()
            plt.imshow(tmp, alpha=0.3)

        if heats_ground is not None:
            a = fig.add_subplot(2, 3, 5)
            a.set_title('Heatmap ground truth')
            if masks is not None:
                heatmap = heat_ground * mask1
            else:
                heatmap = heat_ground
            tmp = heatmap
            tmp = np.amax(heatmap[:, :, :-1], axis=2)

            plt.colorbar()
            plt.imshow(tmp, alpha=0.3)

        if masks is not None:
            a = fig.add_subplot(2, 3, 6)
            a.set_title('Mask')
            # print(mask.shape, tmp.shape)
            plt.colorbar()
            plt.imshow(mask[:, :, 0], alpha=0.3)
        # plt.savefig(str(i)+'.png',dpi=300)
        # plt.show()

        mkpath(config.LOG.vis_path)
        plt.savefig(os.path.join(config.LOG.vis_path, '%s%d.png' % (name, i)),
                    dpi=300)
Example #50
0
 def mkpath(self, name, mode=0o777):
     dir_util.mkpath(name, mode, dry_run=self.dry_run)
Example #51
0
"""distutils.archive_util
Example #52
0
        condition = skipscm

    if not dry_run and not os_path_isdir(src):
        raise DistutilsFileError, \
              "cannot copy tree '%s': not a directory" % src
    try:
        names = os_listdir(src)
    except os.error, (errno, errstr):
        if dry_run:
            names = []
        else:
            raise DistutilsFileError, \
                  "error listing files in '%s': %s" % (src, errstr)

    if not dry_run:
        mkpath(dst)

    outputs = []

    for n in names:
        src_name = os.path.join(src, n)
        dst_name = os.path.join(dst, n)
        if (condition is not None) and (not condition(src_name)):
            continue

        if preserve_symlinks and os_path_islink(src_name):
            link_dest = os_readlink(src_name)
            log.info("linking %s -> %s", dst_name, link_dest)
            if not dry_run:
                if update and not newer(src, dst_name):
                    pass
Example #53
0
# Set accepted limit, lim
lims = [1000, 5000, 10000]
distances = []
for dist_measure in ['NRMSE']:
    distances.extend(
        ['{}_{}'.format(t, dist_measure) for t in config['targets']])
    distances.append(dist_measure)
for lim in lims:
    for d in distances:
        print("Working on {}".format(d.upper()))
        figPath = "/home/buck06191/Dropbox/phd/Bayesian_fitting/{}/{}/{}/Figures/{}".format(
            model_name, DATASET, lim, d)

        dir_util.mkpath(figPath)
        print("Plotting total histogram")
        hist1 = histogram_plot(results, distance=d, frac=1)
        hist1.savefig(os.path.join(figPath,
                                   'full_histogram_{}.png'.format(DATASET)),
                      bbox_inches='tight')
        print("Plotting fraction histogram")
        hist2 = histogram_plot(results, distance=d, limit=lim)
        hist2.savefig(os.path.join(
            figPath, 'tol_{}_histogram_{}.png'.format(
                str(lim).replace('.', '_'), DATASET)),
                      bbox_inches='tight')
        print("Considering {} lowest values".format(lim))
        # print("Generating scatter plot")
        # scatter_dist_plot(results, params, tolerance=tol, n_ticks=4)
        print("Generating KDE plot")
Example #54
0
def do_training(args, module, data_train, data_val, begin_epoch=0):
    from distutils.dir_util import mkpath
    from log_util import LogUtil

    log = LogUtil().getlogger()
    mkpath(os.path.dirname(get_checkpoint_path(args)))

    #seq_len = args.config.get('arch', 'max_t_count')
    batch_size = args.config.getint('common', 'batch_size')
    save_checkpoint_every_n_epoch = args.config.getint(
        'common', 'save_checkpoint_every_n_epoch')
    save_checkpoint_every_n_batch = args.config.getint(
        'common', 'save_checkpoint_every_n_batch')
    enable_logging_train_metric = args.config.getboolean(
        'train', 'enable_logging_train_metric')
    enable_logging_validation_metric = args.config.getboolean(
        'train', 'enable_logging_validation_metric')

    contexts = parse_contexts(args)
    num_gpu = len(contexts)
    eval_metric = STTMetric(batch_size=batch_size,
                            num_gpu=num_gpu,
                            is_logging=enable_logging_validation_metric,
                            is_epoch_end=True)
    # tensorboard setting
    loss_metric = STTMetric(batch_size=batch_size,
                            num_gpu=num_gpu,
                            is_logging=enable_logging_train_metric,
                            is_epoch_end=False)

    optimizer = args.config.get('optimizer', 'optimizer')
    learning_rate = args.config.getfloat('train', 'learning_rate')
    learning_rate_annealing = args.config.getfloat('train',
                                                   'learning_rate_annealing')

    mode = args.config.get('common', 'mode')
    num_epoch = args.config.getint('train', 'num_epoch')
    clip_gradient = args.config.getfloat('optimizer', 'clip_gradient')
    weight_decay = args.config.getfloat('optimizer', 'weight_decay')
    save_optimizer_states = args.config.getboolean('train',
                                                   'save_optimizer_states')
    show_every = args.config.getint('train', 'show_every')
    optimizer_params_dictionary = json.loads(
        args.config.get('optimizer', 'optimizer_params_dictionary'))
    kvstore_option = args.config.get('common', 'kvstore_option')
    n_epoch = begin_epoch
    is_bucketing = args.config.getboolean('arch', 'is_bucketing')

    if clip_gradient == 0:
        clip_gradient = None
    if is_bucketing and mode == 'load':
        model_file = args.config.get('common', 'model_file')
        model_name = os.path.splitext(model_file)[0]
        model_num_epoch = int(model_name[-4:])

        model_path = 'checkpoints/' + str(model_name[:-5])
        symbol, data_names, label_names = module(1600)
        model = STTBucketingModule(
            sym_gen=module,
            default_bucket_key=data_train.default_bucket_key,
            context=contexts)
        data_train.reset()

        model.bind(data_shapes=data_train.provide_data,
                   label_shapes=data_train.provide_label,
                   for_training=True)
        _, arg_params, aux_params = mx.model.load_checkpoint(
            model_path, model_num_epoch)
        model.set_params(arg_params, aux_params)
        module = model
    else:
        module.bind(data_shapes=data_train.provide_data,
                    label_shapes=data_train.provide_label,
                    for_training=True)

    if begin_epoch == 0 and mode == 'train':
        module.init_params(initializer=get_initializer(args))

    lr_scheduler = SimpleLRScheduler(learning_rate=learning_rate)

    def reset_optimizer(force_init=False):
        optimizer_params = {
            'lr_scheduler': lr_scheduler,
            'clip_gradient': clip_gradient,
            'wd': weight_decay
        }
        optimizer_params.update(optimizer_params_dictionary)
        module.init_optimizer(kvstore=kvstore_option,
                              optimizer=optimizer,
                              optimizer_params=optimizer_params,
                              force_init=force_init)

    if mode == "train":
        reset_optimizer(force_init=True)
    else:
        reset_optimizer(force_init=False)
        data_train.reset()
        data_train.is_first_epoch = True

    #tensorboard setting
    tblog_dir = args.config.get('common', 'tensorboard_log_dir')
    summary_writer = SummaryWriter(tblog_dir)

    while True:

        if n_epoch >= num_epoch:
            break
        loss_metric.reset()
        log.info('---------train---------')
        for nbatch, data_batch in enumerate(data_train):
            module.forward_backward(data_batch)
            module.update()
            # tensorboard setting
            if (nbatch + 1) % show_every == 0:
                module.update_metric(loss_metric, data_batch.label)
            #summary_writer.add_scalar('loss batch', loss_metric.get_batch_loss(), nbatch)
            if (nbatch + 1) % save_checkpoint_every_n_batch == 0:
                log.info('Epoch[%d] Batch[%d] SAVE CHECKPOINT', n_epoch,
                         nbatch)
                module.save_checkpoint(
                    prefix=get_checkpoint_path(args) + "n_epoch" +
                    str(n_epoch) + "n_batch",
                    epoch=(int(
                        (nbatch + 1) / save_checkpoint_every_n_batch) - 1),
                    save_optimizer_states=save_optimizer_states)
        # for the Libri_sample data set, comment this block out to see only the train CER
        log.info('---------validation---------')
        data_val.reset()
        eval_metric.reset()
        for nbatch, data_batch in enumerate(data_val):
            # with is_train=False, batch norm yields a high CER, so keep is_train=True
            module.forward(data_batch, is_train=True)
            module.update_metric(eval_metric, data_batch.label)

        # tensorboard setting
        val_cer, val_n_label, val_l_dist, _ = eval_metric.get_name_value()
        log.info("Epoch[%d] val cer=%f (%d / %d)", n_epoch, val_cer,
                 int(val_n_label - val_l_dist), val_n_label)
        curr_acc = val_cer
        summary_writer.add_scalar('CER validation', val_cer, n_epoch)
        assert curr_acc is not None, 'cannot find Acc_exclude_padding in eval metric'

        data_train.reset()
        data_train.is_first_epoch = False

        # tensorboard setting
        train_cer, train_n_label, train_l_dist, train_ctc_loss = loss_metric.get_name_value(
        )
        summary_writer.add_scalar('loss epoch', train_ctc_loss, n_epoch)
        summary_writer.add_scalar('CER train', train_cer, n_epoch)

        # save checkpoints
        if n_epoch % save_checkpoint_every_n_epoch == 0:
            log.info('Epoch[%d] SAVE CHECKPOINT', n_epoch)
            module.save_checkpoint(prefix=get_checkpoint_path(args),
                                   epoch=n_epoch,
                                   save_optimizer_states=save_optimizer_states)

        n_epoch += 1

        lr_scheduler.learning_rate = learning_rate / learning_rate_annealing

    log.info('FINISH')
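SimpleLRScheduler isn't defined in this snippet; a minimal sketch consistent with how it is used above (its learning_rate attribute is rewritten between epochs) could be:

from mxnet.lr_scheduler import LRScheduler

class SimpleLRScheduler(LRScheduler):
    # Return a fixed rate; the training loop mutates .learning_rate once
    # per epoch to implement the annealing step shown above.
    def __init__(self, learning_rate=0.001):
        super(SimpleLRScheduler, self).__init__()
        self.learning_rate = learning_rate

    def __call__(self, num_update):
        return self.learning_rate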
Example #55
0
    def run(self):

        # Execute sanity checks
        warnings = self._directive_checks()
        if warnings:
            return warnings

        # Fetch builder and environment objects
        env = self.state_machine.document.settings.env
        builder = env.app.builder

        # Determine document directory
        document_dir = dirname(env.doc2path(env.docname))

        # Load content to render
        if not self.arguments:
            content = '\n'.join(self.content)
        else:
            # The source path is either relative to the document or, if
            # absolute, resolved against the configuration directory.
            srcfile = self.arguments[0]

            if isabs(srcfile):
                srcpath = join(env.app.confdir, relpath(srcfile, start='/'))
            else:
                srcpath = join(document_dir, srcfile)

            if not isfile(srcpath):
                warning = self.state_machine.reporter.warning(
                    '{} directive cannot find file {}'.format(
                        self._get_directive_name(), srcfile),
                    line=self.lineno)
                return [warning]

            with open(srcpath, 'rb') as fd:
                content = fd.read().decode('utf-8')

        # Execute plantweb call
        try:
            output, frmt, engine, sha = render(content,
                                               engine=self._get_engine_name())
        except Exception:
            msg = format_exc()
            error = nodes.error(
                '',
                self.state_machine.reporter.error('',
                                                  nodes.literal_block('', msg),
                                                  line=self.lineno))
            return [error]

        # Determine filename
        filename = '{}.{}'.format(sha, frmt)
        imgpath = join(builder.outdir, builder.imagedir, 'plantweb')

        # Create images output folder
        log.debug('imgpath set to {}'.format(imgpath))
        mkpath(imgpath)

        # Write content
        filepath = join(imgpath, filename)

        with open(filepath, 'wb') as fd:
            fd.write(output)

        log.debug('Wrote image file {}'.format(filepath))

        # Default to align center
        if 'align' not in self.options:
            self.options['align'] = 'center'

        # Determine relative path to image from source document directory
        filepath_relative = relpath(filepath, document_dir)
        log.debug('Image relative path {}'.format(filepath_relative))

        # Run Image directive
        self.arguments = [filepath_relative]
        return Image.run(self)
Example #56
0
"""tuple: original resolution of camera."""
RATIO = original_resolution[0] / original_resolution[1]
"""float: aspect ratio of the camera."""
# Initial parameters and variables
SIZE = screen_size[1]  # 640
"""int: height of preview image."""
RESOLUTION = (int(SIZE * RATIO), SIZE)
"""tuple: scaled resolution of preview."""
TODAY = datetime.today().strftime("%d-%m-%y")
"""str: today's date for use in naming directories."""
# CURRENT_DIR = path.abspath(path.dirname(__file__))
BASE_DIR = filedialog.askdirectory(initialdir='/media/pi')
"""str:  base directory to put photos."""
PHOTO_DIR = path.join(BASE_DIR, "photos", TODAY)
"""str: directory to store images in."""
dir_util.mkpath(PHOTO_DIR)
print("Base directory set to {}".format(PHOTO_DIR))
current_dirs = os.listdir(PHOTO_DIR)
"""list: List of current directories inside `PHOTO_DIR`."""


def get_uid(current_dirs):
    """ Get current user ID.
    Args:
        current_dirs (list of str): List of directories currently in PHOTO_DIR.
    Returns:
        current_uid (int): Current user ID
    """

    # current_uid = 1
    if len(current_dirs) != 0:
Example #57
0
    def build_library(
        self,
        library,
        pkg_config_name,
        local_source=None,
        supports_non_srcdir_builds=True,
    ):
        log.info("checking if library '%s' is installed", library)
        try:
            build_args = self.pkgconfig(pkg_config_name)
            log.info("found '%s' installed, using it", library)
        except CalledProcessError:

            # If local_source is not specified, then immediately fail.
            if local_source is None:
                raise DistutilsExecError("library '%s' is not installed" % library)

            log.info("building library '%s' from source", library)

            env = self.env()

            # Determine which compilers we are to use, and what flags.
            # This is based on what distutils.sysconfig.customize_compiler()
            # does, but that function has a problem that it doesn't produce
            # necessary (e.g. architecture) flags for C++ compilers.
            cc, cxx, opt, cflags = get_config_vars("CC", "CXX", "OPT", "CFLAGS")
            cxxflags = cflags

            if "CC" in env:
                cc = env["CC"]
            if "CXX" in env:
                cxx = env["CXX"]
            if "CFLAGS" in env:
                cflags = opt + " " + env["CFLAGS"]
            if "CXXFLAGS" in env:
                cxxflags = opt + " " + env["CXXFLAGS"]

            # Use a subdirectory of build_temp as the build directory.
            build_temp = os.path.realpath(os.path.join(self.build_temp, library))

            # Destination for headers and libraries is build_clib.
            build_clib = os.path.realpath(self.build_clib)

            # Create build directories if they do not yet exist.
            mkpath(build_temp)
            mkpath(build_clib)

            if not supports_non_srcdir_builds:
                self._stage_files_recursive(local_source, build_temp)

            # Run configure.
            cmd = [
                "/bin/sh",
                os.path.join(os.path.realpath(local_source), "configure"),
                "--prefix=" + build_clib,
                "--disable-shared",
                "--with-pic",
                "--disable-maintainer-mode",
            ]

            log.info("%s", " ".join(cmd))
            check_call(
                cmd,
                cwd=build_temp,
                env=dict(env, CC=cc, CXX=cxx, CFLAGS=cflags, CXXFLAGS=cxxflags),
            )

            # Run make install.
            cmd = ["make", "install"]
            log.info("%s", " ".join(cmd))
            check_call(cmd, cwd=build_temp, env=env)

            build_args = self.pkgconfig(pkg_config_name)

        return build_args
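build_library() leans on a pkgconfig() helper that isn't shown; a rough sketch of what it might do (an assumption, not the project's actual method):

from subprocess import check_output

def pkgconfig(name):
    # Ask pkg-config for the flags; CalledProcessError propagates when
    # the library is unknown, which triggers the source build above.
    cflags = check_output(['pkg-config', '--cflags', name],
                          universal_newlines=True).split()
    libs = check_output(['pkg-config', '--libs', name],
                        universal_newlines=True).split()
    return {'extra_compile_args': cflags, 'extra_link_args': libs}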
Example #58
0
 def mkpath(self, name, mode=511):  # 511 == 0o777
     mkpath(name, mode, dry_run=self.dry_run)
Example #59
0
def copy_tree(
    src,
    dst,
    preserve_mode=1,
    preserve_times=1,
    preserve_symlinks=0,
    update=0,
    verbose=0,
    dry_run=0,
    condition=None,
):
    """
    Copy an entire directory tree 'src' to a new location 'dst'.  Both
    'src' and 'dst' must be directory names.  If 'src' is not a
    directory, raise DistutilsFileError.  If 'dst' does not exist, it is
    created with 'mkpath()'.  The end result of the copy is that every
    file in 'src' is copied to 'dst', and directories under 'src' are
    recursively copied to 'dst'.  Return the list of files that were
    copied or might have been copied, using their output name.  The
    return value is unaffected by 'update' or 'dry_run': it is simply
    the list of all files under 'src', with the names changed to be
    under 'dst'.

    'preserve_mode' and 'preserve_times' are the same as for
    'copy_file'; note that they only apply to regular files, not to
    directories.  If 'preserve_symlinks' is true, symlinks will be
    copied as symlinks (on platforms that support them!); otherwise
    (the default), the destination of the symlink will be copied.
    'update' and 'verbose' are the same as for 'copy_file'.
    """
    assert isinstance(src, (str, unicode)), repr(src)
    assert isinstance(dst, (str, unicode)), repr(dst)

    from distutils import log
    from distutils.dep_util import newer
    from distutils.dir_util import mkpath
    from distutils.errors import DistutilsFileError

    src = fsencoding(src)
    dst = fsencoding(dst)

    if condition is None:
        condition = skipscm

    if not dry_run and not zipio.isdir(src):
        raise DistutilsFileError("cannot copy tree '%s': not a directory" %
                                 src)
    try:
        names = zipio.listdir(src)
    except os.error as exc:
        (errno, errstr) = exc.args
        if dry_run:
            names = []
        else:
            raise DistutilsFileError("error listing files in '%s': %s" %
                                     (src, errstr))

    if not dry_run:
        mkpath(dst)

    outputs = []

    for n in names:
        src_name = os.path.join(src, n)
        dst_name = os.path.join(dst, n)
        if (condition is not None) and (not condition(src_name)):
            continue

        # Note: using zipio's internal _locate function throws an IOError on
        # dead symlinks, so handle it here.
        if os.path.islink(src_name) and not os.path.exists(
                os.readlink(src_name)):
            continue

        if preserve_symlinks and zipio.islink(src_name):
            link_dest = zipio.readlink(src_name)
            log.info("linking %s -> %s", dst_name, link_dest)
            if not dry_run:
                if update and not newer(src, dst_name):
                    pass
                else:
                    make_symlink(link_dest, dst_name)
            outputs.append(dst_name)

        elif zipio.isdir(src_name) and not os.path.isfile(src_name):
            # ^^^ this odd test ensures that resource files that
            # happen to be a zipfile won't get extracted.
            outputs.extend(
                copy_tree(
                    src_name,
                    dst_name,
                    preserve_mode,
                    preserve_times,
                    preserve_symlinks,
                    update,
                    dry_run=dry_run,
                    condition=condition,
                ))
        else:
            copy_file(
                src_name,
                dst_name,
                preserve_mode,
                preserve_times,
                update,
                dry_run=dry_run,
            )
            outputs.append(dst_name)

    return outputs
Example #60
0
    def run(self):
        """Run the distutils command"""
        log.info("installing library code to %s" % self.bdist_dir)

        log.info("generating INFO_SRC and INFO_BIN files")
        generate_info_files()

        self.dist_name = get_dist_name(self.distribution,
                                       source_only_dist=True,
                                       python_version=get_python_version()[0])
        self.dist_target = os.path.join(self.dist_dir, self.dist_name)
        log.info("distribution will be available as '%s'" % self.dist_target)

        # build command: just to get the build_base
        cmdbuild = self.get_finalized_command("build")
        self.build_base = cmdbuild.build_base

        # install command
        install = self.reinitialize_command('install_lib',
                                            reinit_subcommands=1)
        install.compile = False
        install.warn_dir = 0
        install.install_dir = self.bdist_dir

        log.info("installing to %s" % self.bdist_dir)
        self.run_command('install_lib')

        # install extra files
        extra_files = {}
        for src, dest in extra_files.items():
            self.copy_file(src, dest)

        # install_egg_info command
        cmd_egginfo = self.get_finalized_command('install_egg_info')
        cmd_egginfo.install_dir = self.bdist_dir
        self.run_command('install_egg_info')
        # we need the py2.x converted to py2 in the filename
        old_egginfo = cmd_egginfo.get_outputs()[0]
        new_egginfo = old_egginfo.replace('-py' + sys.version[:3],
                                          '-py' + get_python_version()[0])
        move_file(old_egginfo, new_egginfo)

        # create distribution
        info_files = [
            ('README.txt', 'README.txt'),
            ('LICENSE.txt', 'LICENSE.txt'),
            ('README.rst', 'README.rst'),
            ('CONTRIBUTING.rst', 'CONTRIBUTING.rst'),
            ('docs/INFO_SRC', 'INFO_SRC'),
            ('docs/INFO_BIN', 'INFO_BIN'),
        ]
        copy_tree(self.bdist_dir, self.dist_target)
        mkpath(self.dist_target)
        for src, dst in info_files:
            if dst is None:
                copy_file(src, self.dist_target)
            else:
                copy_file(src, os.path.join(self.dist_target, dst))

        if not self.keep_temp:
            remove_tree(self.build_base, dry_run=self.dry_run)