Example 1
def export(user, directory=None, warnings=True):
    """
    Build a temporary directory with the visualization.
    Returns the local path where files have been written.

    Examples
    --------

        >>> bandicoot.visualization.export(U)
        Successfully exported the visualization to /tmp/tmpsIyncS

    """
    # Get dashboard directory
    current_file = os.path.realpath(__file__)
    current_path = os.path.dirname(current_file)
    dashboard_path = os.path.join(current_path, '../dashboard_src')

    # Create a temporary directory if needed and copy all files
    if directory:
        dirpath = directory
    else:
        dirpath = tempfile.mkdtemp()

    # Copy all files except source code
    copy_tree(dashboard_path + '/public', dirpath, update=1)

    # Export indicators
    data = user_data(user)
    bc.io.to_json(data, dirpath + '/data/bc_export.json', warnings=False)

    if warnings:
        print("Successfully exported the visualization to %s" % dirpath)

    return dirpath
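
A note on the update=1 flag seen in copy_tree calls throughout these examples: per the distutils documentation, update=1 makes copy_tree copy a file only when the destination copy is missing or older than the source. A minimal sketch of that behavior (the paths here are hypothetical):

from distutils.dir_util import copy_tree

# With update=1, destination files at least as new as their source
# counterparts are skipped; everything else is (re)copied.
# copy_tree returns the list of destination paths that were copied
# (or that would have been copied, under dry_run=1).
copied = copy_tree('dashboard_src/public', '/tmp/viz', update=1)
print(copied)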
Example 2
    def copyHtml(self):
        htmlDir = os.path.join(self.LOCALEDIR, "html")

        try:
            copy_tree(self.HTMLDIR, htmlDir)
        except Exception as e:
            self.raiseError("Unable to copy html from '%s': %s." % (self.HTMLDIR, e))
Example 3
def install():
    autotools.rawInstall("INSTALL_PREFIX=%s MANDIR=/usr/share/man" % get.installDIR())

    # Rename conflicting manpages
    pisitools.rename("/usr/share/man/man1/passwd.1", "ssl-passwd.1")
    pisitools.rename("/usr/share/man/man3/rand.3", "ssl-rand.3")
    pisitools.rename("/usr/share/man/man3/err.3", "ssl-err.3")

    if get.buildTYPE() == "_emul32":
        from distutils.dir_util import copy_tree
        copy_tree("%s/_emul32/lib32/" % get.installDIR(), "%s/usr/lib32" % get.installDIR())
        pisitools.removeDir("/_emul32")
        pisitools.remove("/usr/lib32/*.a")
        path = "%s/usr/lib32/pkgconfig" % get.installDIR()
        for f in shelltools.ls(path): pisitools.dosed("%s/%s" % (path, f), "^(prefix=\/)_emul32", r"\1usr")
        return

    # Move engines to /usr/lib/openssl/engines
    pisitools.dodir("/usr/lib/openssl")
    pisitools.domove("/usr/lib/engines", "/usr/lib/openssl")

    # Certificate stuff
    pisitools.dobin("tools/c_rehash")
    pisitools.dosym("/etc/pki/tls/certs/ca-bundle.crt","/etc/pki/tls/cert.pem")


    # Create CA dirs
    for cadir in ["CA", "CA/private", "CA/certs", "CA/crl", "CA/newcerts"]:
        pisitools.dodir("/etc/pki/%s" % cadir)

    # No static libs
    pisitools.remove("/usr/lib/*.a")

    pisitools.dohtml("doc/*")
    pisitools.dodoc("CHANGES*", "FAQ", "LICENSE", "NEWS", "README", "doc/*.txt")
Example 4
    def _copy_sdk_template(self, gen_api_root, package_type):
        """ Copy sdk template to gen_api_root directory.

        Args:
            gen_api_root (str): Root directory for generated APIs.
            package_type (str): SDK template to copy from.
                            Valid options are: core, packages, service.
        """
        target_dir = os.path.join(self.ydk_root, 'sdk', self.language)
        if self.language == 'python':
            if package_type == 'service':
                service_name = gen_api_root.split('-')[-1]
                target_dir = os.path.join(target_dir, service_name)
            else:
                target_dir = os.path.join(target_dir, package_type)
        elif self.language == 'cpp':
            if package_type == 'packages':
                target_dir = os.path.join(target_dir, package_type)
            elif package_type == 'core':
                target_dir = os.path.join(target_dir, 'core')
            elif package_type == 'service':
                service_name = gen_api_root.split('-')[-1]
                target_dir = os.path.join(target_dir, service_name)
        elif self.language == 'go':
            if package_type == 'service':
                service_name = gen_api_root.split('-')[-1]
                target_dir = os.path.join(target_dir, service_name)
            else:
                target_dir = os.path.join(target_dir, package_type)

        shutil.rmtree(gen_api_root)
        logger.debug('Copying %s to %s' % (target_dir, gen_api_root))
        dir_util.copy_tree(target_dir, gen_api_root)
Example 5
def install():
    pisitools.dodir("/usr/share")

    wanteddirs = []
    for file_ in shelltools.ls(get.workDIR()):
        if shelltools.isDirectory(file_) and "texmf" not in file_:
            wanteddirs.append(file_)

    for folder in wanteddirs:
        pisitools.insinto("/usr/share/texmf-dist", folder)

    if shelltools.can_access_directory("texmf-dist"):
        # Recursively copy one directory on top of another, overwriting duplicate files too
        copy_tree("texmf-dist", "%s/usr/share/texmf-dist" % get.installDIR())

    ## chmod of script files
    script_dir = get.installDIR() + "/usr/share/texmf-dist/scripts"
    if shelltools.can_access_directory(script_dir):
        for root, dirs, files in os.walk(script_dir):
            for name in files:
                shelltools.chmod(os.path.join(root, name), 0755)

    # copy config file to texmf-config
    pisitools.dodir("/etc/texmf/tex/context/config")
    shelltools.copy("%s/usr/share/texmf-dist/tex/context/config/cont-usr.tex" % get.installDIR(), \
                    "%s/etc/texmf/tex/context/config/cont-usr.tex" % get.installDIR())

    # old packages, we will not provide them
    pisitools.remove("/usr/share/texmf-dist/tex/plain/config/omega.ini")
    pisitools.remove("/usr/share/texmf-dist/tex/plain/config/aleph.ini")
    pisitools.removeDir("/usr/share/texmf-dist/scripts/context/stubs/mswin/")
Example 6
def fetch(in_base, in_id,
	out_base='/var/tmp/repositorg/',
	in_id_is_file=False,
	in_path='',
	):
	"""Fetch data and reposit verbatim inside repositorg temporal directory

	Parameters
	----------
	in_base : str
		Base input directory from which to copy files.
	in_id : str
		UUID of the source device.
		This string will be added as a subdirectory under the `out_base` directory.
	out_base : str, optional
		Output base directory.
		A subdirectory named according to the value of `in_id` will be created under this directory to contain the output files.
	in_id_is_file : bool, optional
		Whether the value passed to `in_id` is a file name or path.
		If `True`, the basename of `in_id` will be extracted and its file extension stripped.
	in_path : str, optional
		An additional subpath to be added under the `in_base` directory.
	"""

	if in_id_is_file:
		in_id = os.path.basename(in_id)
		in_id = os.path.splitext(in_id)[0]
	in_path = os.path.join(in_base,in_path)
	out_path = os.path.join(out_base,in_id)

	if not os.path.isdir(in_path):
		return False
	dir_util.copy_tree(in_path, out_path,
		preserve_mode=0
		)
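
For reference alongside the preserve_mode=0 call in fetch() above, this is the keyword surface of copy_tree as documented for distutils; the src/dst paths in the sketch are hypothetical:

from distutils.dir_util import copy_tree

# Documented signature:
# copy_tree(src, dst, preserve_mode=1, preserve_times=1,
#           preserve_symlinks=0, update=0, verbose=1, dry_run=0)
# preserve_mode=0, as used in fetch() above, does not replicate the source
# permission bits; destination files are created with default modes instead.
copy_tree('/var/tmp/in', '/var/tmp/out', preserve_mode=0, preserve_times=0)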
Example 7
  def constructPreBuiltHTMITFakeroot(self):
    """
    Construct fakeroot from prebuilt htm-it

    :returns: iteration count and SHA of the products repo in the fakeroot
    :rtype: tuple
    """

    config = self.config
    logger = self.logger
    productsDirectory = self.productsDirectory
    logger.debug("Creating %s", productsDirectory)
    mkpath(productsDirectory)
    copy_tree(config.productsDir, productsDirectory)
    iteration = git.getCommitCount(productsDirectory, logger=logger)

    with changeToWorkingDir(productsDirectory):
      actualSHA = git.getCurrentSha(logger=logger)

    # Set extra python path
    self.setPythonPath()

    # Clean HTM-IT Scripts
    self.cleanScripts()

    # Purge anything not whitelisted
    self.purgeBlacklistedStuff()

    return (iteration, actualSHA)
Example 8
def openTestProject(name):
    orgPath = os.path.join(os.path.dirname(__file__), "data", "projects", name)
    destPath = tempFolderInTempFolder()
    copy_tree(orgPath, destPath)
    projectFile = os.path.join(destPath, name + ".qgs")
    if projectFile != QgsProject.instance().fileName():
        iface.addProject(projectFile)
Example 9
    def copy_folder_from(self, source, incremental=False):
        """
        Copies the given source directory to this directory. If incremental is True,
        only files newer than their destination copies are overwritten.

        """ 
        # There is a bug in dir_util that makes copy_tree crash if a folder in the
        # tree has been deleted before and re-added now. To work around the bug, we
        # first walk the tree and create the directories that are needed.
        # 
        # pylint: disable-msg=C0111,W0232          
        target_root = self          
        class _DirCreator:
            @staticmethod
            def visit_folder(folder): 
                target = folder.get_mirror_folder(
                            source.parent, target_root, ignore_root=True)  
                target.make() 
                
        source.walk(_DirCreator)         
        
        dir_util.copy_tree(str(source),
                        self.child(source.name),
                        preserve_symlinks=True,
                        update=incremental)
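
The dir_util bug described in the comment above stems from a module-level cache: mkpath() records every directory it creates in the private dict dir_util._path_created and skips paths it has already seen, so a directory deleted between two copy_tree calls is never recreated. Example 28 below resets that cache directly; a minimal sketch of the same workaround (the paths are hypothetical, and _path_created is a private attribute that may change between Python versions):

from distutils import dir_util

dir_util.copy_tree('src_tree', 'dst_tree')
# ... suppose dst_tree (or one of its subdirectories) is deleted here ...
# Reset the private directory cache so copy_tree recreates the paths.
dir_util._path_created = {}
dir_util.copy_tree('src_tree', 'dst_tree')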
Example 10
    def updateIWSPlugin(self):
        self.logger.info(u"Performing update of DomoPad" "s IWS plugin...")

        # determine the IWS server directory
        indigoInstallPath = indigo.server.getInstallFolderPath()
        pluginBasePath = os.getcwd()

        mainPluginHome = os.path.join(pluginBasePath, "AndroidClientHelper")
        iwsPluginHome = os.path.join(indigoInstallPath, "IndigoWebServer/plugins/AndroidClientHelper")

        self.logger.info(u"Source IWS directory: " + mainPluginHome)
        self.logger.info(u"Target IWS directory: " + iwsPluginHome)

        # ensure that we have the correct source directory...
        if not os.path.exists(mainPluginHome):
            self.logger.error(
                u"ERROR: Source directory not found!  AndroidClientHelper IWS plugin install could not complete."
            )
            return

        # execute the directory copy now...
        try:
            copy_tree(mainPluginHome, iwsPluginHome, preserve_mode=1)
            self.logger.info(
                u"AndroidClientHelper successfully installed/updated. Restarting Indigo IWS server to complete install."
            )
            self.restartIWS()
        except:
            self.logger.error(
                u"Error copying AndroidClientHelper, AndroidClientHelper IWS plugin install could not complete."
            )
Example 11
 def bookmarkCurrentPage(self, dimdimID, newResourceID):
     
     meeting = self.getMeeting(dimdimID)
     resourceList = meeting.get('resources')
     
     currentResourceID = meeting.get('currentResource')
     currentResourceCopy = self.getCurrentResource(dimdimID)
     currentResourceType = self.getResourceType(dimdimID, currentResourceID)
     
     currentPageNum = currentResourceCopy.get('currentPage')
     currentPageCopy = self.getCurrentPage(dimdimID)
     
     newResource = {'currentPage':0, 'lastPage':0, 'pages': {0:currentPageCopy}}
     
     sourcePath = os.path.join(osconfig.cobArchive(), dimdimID, currentResourceType,
                               currentResourceID, str(currentPageNum))
     destPath = os.path.join(osconfig.cobArchive(), dimdimID, currentResourceType,
                             newResourceID, '0')
     try:
         shutil.rmtree(destPath)
     except:
         pass
     
     dir_util.copy_tree(sourcePath, destPath)
     typeList = resourceList.get(currentResourceType)
     
     self.stateLock.acquire()
     typeList.update({newResourceID : newResource})
     self.exportStateMachine(dimdimID)
     self.stateLock.release()
     
     return
Example 12
    def handle(self, dump_path, **options):
        if dump_path == "-":
            arc = tarfile.open(fileobj=sys.stdin, mode="r:gz")
        else:
            arc = tarfile.open(dump_path, mode="r:gz")
        base_path = tempfile.mkdtemp()
        arc.extractall(path=base_path)
        path = glob(os.path.join(base_path, "*"))[0]

        # media files
        # shutil.copytree(os.path.join(path, 'media'), settings.MEDIA_ROOT)
        dir_util.copy_tree(os.path.join(path, "media"), settings.MEDIA_ROOT)

        # load db fields
        old_stdout = sys.stdout
        sys.stdout = open(os.path.join(path, "backup_db_dump.json"), "w")
        call_command("dumpdata", indent=4)
        sys.stdout.close()
        sys.stdout = old_stdout
        call_command("flush", noinput=True, interactive=False)
        call_command("reset", "contenttypes", "auth", noinput=True, interactive=False)
        call_command("loaddata", os.path.join(path, "db_dump.json"))

        # rebase FilepathFields
        call_command("rebase_filepathfields", os.path.join(path, "fpf_bases_dump.json"))
Example 13
def install():
    pisitools.dodir("/usr/share")

    wanteddirs = []
    for file_ in shelltools.ls('.'):
        if shelltools.isDirectory(file_) and "texmf" not in file_:
            wanteddirs.append(file_)

    for folder in wanteddirs:
        pisitools.insinto("/usr/share/texmf-dist", folder)

    if shelltools.can_access_directory("texmf-dist"):
        # Recursively copy one directory on top of another, overwriting duplicate files too
        copy_tree("texmf-dist", "%s/usr/share/texmf-dist" % get.installDIR())

    ## chmod of script files
    script_dir = get.installDIR() + "/usr/share/texmf-dist/scripts"
    if shelltools.can_access_directory(script_dir):
        for root, dirs, files in os.walk(script_dir):
            for name in files:
                shelltools.chmod(os.path.join(root, name), 0755)

    pisitools.remove("/usr/share/texmf-dist/scripts/m-tx/m-tx.lua")
    pisitools.remove("/usr/share/texmf-dist/scripts/musixtex/musixtex.lua")
    pisitools.remove("/usr/share/texmf-dist/scripts/musixtex/musixflx.lua")
    pisitools.remove("/usr/share/texmf-dist/scripts/pmx/pmx2pdf.lua")
Example 14
def extract_rpclib(args):
    """
    THE extraction function
    """
    workdir = tempfile.mkdtemp()
    rpclib_root = args.rpclib_root
    new_rpclib_root = tempfile.mkdtemp()
    copy_tree(rpclib_root, new_rpclib_root)
    patch_rpclib(new_rpclib_root)
    copy_important_files(workdir, new_rpclib_root)
    version = extract_rpclib_version(new_rpclib_root)
    shutil.rmtree(new_rpclib_root)
    cmake_template = Template(
        filename=os.path.join(os.path.dirname(os.path.realpath(__file__)), "rpc_CMakeLists.txt"))
    real_cmake_file = cmake_template.render(
        rpclib_major_version=version[0],
        rpclib_minor_version=version[1],
        rpclib_patch_version=version[2]
    )
    with open(os.path.join(workdir, "CMakeLists.txt"), "w") as cmake_file:
        cmake_file.write(real_cmake_file)
        cmake_file.truncate()
    fix_naming(workdir)
    copy_tree(workdir, args.rpclib_target, update=True)
    shutil.rmtree(workdir)
Example 15
    def run(self):
        if sys.platform not in ("linux2", "win32"):
            msg = "**Error: Can't install on this platform: %s" % sys.platform
            print msg
            sys.exit(1)

        if not self.single_version_externally_managed:
            self.do_egg_install()
        else:
            _install.run(self)

        # check if someone else is copying the files to the destination
        # if self.single_version_externally_managed:# and not self.root:
        #    return

        # install bauble.desktop and icons
        if sys.platform == "linux2":
            # install everything in share
            dir_util.copy_tree(os.path.join(self.build_base, "share"), os.path.join(self.install_data, "share"))
        elif sys.platform == "win32":
            # install only i18n files
            locales = os.path.dirname(locale_path)
            install_cmd = self.get_finalized_command("install")
            build_base = install_cmd.build_base
            src = os.path.join(build_base, locales)
            dir_util.copy_tree(src, os.path.join(self.install_data, locales))
Example 16
def windows_package(args):
    pkgfile = 'windows_package.7z'
    pkgdir = os.path.abspath('windows_package')
    logging.info("Packaging libraries and headers in package: %s", pkgfile)
    j = os.path.join
    pkgdir_lib = os.path.abspath(j(pkgdir, 'lib'))
    with remember_cwd():
        os.chdir(args.output)
        logging.info("Looking for static libraries and dlls in: \"%s", os.getcwd())
        libs = list(glob.iglob('**/*.lib', recursive=True))
        dlls = list(glob.iglob('**/*.dll', recursive=True))
        os.makedirs(pkgdir_lib, exist_ok=True)
        for lib in libs:
            logging.info("packing lib: %s", lib)
            shutil.copy(lib, pkgdir_lib)
        for dll in dlls:
            logging.info("packing dll: %s", dll)
            shutil.copy(dll, pkgdir_lib)
        os.chdir(get_mxnet_root())
        logging.info('packing python bindings')
        copy_tree('python', j(pkgdir, 'python'))
        logging.info('packing headers')
        copy_tree('include', j(pkgdir, 'include'))
        logging.info("Compressing package: %s", pkgfile)
        check_call(['7z', 'a', pkgfile, pkgdir])
Example 17
 def add_local_binaries_to_stage(self):
     progress("Adding local binaries to staging directory")
     safe_make_directory("appleseed/bin")
     dir_util.copy_tree(os.path.join(self.settings.appleseed_path, "sandbox/bin", self.settings.configuration), "appleseed/bin/")
     shutil.copy(os.path.join(self.settings.appleseed_path, "sandbox/bin", exe("maketx")), "appleseed/bin/")
     shutil.copy(os.path.join(self.settings.appleseed_path, "sandbox/bin", exe("oslc")), "appleseed/bin/")
     shutil.copy(os.path.join(self.settings.appleseed_path, "sandbox/bin", exe("oslinfo")), "appleseed/bin/")
Example 18
        def run(self):
            # TODO: make sure we have everything installed that we need to
            # bundle e.g. sqlite, psycopg2, others...
            _py2exe_cmd.run(self)
            # install locale files
            locales = os.path.dirname(locale_path)
            build_base = self.get_finalized_command("build").build_base
            # print build_base
            src = os.path.join(build_base, locales)
            dir_util.copy_tree(src, os.path.join(self.dist_dir, locales))

            # copy GTK to the dist directory, assuming PyGTK
            # all-in-one installer
            gtk_root = "c:\\python27\\lib\\site-packages\\gtk-2.0\\runtime"
            dist_gtk = os.path.join(self.dist_dir, "gtk")
            import shutil

            if not os.path.exists(dist_gtk):
                ignore = shutil.ignore_patterns("src", "gtk-doc", "icons", "man", "demo", "aclocal", "doc", "include")
                shutil.copytree(gtk_root, dist_gtk, ignore=ignore)

            # register the pixbuf loaders
            exe = "%s\\bin\\gdk-pixbuf-query-loaders.exe" % dist_gtk
            dest = "%s\\etc\\gtk-2.0\\gdk-pixbuf.loaders" % dist_gtk
            cmd = 'call "%s" > "%s"' % (exe, dest)
            print cmd
            os.system(cmd)

            # copy the the MS-Windows gtkrc to make it the default theme
            rc = "%s\\share\\themes\\MS-Windows\\gtk-2.0\\gtkrc" % dist_gtk
            dest = "%s\\etc\\gtk-2.0" % dist_gtk
            file_util.copy_file(rc, dest)
Example 19
def copy_sources():
    """Copy the C sources into the source directory.
    This rearranges the source files under the python distribution
    directory.
    """
    src = []

    try:
        dir_util.remove_tree("src/")
    except (IOError, OSError):
        pass

    dir_util.copy_tree("../../arch", "src/arch/")
    dir_util.copy_tree("../../include", "src/include/")

    src.extend(glob.glob("../../*.[ch]"))
    src.extend(glob.glob("../../*.mk"))

    src.extend(glob.glob("../../Makefile"))
    src.extend(glob.glob("../../LICENSE*"))
    src.extend(glob.glob("../../README"))
    src.extend(glob.glob("../../*.TXT"))
    src.extend(glob.glob("../../RELEASE_NOTES"))
    src.extend(glob.glob("../../make.sh"))
    src.extend(glob.glob("../../CMakeLists.txt"))

    for filename in src:
        outpath = os.path.join("./src/", os.path.basename(filename))
        log.info("%s -> %s" % (filename, outpath))
        shutil.copy(filename, outpath)
Example 20
def install():
    """Install MLTSP config file and create data folders.

    Copies mltsp.yaml.example to ~/.config/mltsp/mltsp.yaml and creates data
    directories as described in `mltsp.config.cfg['paths']`
    """
    import os
    import shutil
    from distutils.dir_util import copy_tree

    data_src = os.path.join(os.path.dirname(__file__), "data")
    data_dst = os.path.expanduser('~/.local/mltsp/')
    copy_tree(data_src, data_dst, update=1)
    print("Created data directory at {} and copied sample data.".format(
        os.path.expanduser('~/.local/mltsp/')))

    cfg = os.path.expanduser('~/.config/mltsp/mltsp.yaml')
    cfg_dir = os.path.dirname(cfg)

    if os.path.exists(cfg):
        print('Existing configuration at {} -- not overwriting.'.format(cfg))
        return

    if not os.path.exists(cfg_dir):
        os.makedirs(cfg_dir)

    shutil.copyfile(os.path.join(os.path.dirname(__file__),
                                 'mltsp.yaml.example'),
                    cfg)

    print('Installed {}'.format(cfg))
    print('Please customize this file with authentication tokens, etc.')
Example 21
def copy_static(args):
    # Both branches of the original check ('../' in args['html_folder'] or not)
    # performed the identical copy, so a single call suffices.
    dir_util.copy_tree(os.path.join(os.path.dirname(__file__), 'static'),
                       os.path.join(os.path.abspath(args['html_folder']), 'static'))
Example 22
    def __deploy_app(self):
        """
            Deploy the local SO bundle.
            Assumptions here:
            - a git repo is returned
            - the bundle is not managed by git
        """
        # create temp dir...and clone the remote repo provided by OpS
        tmp_dir = tempfile.mkdtemp()
        LOG.debug('Cloning git repository: ' + self.repo_uri + ' to: ' + tmp_dir)
        cmd = ' '.join(['git', 'clone', self.repo_uri, tmp_dir])
        os.system(cmd)

        # Get the SO bundle
        bundle_loc = CONFIG.get('service_manager', 'bundle_location', '')
        if bundle_loc == '':
            raise Exception('No bundle_location parameter supplied in sm.cfg')
        LOG.debug('Bundle to add to repo: ' + bundle_loc)
        dir_util.copy_tree(bundle_loc, tmp_dir)

        self.__add_openshift_files(bundle_loc, tmp_dir)

        # add & push to OpenShift
        os.system(' '.join(['cd', tmp_dir, '&&', 'git', 'add', '-A']))
        os.system(' '.join(['cd', tmp_dir, '&&', 'git', 'commit', '-m', '"deployment of SO for tenant ' +
                            self.extras['tenant_name'] + '"', '-a']))
        LOG.debug('Pushing new code to remote repository...')
        os.system(' '.join(['cd', tmp_dir, '&&', 'git', 'push']))

        shutil.rmtree(tmp_dir)
Example 23
def _copy_resources(template, instance, resource, dir=False):
    if dir:
        src = path.join(template, resource)
        dst = path.join(instance, resource)
        copy_tree(src, dst)
    else:
        raise NotImplementedError()
Example 24
 def mergeDir(self, path, dest):
     """
     We don't want to delete the old dir, since it might contain third
     party plugin content from previous installations; we simply want to
     merge the existing directory with the new one.
     """
     copy_tree(path, os.path.join(dest, path), preserve_symlinks=True)
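
The preserve_symlinks=True flag above is what makes this merge safe for third party plugin content: links inside path are recreated as links rather than dereferenced and copied as regular files. A small sketch of the difference, with hypothetical paths:

import os
from distutils.dir_util import copy_tree

# preserve_symlinks=0 (the default): a symlink in the source is resolved and
# its target's contents are copied. preserve_symlinks=1: the destination gets
# a symlink pointing at the same target.
copy_tree('src_dir', 'dst_dir', preserve_symlinks=True)
print(os.path.islink(os.path.join('dst_dir', 'some_link')))  # True if the source had one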
Example 25
 def copy_base_course(self):
     """ Create all the base files needed to restore the course. """
     print '  > Creating course base files... ',
     if not os.path.exists(self.course_dir):
         os.makedirs(self.course_dir)
     copy_tree(settings.BASE_COURSE, self.course_dir)
     print 'Done.'
Example 26
def pyinstall(source_folder):
    pyinstaller_path = os.path.join(os.path.curdir, 'pyinstaller')
    _install_pyintaller(pyinstaller_path)

    try:
        shutil.rmtree(os.path.join(pyinstaller_path, 'conan'))
    except Exception as e:
        print("Unable to remove old folder", e)
    try:
        shutil.rmtree(os.path.join(pyinstaller_path, 'conan_server'))
    except Exception as e:
        print("Unable to remove old server folder", e)

    conan_path = os.path.join(source_folder, 'conans', 'conan.py')
    conan_server_path = os.path.join(source_folder, 'conans', 'conan_server.py')
    subprocess.call('python pyinstaller.py -y -p %s --console %s' % (source_folder, conan_path),
                    cwd=pyinstaller_path, shell=True)
    _run_bin(pyinstaller_path)

    subprocess.call('python pyinstaller.py -y -p %s --console %s'
                    % (source_folder, conan_server_path),
                    cwd=pyinstaller_path, shell=True)

    conan_bin = os.path.join(pyinstaller_path, 'conan', 'dist', 'conan')
    conan_server_folder = os.path.join(pyinstaller_path, 'conan_server', 'dist', 'conan_server')
    dir_util.copy_tree(conan_server_folder, conan_bin)
    _run_bin(pyinstaller_path)

    return os.path.abspath(os.path.join(pyinstaller_path, 'conan', 'dist', 'conan'))
Example 27
def deploy(config, host):

    # add -H command to set home directory of target user
    new_sudo_command = "sudo -H -S -p '%(sudo_prompt)s'"

    print colors.green('change sudo_prefix from "%s" to "%s"' % (env['sudo_prefix'], new_sudo_command))

    with settings(sudo_prefix=new_sudo_command):

        fixtures_dir = tempfile.mkdtemp()

        print colors.green('create fixtures directory: "%s"' % fixtures_dir)

        try:
            print colors.green('merge fixtures')
            for source_fixtures_dir in reversed(config.fixtures):
                dir_util.copy_tree(source_fixtures_dir, fixtures_dir)

            config.fixtures_path = fixtures_dir

            host.prepair()
            host.check()
            host.setup()

        except Exception as e:
            traceback.print_exc()
            print colors.red('Exception while deploing: "%s"' % e)
        finally:
Example 28
def aggregate(outputdir=OUTPUT_DIR, buildname=DEFAULT_BUILD_NAME):
    """
    Aggregates the captures from various campaigns into the 'default' build
    :return: the path to the aggregate directory
    """
    if not os.path.exists(outputdir):
        raise ValueError("outputdir does not exist at %s!" % outputdir)

    # Aggregate all the capture job data
    outputdir = str(outputdir).rstrip('/')
    buildpaths = glob.glob(outputdir + '/*/')
    aggregate_path = os.path.join(outputdir, buildname)

    if not os.path.exists(aggregate_path):
        LOGGER.debug("Creating path for aggregates at %s", aggregate_path)
        os.makedirs(aggregate_path)

    LOGGER.info("Aggregating build data to %s", aggregate_path)
    # Workaround bug with dir_util
    # See http://stackoverflow.com/questions/9160227/
    dir_util._path_created = {}
    for buildpath in buildpaths:
        if str(buildpath).endswith(DEFAULT_BUILD_NAME + "/"):
            # Don't do this for the default build
            LOGGER.debug("Skipping default path: %s", buildpath)
            continue

        sourcepath = os.path.join(outputdir, buildpath)
        LOGGER.debug("Copying from %s to %s", buildpath,
                     aggregate_path)

        dir_util.copy_tree(sourcepath, aggregate_path, update=1)
    return aggregate_path
Example 29
def cp(source, target, force=True):
    if isinstance(source, list):  # list
        # copy files to dir
        targets = []
        for s in source:
            t = cp(s, target, force)
            targets.append(t)
        return targets
    assert_exists(source)  # assert exists
    if not force and os.path.exists(target):
        return
    if source == target:
        return target
    if os.path.isfile(source) and os.path.isdir(target):
        # target is DIR
        target = os.path.join(target, os.path.basename(source))
    if os.path.isfile(source) or os.path.islink(source):
        if (os.path.exists(target) or os.path.lexists(target)):
            if os.path.isfile(source) != os.path.isfile(target):
                os.unlink(target)
        shutil.copy(source, target)
    if os.path.isdir(source):
        # first create dirs
        if not os.path.exists(target):
            os.makedirs(target)
        dir_util.copy_tree(source, target)
    return target
Example 30
    def handle(self, *args, **options):
        app_name = args[0]
        module = importlib.import_module(app_name)
        path = os.path.dirname(module.__file__) + os.sep
        
        project_path = os.path.dirname(os.path.normpath(os.sys.modules[settings.SETTINGS_MODULE].__file__))
        
        install_app('social_auth')

        copy_tree(os.path.dirname(djangobp.__file__) + os.sep + 'scaffold/socialauth/templates/accounts', path + '/templates/accounts', update=True)

        copy_file(os.path.dirname(djangobp.__file__) + os.sep + 'scaffold/socialauth/controllers/accounts.py', path + '/controllers', update=True)
        copy_file(os.path.dirname(djangobp.__file__) + os.sep + 'scaffold/scaffold/socialauthsettings.py', project_path, update=True)

        urls_edit = CodeEditor(path + 'urls.py')
        urls_edit.insert_line("url(r'', include('social_auth.urls')),", 'urlpatterns')
        urls_edit.commit()

        settings_edit = CodeEditor(project_path + os.sep + 'settings.py')
        settings_edit.append_line("from socialauthsettings import *")
        settings_edit.commit()
        
        # TODO copy controllers/accounts.py
        # TODO copy templates/accounts/login.html
        # TODO urls social auth
        # TODO django-social-auth settings
        
        
Example 31
def copy_directory(src, dst, clear_dst=False):
    ensure_directory(dst)
    if clear_dst:
        clear_directory(dst)
    from distutils.dir_util import copy_tree
    copy_tree(src, dst)
Example 32
    def main(self):
        begin = time.time()
        source_language = "en"
        total_char = 0
        labs = [f.path for f in os.scandir(self.src_dir) if f.is_dir()]
        for lab in labs:
            print("Translating lab manual: {}".format(lab))
            start_time = time.time()
            for target_language_code in self.codes:
                start_time_lab = time.time()
                print(
                    "##########################################################"
                )
                print("Translating content from {}  to {}".format(
                    source_language, target_language_code))
                with open(str(lab) + '/content.xml') as f:
                    tree = ET.parse(f)
                    root = tree.getroot()
                    len_of_text = 0
                for elem in root.getiterator():
                    text_translate = elem.text
                    tag = elem.tag
                    if text_translate is None or text_translate.isspace():
                        continue
                    else:
                        self.logger.info(
                            "---------------------Start of text----------------------------------"
                        )
                        self.logger.info(
                            "-----------------------translating text-----------------------------"
                        )
                        curr_len = len(text_translate.encode('utf-8'))
                        total_char += curr_len

                        if tag == 'name' or tag == 'dataFormat':
                            continue
                        if tag == 'defaultLanguageCode':
                            elem.text = str(target_language_code)
                            continue
                        result = self.language_translation(
                            text=text_translate,
                            source_language="en",
                            target_language=target_language_code,
                            model=self.modelType)

                        if self.modelType == 'custom':
                            self.logger.info(result)
                            elem.text = str(result)
                        else:
                            self.logger.info(result['translatedText'])
                            elem.text = str(result['translatedText'])
                        len_of_text += curr_len

                        self.logger.info(
                            "-----------------------translation complete-----------------------------"
                        )
                        self.logger.info(
                            "---------------------End of text----------------------------------"
                        )
                        self.logger.info("")

                if not os.path.isdir(self.dst_dir + '/' + str(lab[7:-2]) +
                                     target_language_code):
                    os.mkdir(self.dst_dir + '/' + str(lab[7:-2]) +
                             target_language_code)
                    os.mkdir(self.dst_dir + '/' + str(lab[7:-2]) +
                             target_language_code + '/images')
                tree.write(self.dst_dir + '/' + str(lab[7:-2]) +
                           target_language_code + '/content.xml',
                           encoding="UTF-8")
                copy_tree(
                    str(lab) + '/images', self.dst_dir + '/' + str(lab[7:-2]) +
                    target_language_code + '/images')
                end_time = time.time()
                print(
                    "Total elapsed time to translate the xml document is: {} minutes"
                    .format((end_time - start_time_lab) / 60))
                is_upload = self.upload_s3(
                    self.dst_dir + '/' + str(lab[7:-2]) +
                    target_language_code + '.zip', self.dst_dir,
                    self.bucket_name,
                    str(lab[7:-2]) + "all_translations",
                    str(lab[7:-2]) + target_language_code)
                if not is_upload:
                    print("Upload failed....Exiting!!")
                    exit(0)
            final_end_time = time.time()
            print(
                "Total characters processed in this translating lab {} are {} chars"
                .format(lab, total_char))
            print("Total Translation time {} minutes".format(
                (final_end_time - start_time) / 60))
        end = time.time()
        print("Total execution time for all labs: {} hrs".format(
            (end - begin) / 3600))
Example 33
def main():
    if len(sys.argv) < 2 or sys.argv[1].startswith('-'):
        print usage % (aliases, ets_package_names)
        return

    arg1 = sys.argv[1]

    # Update the gh-pages branch
    if arg1 == 'update':
        if 2 < len(sys.argv):
            ets_packages = sys.argv[2:]
        else:
            ets_packages = ets_package_names.split()

        for ets_pkg_name in ets_packages:
            print "Updating documentation branch for {0}...".format(
                ets_pkg_name)

            # Find the current branch, so that we may return to it
            branches = subprocess.check_output(['git', 'branch'],
                                               cwd=ets_pkg_name)
            current_branch = [
                line.split()[1] for line in branches.splitlines()
                if line.startswith('*')
            ]
            current_branch = current_branch[0]

            # Checkout the gh-pages branch
            try:
                subprocess.check_call(['git', 'checkout', 'gh-pages'],
                                      cwd=ets_pkg_name)
            except (OSError, subprocess.CalledProcessError), detail:
                print "   Error running command in package %s:\n   %s" % (
                    ets_pkg_name, detail)
                raw_input("   Press enter to process remaining packages.")
                continue

            # Copy the files over
            print "Copying files for {0}".format(ets_pkg_name)
            if ets_pkg_name == 'mayavi':
                copy_tree(ets_pkg_name + '/docs/build/tvtk/html/',
                          ets_pkg_name + '/tvtk/')
                copy_tree(ets_pkg_name + '/docs/build/mayavi/html/',
                          ets_pkg_name + '/mayavi/')
            else:
                copy_tree(ets_pkg_name + '/docs/build/html/', ets_pkg_name)

            # Add everything to the repository
            try:
                subprocess.check_call(['git', 'add', '.'], cwd=ets_pkg_name)
            except (OSError, subprocess.CalledProcessError), detail:
                print "   Error running command in package %s:\n   %s" % (
                    ets_pkg_name, detail)
                raw_input("   Press enter to process remaining packages.")
                continue

            # Commit to the repo.
            try:
                subprocess.check_call(
                    ['git', 'commit', '-a', '-m', '"Updated docs."'],
                    cwd=ets_pkg_name)
            except (OSError, subprocess.CalledProcessError), detail:
                print "   Error running command in package %s:\n   %s" % (
                    ets_pkg_name, detail)
                raw_input("   Press enter to process remaining packages.")
                continue
Example 34
    def _compare_baseline(self):
        with self._test_status:
            if int(self._case.get_value("RESUBMIT")) > 0:
                # This is here because the comparison is run for each submission
                # and we only want to compare once the whole run is finished. We
                # need to return a pass here to continue the submission process.
                self._test_status.set_status(CIME.test_status.BASELINE_PHASE,
                                             CIME.test_status.TEST_PASS_STATUS)
                return

            self._test_status.set_status(CIME.test_status.BASELINE_PHASE,
                                         CIME.test_status.TEST_FAIL_STATUS)

            run_dir = self._case.get_value("RUNDIR")
            case_name = self._case.get_value("CASE")
            base_dir = os.path.join(
                self._case.get_value("BASELINE_ROOT"),
                self._case.get_value("BASECMP_CASE"),
            )

            test_name = "{}".format(case_name.split(".")[-1])
            evv_config = {
                test_name: {
                    "module": os.path.join(evv_lib_dir, "extensions", "ks.py"),
                    "test-case": "Test",
                    "test-dir": run_dir,
                    "ref-case": "Baseline",
                    "ref-dir": base_dir,
                    "var-set": "default",
                    "ninst": NINST,
                    "critical": 13,
                    "component": self.component,
                }
            }

            json_file = os.path.join(run_dir, ".".join([case_name, "json"]))
            with open(json_file, "w") as config_file:
                json.dump(evv_config, config_file, indent=4)

            evv_out_dir = os.path.join(run_dir, ".".join([case_name, "evv"]))
            evv(["-e", json_file, "-o", evv_out_dir])

            with open(os.path.join(evv_out_dir, "index.json")) as evv_f:
                evv_status = json.load(evv_f)

            comments = ""
            for evv_elem in evv_status["Data"]["Elements"]:
                if (evv_elem["Type"] == "ValSummary" and evv_elem["TableTitle"]
                        == "Kolmogorov-Smirnov test"):
                    comments = "; ".join("{}: {}".format(key, val)
                                         for key, val in evv_elem["Data"]
                                         [test_name][""].items())
                    if evv_elem["Data"][test_name][""]["Test status"].lower(
                    ) == "pass":
                        self._test_status.set_status(
                            CIME.test_status.BASELINE_PHASE,
                            CIME.test_status.TEST_PASS_STATUS,
                        )
                    break

            status = self._test_status.get_status(
                CIME.test_status.BASELINE_PHASE)
            mach_name = self._case.get_value("MACH")
            mach_obj = Machines(machine=mach_name)
            htmlroot = CIME.utils.get_htmlroot(mach_obj)
            urlroot = CIME.utils.get_urlroot(mach_obj)
            if htmlroot is not None:
                with CIME.utils.SharedArea():
                    dir_util.copy_tree(
                        evv_out_dir,
                        os.path.join(htmlroot, "evv", case_name),
                        preserve_mode=False,
                    )
                if urlroot is None:
                    urlroot = "[{}_URL]".format(mach_name.capitalize())
                viewing = "{}/evv/{}/index.html".format(urlroot, case_name)
            else:
                viewing = (
                    "{}\n"
                    "    EVV viewing instructions can be found at: "
                    "        https://github.com/E3SM-Project/E3SM/blob/master/cime/scripts/"
                    "climate_reproducibility/README.md#test-passfail-and-extended-output"
                    "".format(evv_out_dir))

            comments = ("{} {} for test '{}'.\n"
                        "    {}\n"
                        "    EVV results can be viewed at:\n"
                        "        {}".format(
                            CIME.test_status.BASELINE_PHASE,
                            status,
                            test_name,
                            comments,
                            viewing,
                        ))

            CIME.utils.append_testlog(comments, self._orig_caseroot)
Example 35
with open(logfile, 'w') as ff:
    ff.write(WORK_DIR)
    ff.write('\n\n\n')

# main directory to be used when running the knife:
KNIFE_DIR = "/srv/software/knife/circularRNApipeline_Standalone"

# place files in appropriate locations; fix code in future to
# avoid this step

anly_src = (KNIFE_DIR + "/analysis")
anly_dst = (RESOURCE_DIR + "/analysis")
anly_dst2 = (WORK_DIR + "/analysis")
if not os.path.exists(anly_dst):
        copy_tree(anly_src, anly_dst)
if not os.path.exists(anly_dst2):
        os.symlink(anly_dst, anly_dst2)

comprun_src = (KNIFE_DIR + "/completeRun.sh")
comprun_dst = (RESOURCE_DIR + "/completeRun.sh")
comprun_dst2 = (WORK_DIR + "/completeRun.sh")
if not os.path.exists(comprun_dst):
        copyfile(comprun_src, comprun_dst)
if not os.path.exists(comprun_dst2):
        os.symlink(comprun_dst, comprun_dst2)

findcirc_src = (KNIFE_DIR + "/findCircularRNA.sh")
findcirc_dst = (RESOURCE_DIR + "/findCircularRNA.sh")
findcirc_dst2 = (WORK_DIR + "/findCircularRNA.sh")
if not os.path.exists(findcirc_dst):
Example 36
    print(line)


print("Compiling with SBT...")
proc = subprocess.Popen('sbt fullOptJS',shell=True)
proc.wait()
print("Moving files to {}...".format(args.output))

from distutils import dir_util,file_util

assets_folder='assets'
output_assets_folder=os.path.join(output_folder,"")
makedirs_ifnot(output_assets_folder)
print("Moving assets folder from '{}' to '{}'...".format(assets_folder,output_assets_folder))

dir_util.copy_tree(assets_folder,output_assets_folder)

compiled_js_folder='target/scala-2.11/'
output_compiled_js_folder=os.path.join(output_folder,"")
makedirs_ifnot(output_compiled_js_folder)

compiled_js_file='vonsim-opt.js'
compiled_js_file_map=compiled_js_file+".map"

compiled_js_filepath=os.path.join(compiled_js_folder,compiled_js_file)
compiled_js_filepath_map=os.path.join(compiled_js_folder,compiled_js_file_map)
output_compiled_js_file=os.path.join(output_folder,compiled_js_file)
output_compiled_js_file_map=os.path.join(output_folder,compiled_js_file_map)
print("Moving compiled js file to '{}' and source map to '{}' ...".format(output_compiled_js_file,output_compiled_js_file_map))
file_util.copy_file(compiled_js_filepath,output_compiled_js_file)
file_util.copy_file(compiled_js_filepath_map,output_compiled_js_file_map)
Example 37
                id = 0
                for r, d, file in os.walk(d):

                    for f in file:
                        fname, fextension = os.path.splitext(f)

                        if fextension.lower() in args.extensions or any(
                                a in fname.lower() for a in args.keywords):
                            #\\?\ extends character limit? Thanks timmy
                            #copy2 to preserve metadata
                            shutil.copy2(
                                os.path.join(r, f),
                                os.path.join(
                                    "\\\?\\" + args.destination[0],
                                    os.path.join(
                                        destination,
                                        fname + "_" + str(id) + fextension)))
                        id += 1
            else:
                dir_util.copy_tree(
                    d,
                    os.path.join(args.destination[0],
                                 str(time.strftime("%Y%m%d-%H%M%S"))))

        if not args.quiet:
            print "Running..."

    time.sleep(1)

    original_drives = temp
Example 38
targetmhdir = os.path.abspath(os.path.join(exportDir, 'makehuman'))
for d in deleteAfterExport:
    f = os.path.abspath(os.path.join(targetmhdir, d))
    if os.path.exists(f):
        if os.path.isfile(f):
            os.remove(f)
        else:
            shutil.rmtree(f)

pluginsdir = os.path.abspath(os.path.join(targetmhdir, 'plugins'))

mhx2 = os.path.abspath(os.path.join(parentdir, 'mhx2-makehuman-exchange'))
if os.path.exists(mhx2):
    tocopy = os.path.abspath(os.path.join(mhx2, '9_export_mhx2'))
    todest = os.path.abspath(os.path.join(pluginsdir, '9_export_mhx2'))
    copy_tree(tocopy, todest)
else:
    print("MHX2 was not found in parent directory")

asset = os.path.abspath(
    os.path.join(parentdir, 'community-plugins-assetdownload'))
if os.path.exists(asset):
    tocopy = os.path.abspath(os.path.join(asset, '8_asset_downloader'))
    todest = os.path.abspath(os.path.join(pluginsdir, '8_asset_downloader'))
    copy_tree(tocopy, todest)
else:
    print("asset downloader was not found in parent directory")

mhapi = os.path.abspath(os.path.join(parentdir, 'community-plugins-mhapi'))
if os.path.exists(mhapi):
    tocopy = os.path.abspath(os.path.join(mhapi, '1_mhapi'))
Example 39
 def merge(self):
     c = self.app.conf
     if path.isdir(path.join(c.paths.cwd, 'initrd')):
         copy_tree(path.join(c.paths.cwd, 'initrd'), c.paths.initrd)
Example 40
def copytree(src, dst):
    copy_tree(src,dst)
    return
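
A wrapper this thin is also the natural migration point away from distutils, which PEP 632 deprecated in Python 3.10 and removed in Python 3.12. Since Python 3.8, shutil.copytree accepts dirs_exist_ok=True and can therefore copy onto an existing directory much as copy_tree does; a sketch of the equivalent wrapper under that assumption:

import shutil

def copytree(src, dst):
    # dirs_exist_ok=True (Python 3.8+) merges into an existing destination
    # instead of raising FileExistsError.
    shutil.copytree(src, dst, dirs_exist_ok=True)
    return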
Example 41
 def copy_web_template(self):
     '''Copy the base web asset files from self.template_dir to ./web'''
     # NOTE: target_dir is assumed to be set elsewhere on the instance;
     # per the docstring it points at the ./web output directory.
     copy_tree(self.template_dir, target_dir)
Example 42
def bcl2fq(config):
    '''
    takes things from /dont_touch_this/solexa_runs/XXX/Data/Intensities/BaseCalls
    and writes most output into config.outputDir/XXX, where XXX is the run ID.
    '''
    lanes = config.get("Options", "lanes")
    if lanes != '':
        lanes = '_lanes{}'.format(lanes)

    #Make the output directories
    os.makedirs("%s/%s%s" % (config.get(
        "Paths", "outputDir"), config.get("Options", "runID"), lanes),
                exist_ok=True)
    #Make log directory
    os.makedirs(
        "%s" % (os.path.join(config.get("Paths", "logDir"),
                             os.path.dirname(config.get("Options", "runID")))),
        exist_ok=True)
    os.makedirs(os.path.join(config.get("Paths", "outputDir"),
                             config.get("Options", "runID"), 'InterOp'),
                exist_ok=True)
    copy_tree(
        os.path.join(config.get("Paths", "baseDir"),
                     config.get("Options", "sequencer"), 'data',
                     config.get("Options", "runID"), 'InterOp'),
        os.path.join(config.get("Paths", "outputDir"),
                     config.get("Options", "runID"), 'InterOp'))
    old_wd = os.getcwd()
    os.chdir(
        os.path.join(config.get('Paths', 'outputDir'),
                     config.get('Options', 'runID')))

    if config.get("Options", "singleCell") == "1":
        #TODO: --interop-dir not supported for cellranger
        cmd = "{cellranger_cmd} --output-dir={output_dir} --sample-sheet={sample_sheet} --run={run_dir} {cellranger_options}".format(
            cellranger_cmd=config.get("cellranger", "cellranger_mkfastq"),
            output_dir="{}/{}".format(config.get("Paths", "outputDir"),
                                      config.get("Options", "runID")),
            sample_sheet=config.get("Options", "sampleSheet"),
            run_dir="{}/{}/data/{}".format(config.get("Paths", "baseDir"),
                                           config.get("Options", "sequencer"),
                                           config.get("Options", "runID")),
            cellranger_options=config.get("cellranger",
                                          "cellranger_mkfastq_options"))
    else:
        cmd = "%s %s --sample-sheet %s -o %s/%s%s -R %s/%s/data/%s --interop-dir %s/%s/InterOp" % (
            config.get("bcl2fastq", "bcl2fastq"),
            config.get("bcl2fastq", "bcl2fastq_options"),
            config.get("Options", "sampleSheet"),
            config.get("Paths", "outputDir"),
            config.get("Options", "runID"),
            lanes,
            config.get("Paths", "baseDir"),
            config.get("Options", "sequencer"),
            config.get("Options", "runID"),
            config.get("Paths", "outputDir"),
            config.get("Options", "runID"),
        )
    syslog.syslog("[bcl2fq] Running: %s\n" % cmd)
    logOut = open(
        "%s/%s%s.log" %
        (config.get("Paths", "logDir"), config.get("Options", "runID"), lanes),
        "w")
    subprocess.check_call(cmd,
                          stdout=logOut,
                          stderr=subprocess.STDOUT,
                          shell=True)
    logOut.close()
    os.chdir(old_wd)
Example 43
def main(args, train_env):
    logging.basicConfig(level=args.logger_level)

    # Set a random seed used in ChainerRL
    misc.set_random_seed(args.seed, gpus=(args.gpu, ))
    if not (args.demo and args.load):
        args.outdir = experiments.prepare_output_dir(args, args.outdir)
    temp = args.outdir.split('/')[-1]
    dst = args.outdir.strip(temp)

    def make_env(test):
        env = gym.make(args.env)
        if test:
            episode_length = args.eval_episode_length
        else:
            episode_length = args.episode_length

        env.initialize_environment(
            case=args.state_rep,
            n_historical_events=args.n_historical_events,
            episode_length=episode_length,
            n_experts=args.n_experts,
            n_demos_per_expert=1,
            n_expert_time_steps=args.length_expert_TS,
            seed_agent=args.seed_agent,
            seed_expert=args.seed_expert,
            adam_days=args.adam_days)

        # Use different random seeds for train and test envs
        env_seed = 2**32 - 1 - args.seed if test else args.seed
        env.seed(env_seed)
        # Cast observations to float32 because our model uses float32
        env = chainerrl.wrappers.CastObservationToFloat32(env)
        if args.monitor:
            env = gym.wrappers.Monitor(env, args.outdir)
        if not test:
            # Scale rewards (and thus returns) to a reasonable range so that
            # training is easier
            env = chainerrl.wrappers.ScaleReward(env, args.reward_scale_factor)
        if args.render:
            env = chainerrl.wrappers.Render(env)
        return env

    sample_env = gym.make(args.env)
    sample_env.initialize_environment(
        case=args.state_rep,
        n_historical_events=args.n_historical_events,
        episode_length=args.episode_length,
        n_experts=args.n_experts,
        n_demos_per_expert=1,
        n_expert_time_steps=args.length_expert_TS,
        seed_agent=args.seed_agent,
        seed_expert=args.seed_expert,
        adam_days=args.adam_days)
    demonstrations = sample_env.generate_expert_trajectories(out_dir=dst,
                                                             eval=False)
    timestep_limit = None  #sample_env.spec.tags.get('wrapper_config.TimeLimit.max_episode_steps')  # This value is None

    # Generate expert data for evaluation
    temp_env = gym.make(args.env)
    temp_env.initialize_environment(
        case=args.state_rep,
        n_historical_events=args.n_historical_events,
        episode_length=
        0,  # This parameter does not really matter since we create this env only for generating samples
        n_experts=args.n_experts,
        n_demos_per_expert=1,  # We do not perform any clustering right now
        # n_demos_per_expert=args.n_demos_per_expert,  # How large should the expert cluster be?
        n_expert_time_steps=args.
        eval_episode_length,  # How long should each expert trajectory be?
        seed_expert=args.seed_expert,
        adam_days=args.adam_days)
    temp_env.generate_expert_trajectories(out_dir=dst, eval=True)

    obs_space = sample_env.observation_space
    action_space = sample_env.action_space

    # Normalize observations based on their empirical mean and variance
    if args.state_rep == 1:
        obs_dim = obs_space.low.size
    elif args.state_rep == 2 or args.state_rep == 21 or args.state_rep == 22 or args.state_rep == 24 or args.state_rep == 4 or args.state_rep == 221 or args.state_rep == 222 \
    or args.state_rep == 71 or args.state_rep == 17 or args.state_rep == 81:
        obs_dim = obs_space.n
    elif args.state_rep == 3 or args.state_rep == 11 or args.state_rep == 23 or args.state_rep == 31 or args.state_rep == 7:
        obs_dim = obs_space.nvec.size
    else:
        raise NotImplementedError

    if args.normalize_obs:
        obs_normalizer = chainerrl.links.EmpiricalNormalization(
            obs_dim,
            clip_threshold=5)  # shape: Shape of input values except batch axis
    else:
        obs_normalizer = None

    # Switch policy types accordingly to action space types
    if args.arch == 'FFSoftmax':
        model = A3CFFSoftmax(obs_dim,
                             action_space.n,
                             hidden_sizes=args.G_layers)
    elif args.arch == 'FFMellowmax':
        model = A3CFFMellowmax(obs_space.low.size, action_space.n)
    elif args.arch == 'FFGaussian':
        model = A3CFFGaussian(obs_space.low.size,
                              action_space,
                              bound_mean=args.bound_mean)

    opt = chainer.optimizers.Adam(alpha=args.lr, eps=10e-1)
    opt.setup(model)

    if args.show_D_dummy:  # Let discriminator see dummy
        input_dim_D = obs_dim + 1
    elif not args.show_D_dummy:  # Do not let discriminator see dummy
        if args.state_rep == 21 or args.state_rep == 17:
            input_dim_D = obs_dim + 1
        else:
            input_dim_D = obs_dim + 1 - args.n_experts

    if args.weight_decay > 0:
        opt.add_hook(NonbiasWeightDecay(args.weight_decay))
    if args.algo == 'ppo':
        agent = PPO(
            model,
            opt,
            obs_normalizer=obs_normalizer,
            gpu=args.gpu,
            update_interval=args.update_interval,
            minibatch_size=args.batchsize,
            epochs=args.epochs,
            clip_eps_vf=None,
            entropy_coef=args.entropy_coef,
            standardize_advantages=args.standardize_advantages,
        )
    elif args.algo == 'gail':
        from customer_behaviour.algorithms.irl.gail import GAIL as G
        from customer_behaviour.algorithms.irl.gail import Discriminator as D

        demonstrations = np.load(dst + '/expert_trajectories.npz')
        D = D(gpu=args.gpu,
              input_dim=input_dim_D,
              hidden_sizes=args.D_layers,
              loss_type=args.loss_type)

        agent = G(env=sample_env,
                  demonstrations=demonstrations,
                  discriminator=D,
                  model=model,
                  optimizer=opt,
                  obs_normalizer=obs_normalizer,
                  gpu=args.gpu,
                  update_interval=args.update_interval,
                  minibatch_size=args.batchsize,
                  epochs=args.epochs,
                  clip_eps_vf=None,
                  entropy_coef=args.entropy_coef,
                  standardize_advantages=args.standardize_advantages,
                  args=args)

    elif args.algo == 'airl':
        from customer_behaviour.algorithms.irl.airl import AIRL as G
        from customer_behaviour.algorithms.irl.airl import Discriminator as D
        # obs_normalizer = None
        demonstrations = np.load(dst + '/expert_trajectories.npz')
        discriminator = D(gpu=args.gpu,
                          input_dim=input_dim_D - 1,
                          hidden_sizes=args.D_layers)  # AIRL feeds only the state to D

        agent = G(env=sample_env,
                  demonstrations=demonstrations,
                  discriminator=discriminator,
                  model=model,
                  optimizer=opt,
                  obs_normalizer=obs_normalizer,
                  gpu=args.gpu,
                  update_interval=args.update_interval,
                  minibatch_size=args.batchsize,
                  epochs=args.epochs,
                  clip_eps_vf=None,
                  entropy_coef=args.entropy_coef,
                  standardize_advantages=args.standardize_advantages,
                  noise=args.noise,
                  n_experts=args.n_experts,
                  episode_length=args.episode_length,
                  adam_days=args.adam_days,
                  dummy_D=args.show_D_dummy)

    elif args.algo == 'mmct-gail':
        from customer_behaviour.algorithms.irl.gail.mmct_gail import MMCTGAIL as G
        from customer_behaviour.algorithms.irl.gail import Discriminator as D

        demonstrations = np.load(dst + '/expert_trajectories.npz')
        discriminator = D(gpu=args.gpu,
                          input_dim=input_dim_D,
                          hidden_sizes=args.D_layers,
                          loss_type=args.loss_type)

        agent = G(env=sample_env,
                  demonstrations=demonstrations,
                  discriminator=discriminator,
                  model=model,
                  optimizer=opt,
                  obs_normalizer=obs_normalizer,
                  gpu=args.gpu,
                  update_interval=args.update_interval,
                  minibatch_size=args.batchsize,
                  epochs=args.epochs,
                  clip_eps_vf=None,
                  entropy_coef=args.entropy_coef,
                  standardize_advantages=args.standardize_advantages,
                  args=args)

    if args.load:
        # Skipped unless a saved agent is provided
        agent.load(args.load)

    if args.demo:
        # Skipped unless demo mode is requested
        env = make_env(True)
        eval_stats = experiments.eval_performance(
            env=env,
            agent=agent,
            n_steps=None,
            n_episodes=args.eval_n_runs,
            max_episode_len=timestep_limit)
        print('n_runs: {} mean: {} median: {} stdev {}'.format(
            args.eval_n_runs, eval_stats['mean'], eval_stats['median'],
            eval_stats['stdev']))
        outdir = args.load if args.load else args.outdir
        save_agent_demo(make_env(False), agent, outdir)
    else:
        # Linearly decay the learning rate to zero
        def lr_setter(env, agent, value):
            agent.optimizer.alpha = value

        lr_decay_hook = experiments.LinearInterpolationHook(
            args.steps, args.lr, 0, lr_setter)

        # Linearly decay the clipping parameter to zero
        def clip_eps_setter(env, agent, value):
            agent.clip_eps = max(value, 1e-8)

        clip_eps_decay_hook = experiments.LinearInterpolationHook(
            args.steps, 0.2, 0, clip_eps_setter)
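        # LinearInterpolationHook(total_steps, start, stop, setter) anneals a
        # value linearly from start to stop over total_steps and hands it to
        # the setter; roughly (a sketch, not the chainerrl source):
        #   value = start + (stop - start) * min(1.0, step / total_steps)
        #   setter(env, agent, value)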

        if train_env is None:
            experiments.train_agent_with_evaluation(
                agent=agent,
                # Environment to train against (False -> scaled rewards)
                env=make_env(False),
                eval_env=make_env(True),  # Environment used for evaluation
                outdir=args.outdir,
                # Total number of training timesteps
                # (args.n_training_episodes * args.episode_length)
                steps=args.steps,
                eval_n_steps=None,  # Number of timesteps per evaluation phase
                eval_n_episodes=args.eval_n_runs,  # Episodes per evaluation phase (default: 10)
                eval_interval=args.eval_interval,  # Interval between evaluations (default: 10000 steps?)
                train_max_episode_len=timestep_limit,  # Max episode length during training (None here)
                save_best_so_far_agent=False,
                step_hooks=[
                    lr_decay_hook,
                    clip_eps_decay_hook,
                ],
                checkpoint_freq=args.eval_interval)
        else:
            experiments.train_agent_batch_with_evaluation(
                agent=agent,
                env=train_env,
                steps=args.steps,
                eval_n_steps=None,
                eval_n_episodes=args.eval_n_runs,
                eval_interval=args.eval_interval,
                outdir=args.outdir,
                max_episode_len=timestep_limit,
                eval_max_episode_len=None,
                eval_env=make_env(True),
                step_hooks=[
                    lr_decay_hook,
                    clip_eps_decay_hook,
                ],
                save_best_so_far_agent=False,
                checkpoint_freq=args.eval_interval,
                log_interval=args.update_interval)

        save_agent_demo(
            make_env(True), agent, args.outdir, 10 * args.eval_episode_length
        )  # originally it was make_env(test=False) which seems strange

    # Move result files to correct folder and remove empty folder
    move_dir(args.outdir, dst)
    os.rmdir(args.outdir)

    if args.save_results:
        print('Saving result...')
        res2.save_data(dst, 10000, 50, N=1)

        print('Running evaluate policy...')
        ep.eval_policy(a_dir_path=dst)

    # else:
    #     if args.n_experts <= 10:
    #         print('Running evaluate policy...')
    #         ep.eval_policy(a_dir_path=dst)
    #         # print('Running evaluate training...')
    #         # ets.eval_training(a_dir_path=dst)
    #         print('Done')

    if args.save_report_material:
        print('Saving dataframe...')
        if args.state_rep == 21:
            if args.algo == 'gail':
                folder_name = 'gail'
            elif args.algo == 'airl':
                folder_name = 'airl'
        elif args.state_rep == 22:
            if args.algo == 'gail':
                folder_name = 'gail_dummies'
            elif args.algo == 'airl':
                folder_name = 'airl_dummies'
        elif args.state_rep == 81:
            if args.algo == 'gail':
                folder_name = 'gail_adams'
            elif args.algo == 'airl':
                folder_name = 'airl_adams'
        elif args.state_rep == 17:
            folder_name = 'ail'
        elif args.state_rep == 221:
            folder_name = 'ail_dummies'
        elif args.state_rep == 71:
            folder_name = 'ail_adams'
        else:
            raise NotImplementedError

        report_material.save_df(dst, folder_name)

    if args.save_folder is not None:
        print('Saving result to ' + args.save_folder)
        os.makedirs(os.path.join(os.getcwd(), args.save_folder), exist_ok=True)
        from distutils.dir_util import copy_tree
        copy_tree(
            os.path.join(os.getcwd(), dst),
            os.path.join(os.getcwd(), args.save_folder,
                         args.outdir.split('/')[-2]))
Esempio n. 44
0
def train(dataset,
          learn_step=0.005,
          weight_decay=1e-4,
          num_epochs=500,
          max_patience=100,
          data_augmentation={},
          savepath=None,
          loadpath=None,
          early_stop_class=None,
          batch_size=None,
          resume=False,
          train_from_0_255=False):

    #
    # Prepare load/save directories
    #
    exp_name = 'unet_' + ('data_aug' if data_augmentation else '')

    if savepath is None:
        raise ValueError('A saving directory must be specified')

    savepath = os.path.join(savepath, dataset, exp_name)
    # loadpath = os.path.join(loadpath, dataset, exp_name)
    print(savepath)
    # print loadpath

    if not os.path.exists(savepath):
        os.makedirs(savepath)
    else:
        print('\033[93m The following folder already exists {}. '
              'It will be overwritten in a few seconds...\033[0m'.format(
                  savepath))

    print('Saving directory : ' + savepath)
    with open(os.path.join(savepath, "config.txt"), "w") as f:
        for key, value in locals().items():
            f.write('{} = {}\n'.format(key, value))

    #
    # Define symbolic variables
    #
    input_var = T.tensor4('input_var')
    target_var = T.ivector('target_var')

    #
    # Build dataset iterator
    #
    if batch_size is not None:
        bs = batch_size
    else:
        bs = [10, 1, 1]

    train_iter = IsbiEmStacksDataset(which_set='train',
                                     batch_size=bs[0],
                                     seq_per_subset=0,
                                     seq_length=0,
                                     data_augm_kwargs=data_augmentation,
                                     return_one_hot=False,
                                     return_01c=False,
                                     overlap=0,
                                     use_threads=True,
                                     shuffle_at_each_epoch=True,
                                     return_list=True,
                                     return_0_255=False)

    val_iter = IsbiEmStacksDataset(which_set='val',
                                   batch_size=bs[1],
                                   seq_per_subset=0,
                                   seq_length=0,
                                   return_one_hot=False,
                                   return_01c=False,
                                   use_threads=True,
                                   shuffle_at_each_epoch=False,
                                   return_list=True,
                                   return_0_255=False)
    test_iter = None

    batch = train_iter.next()
    input_dim = (np.shape(batch[0])[2], np.shape(batch[0])[3])  # (x, y) image shape

    n_batches_train = train_iter.nbatches
    n_batches_val = val_iter.nbatches
    n_batches_test = test_iter.nbatches if test_iter is not None else 0
    n_classes = train_iter.non_void_nclasses
    void_labels = train_iter.void_labels
    nb_in_channels = train_iter.data_shape[0]

    print("Batch. train: %d, val %d, test %d" %
          (n_batches_train, n_batches_val, n_batches_test))
    print("Nb of classes: %d" % (n_classes))
    print("Nb. of input channels: %d" % (nb_in_channels))

    #
    # Build network
    #

    net = build_UNet(
        n_input_channels=nb_in_channels,  # BATCH_SIZE = batch_size,
        num_output_classes=n_classes,
        base_n_filters=64,
        do_dropout=False,
        input_dim=(None, None))

    output_layer = net["output_flattened"]
    #
    # Define and compile theano functions
    #
    print("Defining and compiling training functions")
    prediction = lasagne.layers.get_output(output_layer, input_var)
    loss = crossentropy_metric(prediction, target_var, void_labels)

    if weight_decay > 0:
        weightsl2 = regularize_network_params(output_layer,
                                              lasagne.regularization.l2)
        loss += weight_decay * weightsl2

    params = lasagne.layers.get_all_params(output_layer, trainable=True)
    updates = lasagne.updates.adam(loss, params, learning_rate=learn_step)

    train_fn = theano.function([input_var, target_var], loss, updates=updates)

    print("Defining and compiling test functions")
    test_prediction = lasagne.layers.get_output(output_layer,
                                                input_var,
                                                deterministic=True)
    test_loss = crossentropy_metric(test_prediction, target_var, void_labels)
    test_acc = accuracy_metric(test_prediction, target_var, void_labels)
    test_jacc = jaccard_metric(test_prediction, target_var, n_classes)

    val_fn = theano.function([input_var, target_var],
                             [test_loss, test_acc, test_jacc])

    #
    # Train
    #
    err_train = []
    err_valid = []
    acc_valid = []
    jacc_valid = []
    patience = 0

    # Training main loop
    print("Start training")
    for epoch in range(num_epochs):
        # Single epoch training and validation
        start_time = time.time()
        cost_train_tot = 0
        # Train
        print('Training steps ')
        for i in range(n_batches_train):
            print(i)
            # Get minibatch
            X_train_batch, L_train_batch = train_iter.next()
            L_train_batch = np.reshape(L_train_batch,
                                       np.prod(L_train_batch.shape))

            # Training step
            cost_train = train_fn(X_train_batch, L_train_batch)
            out_str = "cost %f" % (cost_train)
            cost_train_tot += cost_train

        err_train += [cost_train_tot / n_batches_train]

        # Validation
        cost_val_tot = 0
        acc_val_tot = 0
        jacc_val_tot = np.zeros((2, n_classes))
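        # jacc_val_tot accumulates, per class, the two sums whose ratio gives
        # the Jaccard index (presumably intersections in row 0 and unions in
        # row 1, judging from the per-class division below).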

        print('Validation steps')
        for i in range(n_batches_val):
            print(i)
            # Get minibatch
            X_val_batch, L_val_batch = val_iter.next()
            L_val_batch = np.reshape(L_val_batch, np.prod(L_val_batch.shape))

            # Validation step
            cost_val, acc_val, jacc_val = val_fn(X_val_batch, L_val_batch)

            acc_val_tot += acc_val
            cost_val_tot += cost_val
            jacc_val_tot += jacc_val

        err_valid += [cost_val_tot / n_batches_val]
        acc_valid += [acc_val_tot / n_batches_val]
        jacc_perclass_valid = jacc_val_tot[0, :] / jacc_val_tot[1, :]
        if early_stop_class is None:
            jacc_valid += [np.mean(jacc_perclass_valid)]
        else:
            jacc_valid += [jacc_perclass_valid[early_stop_class]]


        out_str = "EPOCH %i: Avg epoch training cost train %f, cost val %f" +\
            ", acc val %f, jacc val class 0 % f, jacc val class 1 %f, jacc val %f took %f s"
        out_str = out_str % (epoch, err_train[epoch], err_valid[epoch],
                             acc_valid[epoch], jacc_perclass_valid[0],
                             jacc_perclass_valid[1], jacc_valid[epoch],
                             time.time() - start_time)
        print(out_str)

        with open(os.path.join(savepath, "unet_output.log"), "a") as f:
            f.write(out_str + "\n")

        # Early stopping and saving stuff
        if epoch == 0:
            best_jacc_val = jacc_valid[epoch]
        elif epoch > 1 and jacc_valid[epoch] > best_jacc_val:
            best_jacc_val = jacc_valid[epoch]
            patience = 0
            np.savez(os.path.join(savepath, 'new_unet_model_best.npz'),
                     *lasagne.layers.get_all_param_values(output_layer))
            np.savez(os.path.join(savepath, 'unet_errors_best.npz'), err_valid,
                     err_train, acc_valid, jacc_valid)
        else:
            patience += 1

        np.savez(os.path.join(savepath, 'new_unet_model_last.npz'),
                 *lasagne.layers.get_all_param_values(output_layer))
        np.savez(os.path.join(savepath, 'unet_errors_last.npz'), err_valid,
                 err_train, acc_valid, jacc_valid)
        # Finish training if patience has expired or the maximum number of
        # epochs has been reached
        if patience == max_patience or epoch == num_epochs - 1:
            if test_iter is not None:
                # Load best model weights
                with np.load(os.path.join(savepath,
                                          'new_unet_model_best.npz')) as f:
                    param_values = [
                        f['arr_%d' % i] for i in range(len(f.files))
                    ]
                nlayers = len(lasagne.layers.get_all_params(output_layer))
                lasagne.layers.set_all_param_values(output_layer,
                                                    param_values[:nlayers])
                # Test
                cost_test_tot = 0
                acc_test_tot = 0
                jacc_test_tot = np.zeros((2, n_classes))
                for i in range(n_batches_test):
                    # Get minibatch
                    X_test_batch, L_test_batch = test_iter.next()
                    L_test_batch = np.reshape(L_test_batch,
                                              np.prod(L_test_batch.shape))

                    # Test step
                    cost_test, acc_test, jacc_test = val_fn(
                        X_test_batch, L_test_batch)

                    acc_test_tot += acc_test
                    cost_test_tot += cost_test
                    jacc_test_tot += jacc_test

                err_test = cost_test_tot / n_batches_test
                acc_test = acc_test_tot / n_batches_test
                jacc_test_perclass = jacc_test_tot[0, :] / jacc_test_tot[1, :]
                jacc_test = np.mean(jacc_test_perclass)

                out_str = "FINAL MODEL: err test % f, acc test %f, " +\
                    "jacc test class 0 %f, jacc test class 1 %f, jacc test %f"
                out_str = out_str % (err_test, acc_test, jacc_test_perclass[0],
                                     jacc_test_perclass[1], jacc_test)
                print(out_str)
            if savepath != loadpath:
                print('Copying model and other training files to {}'.format(
                    loadpath))
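                # distutils.dir_util.copy_tree copies the tree recursively and
                # creates missing destination directories instead of failing
                # when the target already exists (unlike shutil.copytree on
                # Python 2).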
                copy_tree(savepath, loadpath)

            # End
            return
Esempio n. 45
0
    def split_train_val_test(self,
                             val_fraction=0.222,
                             test_fraction=0.1,
                             balance=False,
                             clip=False):
        '''Split the data into train/val and test sets, with the test set
        split by patient and train/val by image.
        :param val_fraction: fraction to leave out as validation set, on the PATIENT level
        :param test_fraction: fraction to leave out as test set, on the PATIENT level
        :param balance: (bool) perform oversampling of the minority class
        :param clip: (bool) drop the first and last image of each training tumor
        '''

        #first, get a list of all the patients
        patients = os.listdir(self.tumors_by_pt)

        #make test set on patient level, subtract these patients from others
        test_patients = [
            patients[i] for i in random.sample(
                range(len(patients)), int(len(patients) * test_fraction))
        ]
        train_val_patients = list(set(patients) - set(test_patients))
        val_patients = [
            train_val_patients[i]
            for i in random.sample(range(len(train_val_patients)),
                                   int(len(train_val_patients) * val_fraction))
        ]
        train_patients = list(set(train_val_patients) - set(val_patients))
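        # With the defaults and e.g. 100 patients: int(100 * 0.1) = 10 test
        # patients, then int(90 * 0.222) = 19 val patients, leaving 71 for
        # training -- roughly a 70/20/10 split.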

        print('total of {} test patients'.format(len(test_patients)))
        print('total of {} val patients'.format(len(val_patients)))
        print('total of {} train patients'.format(len(train_patients)))

        #obtain test_tumors
        test_sample = self.find_sample(test_patients)
        val_sample = self.find_sample(val_patients)
        train_sample = self.find_sample(train_patients)
        train_val_dict = {'train': train_sample, 'val': val_sample}

        #set up the basic file structure
        if not os.path.exists(self.model_dev):
            os.mkdir(self.model_dev)
        for filetype in [
                'train', 'val', 'test', 'val_pt', 'all_train_images',
                'all_val_images'
        ]:
            os.makedirs(os.path.join(self.model_dev, filetype), exist_ok=True)
        for filetype in ['train', 'val']:
            for PIRADS in ['PIRADS_2', 'PIRADS_3', 'PIRADS_4', 'PIRADS_5']:
                os.makedirs(os.path.join(self.model_dev, filetype, PIRADS),
                            exist_ok=True)

        #first, copy all tumor-level information into a new file
        print("making test set")
        for tumor in test_sample:
            print(tumor)
            copy_tree(os.path.join(self.tumors, tumor),
                      os.path.join(self.model_dev, 'test', tumor))

        print('making val_pt set')
        for tumor in val_sample:
            copy_tree(os.path.join(self.tumors, tumor),
                      os.path.join(self.model_dev, 'val_pt', tumor))

        #next, copy all training images to an 'all folder'
        print('copying all train/val images to new folder')
        for dataset in train_val_dict.keys():
            print(dataset)
            sample = train_val_dict[dataset]
            for tumor in sample:
                files = os.listdir(os.path.join(self.tumors, tumor))
                if dataset == 'train':

                    if clip:
                        print(
                            "length of files is {} for tumor {} before clipping"
                            .format(len(files), tumor))
                        if len(files) > 3:
                            del files[0]
                            del files[-1]
                            print(
                                "length of files is {} for tumor {} after clipping"
                                .format(len(files), tumor))

                for file in files:
                    shutil.copy2(os.path.join(self.tumors,tumor,file),\
                                 os.path.join(self.model_dev,'all_'+dataset+'_images',file))

        #split the data by file
        for key in train_val_dict.keys():
            print("making {} set".format(key))
            for file in os.listdir(
                    os.path.join(self.model_dev, 'all_' + key + '_images')):
                if file != 'Thumbs.db':
                    print(file)
                    pirads = file.split('_')[8]  # PIRADS score encoded in the filename
                    if pirads in ('2', '3', '4', '5'):
                        shutil.copy2(
                            os.path.join(self.model_dev,
                                         'all_' + key + '_images', file),
                            os.path.join(self.model_dev, key,
                                         'PIRADS_' + pirads, file))
        if balance:
            self.balance_dataset(cat1='PIRADS_2',
                                 cat2='PIRADS_3',
                                 cat3='PIRADS_4',
                                 cat4='PIRADS_5')