Example #1
def exe():
    makedirs( DIST_DIR )
    shutil.copy( "pyautocore.pyd", DIST_DIR )
    shutil.copy( "__init__.py", DIST_DIR )
    shutil.copy( "pyauto_const.py", DIST_DIR )
    shutil.copy( "pyauto_input.py", DIST_DIR )
    rmtree( DIST_DIR + "/sample" )
    shutil.copytree( "sample", DIST_DIR + "/sample", ignore=shutil.ignore_patterns(".svn","*.pyc","*.pyo") )
    rmtree( DIST_DIR + "/doc" )
    shutil.copytree( "doc/html", DIST_DIR + "/doc", ignore=shutil.ignore_patterns(".svn") )

    if 1:
        os.chdir("dist")
        createZip( "pyauto.zip", DIST_FILES )
        os.chdir("..")
    
    fd = open("dist/pyauto.zip","rb")
    m = hashlib.md5()
    while 1:
        data = fd.read( 1024 * 1024 )
        if not data: break
        m.update(data)
    fd.close()
    print( "" )
    print( m.hexdigest() )
Example #2
    def prepare(self):
        root_dir = self.get_bindings_root_directory()

        # Create directories
        common.recreate_directory(self.tmp_dir)
        os.makedirs(self.tmp_examples_dir)
        os.makedirs(self.tmp_source_dir)
        os.makedirs(self.tmp_build_dir)

        # Copy blockly and closure-library to build directory
        shutil.copytree(os.path.join(root_dir, '..', '..', 'tvpl-blockly'), self.tmp_build_blockly_dir,
                        ignore=shutil.ignore_patterns('*/.git'))
        shutil.copytree(os.path.join(root_dir, '..', '..', 'tvpl-closure-library'), self.tmp_build_closure_library_dir,
                        ignore=shutil.ignore_patterns('*/.git', '*_test.js'))

        # Copy css/, js/, index.html and programEditor.html
        shutil.copytree(os.path.join(root_dir, 'css'), os.path.join(self.tmp_source_dir, 'css'))
        shutil.copytree(os.path.join(root_dir, 'js'), os.path.join(self.tmp_source_dir, 'js'))
        shutil.copy(os.path.join(root_dir, 'index.html'), self.tmp_source_dir)
        shutil.copy(os.path.join(root_dir, 'programEditor.html'), self.tmp_source_dir)

        # Copy changelog.txt and readme.txt
        shutil.copy(os.path.join(root_dir, 'changelog.txt'),self.tmp_dir)
        shutil.copy(os.path.join(root_dir, 'readme.txt'),self.tmp_dir)

        # Generate JavaScript bindings
        with common.ChangedDirectory(os.path.join(root_dir, '..', 'javascript')):
            common.execute(['python', 'generate_javascript_bindings.py'])
            common.execute(['python', 'generate_javascript_zip.py'])

        shutil.copy(os.path.join(self.tmp_javascript_dir, 'browser', 'source', 'Tinkerforge.js'),
                    os.path.join(self.tmp_source_dir, 'js', 'Tinkerforge.js'))
Example #3
def CopyDart2Js(build_dir, sdk_root):
  '''
  Install dart2js in SDK/lib/dart2js.

  Currently, we copy too much stuff to this location, but the SDK's
  layout matches the layout of the part of the repository we're
  dealing with here which frees us from rewriting files. The long term
  plan is to align the layout of the repository and the SDK, at which
  point we should be able to simplify Main below and share the dart
  files between the various components to minimize SDK download size.
  '''
  copytree('lib', os.path.join(sdk_root, 'lib', 'dart2js', 'lib'),
           ignore=ignore_patterns('.svn'))
  copytree(os.path.join('corelib', 'src'),
           os.path.join(sdk_root, 'lib', 'dart2js', 'corelib', 'src'),
           ignore=ignore_patterns('.svn'))
  copytree(os.path.join('runtime', 'lib'),
           os.path.join(sdk_root, 'lib', 'dart2js', 'runtime', 'lib'),
           ignore=ignore_patterns('.svn'))
  copytree(os.path.join('runtime', 'bin'),
           os.path.join(sdk_root, 'lib', 'dart2js', 'runtime', 'bin'),
           ignore=ignore_patterns('.svn'))
  if utils.GuessOS() == 'win32':
    dart2js = os.path.join(sdk_root, 'bin', 'dart2js.bat')
    Copy(os.path.join(build_dir, 'dart2js.bat'), dart2js)
    ReplaceInFiles([dart2js],
                   [(r'%SCRIPTPATH%\.\.\\lib',
                     r'%SCRIPTPATH%..\lib\dart2js\lib')])
  else:
    dart2js = os.path.join(sdk_root, 'bin', 'dart2js')
    Copy(os.path.join(build_dir, 'dart2js'), dart2js)
    ReplaceInFiles([dart2js],
                   [(r'\$BIN_DIR/\.\./\.\./lib',
                     r'$BIN_DIR/../lib/dart2js/lib')])
Example #4
    def save_firefox_profile(self, remove_old=False):
        """Function to save the firefox profile to the permanant one"""
        self.logger.info("Saving profile from %s to %s" % (self._profile.path, self._profile_path))

        if remove_old:
            if os.path.exists(self._profile_path):
                try:
                    shutil.rmtree(self._profile_path)
                except OSError:
                    pass

            shutil.copytree(os.path.join(self._profile.path), self._profile_path,
                            ignore=shutil.ignore_patterns("parent.lock", "lock", ".parentlock"))
        else:
            for item in os.listdir(self._profile.path):
                if item in ["parent.lock", "lock", ".parentlock"]:
                    continue
                s = os.path.join(self._profile.path, item)
                d = os.path.join(self._profile_path, item)
                if os.path.isdir(s):
                    shutil.copytree(s, d,
                                    ignore=shutil.ignore_patterns("parent.lock", "lock", ".parentlock"))
                else:
                    shutil.copy2(s, d)

        with open(os.path.join(self._profile_path, self._LOCAL_STORAGE_FILE), 'w') as f:
            f.write(dumps(self.get_local_storage()))
Example #5
def _perform_utilproc(src, cmd_args,
                      on_cmd=None, include=None, ignore=None, verbose=False):
    if include is None:
        return

    names = os.listdir(src)
    inc_p = ignore_patterns(*include)
    include_names = inc_p(src, names)

    if ignore is None:
        ignore_files = set()
    else:
        patterns = ignore_patterns(*ignore)
        ignore_files = patterns(src, names)

    for name in names:
        if name in ignore_files:
            if verbose:
                cprint.warning('ignored: ', os.path.join(src, name))
            continue

        srcname = os.path.join(src, name)
        if not os.path.isdir(srcname) and name not in include_names:
            if verbose:
                cprint.warning('excluded: ', os.path.join(src, name))
            continue

        try:
            if os.path.isdir(srcname):
                _perform_utilproc(srcname,
                                  cmd_args,
                                  on_cmd=on_cmd,
                                  include=include,
                                  ignore=ignore,
                                  verbose=verbose)
            else:
                args = [a if a != '{FILENAME}' else srcname for a in cmd_args]
                if verbose:
                    cprint.ok(str(' '.join(c for c in args)))

                p = Popen(args, stdout=PIPE, stderr=PIPE)
                stdout, stderr = p.communicate()

                if p.returncode != 0:
                    cprint.error('ERROR PROCESSING: ', '{0} -> {1}'.
                                 format(srcname, args))
                    cprint.error(stderr)
                    continue

                if on_cmd is not None:
                    on_cmd(srcname, stdout)
                elif verbose:
                    cprint(stdout)

        except (IOError, os.error) as err:
            msg = "Failed to perform util on {0}: ".format(srcname, str(err))
            return (False, msg)

    return (True, None)
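A hedged usage sketch for the helper above, assuming a `pylint` executable is on PATH; the `{FILENAME}` placeholder is substituted with each matching file before the command is run.

# Hypothetical invocation: run pylint over every *.py file under ./src,
# skipping test modules; each run's stdout is handed to a callback.
def print_report(filename, output):
    print(filename)
    print(output.decode(errors="replace"))

ok, error_msg = _perform_utilproc(
    "src",
    ["pylint", "{FILENAME}"],
    on_cmd=print_report,
    include=["*.py"],
    ignore=["*_test.py"],
    verbose=True,
)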
Example #6
    def do_copy(self, src, dest, perms=None):
        """Copy the src dir to the dest dir omitting the self.ignore globs."""
        if os.path.isdir(self.dest_path):
            self.copy_if_missing(src, dest, ignore=shutil.ignore_patterns(*self.ignore))
        else:
            shutil.copytree(src, dest, ignore=shutil.ignore_patterns(*self.ignore))

        if perms:
            for dirname, dirnames, filenames in os.walk(dest):
                for filename in filenames:
                    os.chmod(os.path.join(dirname, filename), perms)
Example #7
def _copy_data_files(rpath, datadir, rdatadir):
    try:
        shutil.copytree(os.path.join(DATA_DIR, rpath), os.path.join(datadir,
                    rpath), ignore=shutil.ignore_patterns('*.pyc', '*.pyo'))
    except FileNotFoundError:
        pass

    try:
        shutil.copytree(os.path.join(RDATA_DIR, rpath), os.path.join(rdatadir,
                    rpath), ignore=shutil.ignore_patterns('*.pyc', '*.pyo'))
    except FileNotFoundError:
        pass
Example #8
def cmd_co(db, tree_name, path):
    check_path_shape(path)
    curs = db.cursor()

    # sanity checks
    curs.execute("SELECT * FROM checkout WHERE pkgpath = ?", (path, ))
    if len(curs.fetchall()) != 0:
        print("Error: Already checked out in %s" % \
            os.path.join(MYSTUFF_PATH, path))
        exit_nicely(db)

    if tree_name == "wip":
        tree = WIP_PATH
        origin = ORIGIN_WIP
    elif tree_name ==  "main":
        tree = PORTS_PATH
        origin = ORIGIN_PORTS
    else:
        print("Error: Bad tree path. Should be either 'wip' or 'main'")
        exit_nicely(db)

    if os.path.exists(os.path.join(MYSTUFF_PATH, path)):
        print("error: Destination path exists: %s" % \
            os.path.join(MYSTUFF_PATH, path))
        exit_nicely(db)

    if not os.path.exists(os.path.join(tree, path)):
        print("error: Source path does not exist: %s" % \
            os.path.join(tree, path))
        exit_nicely(db)

    # We might have to create the category directory
    category_dir = os.path.dirname(os.path.join(MYSTUFF_PATH, path))
    if not os.path.exists(category_dir):
        os.mkdir(category_dir)

    # Copy in from source
    shutil.copytree( \
        os.path.join(tree, path), os.path.join(MYSTUFF_PATH, path), \
        ignore=shutil.ignore_patterns("CVS"))

    # Archive away a copy for merges
    shutil.copytree(os.path.join(tree, path), os.path.join(ARCHIVE_PATH, path), \
        ignore=shutil.ignore_patterns("CVS"))

    # Update db
    curs.execute("INSERT INTO checkout (pkgpath, origin, flags) VALUES " + \
        "(?, ?, ?)", (path, origin, 0))
    db.commit()

    print("Port checked out into %s" % (os.path.join(MYSTUFF_PATH, path)))
Example #9
def copy_dirs():
	if not os.path.exists("dist"):
		os.mkdir("dist")
		
	if os.path.exists(os.path.join("dist", "docs")):
		shutil.rmtree(os.path.join("dist", "docs"))
	if os.path.exists(os.path.join("dist", "bin")):
		shutil.rmtree(os.path.join("dist", "bin"))
	if os.path.exists(os.path.join("dist", "etc")):
		shutil.rmtree(os.path.join("dist", "etc"))
	if os.path.exists(os.path.join("dist", "interface")):
		shutil.rmtree(os.path.join("dist", "interface"))
	if os.path.exists(os.path.join("dist", "lib")):
		shutil.rmtree(os.path.join("dist", "lib"))
	if os.path.exists(os.path.join("dist", "pixmaps")):
		shutil.rmtree(os.path.join("dist", "pixmaps"))
	if os.path.exists(os.path.join("dist", "share")):
		shutil.rmtree(os.path.join("dist", "share"))
	if os.path.exists(os.path.join("dist", "po")):
		shutil.rmtree(os.path.join("dist", "po"))
		
	shutil.copytree("docs", os.path.join("dist", "docs"), ignore=shutil.ignore_patterns('.git*'))
	shutil.copytree("bin", os.path.join("dist", "bin"), ignore=shutil.ignore_patterns('.git*'))
	shutil.copytree("etc", os.path.join("dist", "etc"), ignore=shutil.ignore_patterns('.git*'))
	shutil.copytree("interface", os.path.join("dist", "interface"), ignore=shutil.ignore_patterns('.git*'))
	shutil.copytree("lib", os.path.join("dist", "lib"), ignore=shutil.ignore_patterns('.git*'))
	shutil.copytree("pixmaps", os.path.join("dist", "pixmaps"), ignore=shutil.ignore_patterns('.git*'))
	shutil.copytree("share", os.path.join("dist", "share"), ignore=shutil.ignore_patterns('.git*'))
	shutil.copytree("po", os.path.join("dist", "po"), ignore=shutil.ignore_patterns('.git*'))
Example #10
def copy_application(src, dst):

    application_name = os.path.basename(src)

    if not os.path.exists(dst):
        shutil.copytree(src, dst, ignore=shutil.ignore_patterns(IGNORE_PATTERNS))
        os.remove(dst+"/__init__.py")
        os.remove(dst+"/__init__.pyc")
        
        app_files = os.listdir(dst+"/application/")
        app_files = [dst+"/application/"+filename for filename in app_files]
        
        for file in app_files:
            shutil.move(file, dst)
    
        os.rmdir(dst+"/application/")

        #every wordpress plugin becomes a new application
        if (dst.find("wordpress") != -1 and os.path.exists(dst+"/plugins/")):
            plugins = os.listdir(dst+"/plugins/")

            if(".DS_Store" in plugins):
                plugins.remove(".DS_Store")

            plugins = [dst+"/plugins/"+filename for filename in plugins]
            
            for plugin in plugins:
                
                plugin_name = os.path.basename(plugin)
                plugin_dst = settings.applications_path+application_name+"_"+plugin_name
                
                shutil.copytree(dst,plugin_dst, ignore=shutil.ignore_patterns(IGNORE_PATTERNS))
                
                os.makedirs(plugin_dst+"/wp-content/plugins/"+plugin_name.split("_")[0]+"/")

                if(os.path.isfile(plugin_dst+"/plugins/"+plugin_name+"/database.sql")):
                    shutil.move(plugin_dst+"/plugins/"+plugin_name+"/database.sql", plugin_dst+"/wp-content/plugins/"+plugin_name.split("_")[0]+"/")
                
                plugin_files = os.listdir(plugin_dst+"/plugins/"+plugin_name+"/plugin/")
                plugin_files = [plugin_dst+"/plugins/"+plugin_name+"/plugin/"+filename for filename in plugin_files]
                
                for plugin_file in plugin_files:
                    shutil.move(plugin_file, plugin_dst+"/wp-content/plugins/"+plugin_name.split("_")[0]+"/")

                shutil.rmtree(plugin_dst+"/plugins/")
                generate_configuration(application_name+"_"+plugin_name, plugin_name=plugin_name)

    else:
        print("application " +dst+ " already imported")
Example #11
    def install(self, spec, prefix):
        """Install under the projectdir (== prefix/name-version)"""
        self.build(spec, prefix)  # Should be a separate phase
        opts = self.wm_options

        # Fairly ugly since intermediate targets are scattered inside sources
        appdir = 'applications'
        mkdirp(self.projectdir, join_path(self.projectdir, appdir))

        # Retain build log file
        out = "spack-build.out"
        if isfile(out):
            install(out, join_path(self.projectdir, "log." + opts))

        # All top-level files, except spack build info and possibly Allwmake
        if '+source' in spec:
            ignored = re.compile(r'^spack-.*')
        else:
            ignored = re.compile(r'^(Allclean|Allwmake|spack-).*')

        files = [
            f for f in glob.glob("*") if isfile(f) and not ignored.search(f)
        ]
        for f in files:
            install(f, self.projectdir)

        # Install directories. install applications/bin directly
        for d in ['bin', 'etc', 'wmake', 'lib', join_path(appdir, 'bin')]:
            install_tree(
                d,
                join_path(self.projectdir, d))

        if '+source' in spec:
            subitem = join_path(appdir, 'Allwmake')
            install(subitem, join_path(self.projectdir, subitem))

            ignored = [opts]  # Intermediate targets
            for d in ['src', 'tutorials']:
                install_tree(
                    d,
                    join_path(self.projectdir, d),
                    ignore=shutil.ignore_patterns(*ignored))

            for d in ['solvers', 'utilities']:
                install_tree(
                    join_path(appdir, d),
                    join_path(self.projectdir, appdir, d),
                    ignore=shutil.ignore_patterns(*ignored))
Example #12
 def run(self):
     dst = self.config.get_dst_folder()
     cdv_dst = self.config.get_cordova_dst_folder(self.key)
     if os.path.exists(cdv_dst):
         names = os.listdir(cdv_dst)
         for name in names:
             if not name.startswith('.'):
                 name = os.path.join(cdv_dst, name)
                 if os.path.isfile(name):
                     os.remove(name)
                 else:
                     shutil.rmtree(name)
     names = os.listdir(dst)
     for name in names:
         if not name.startswith('.'):
             src = os.path.join(dst, name)
             copy = os.path.join(cdv_dst, name)
             if os.path.isfile(src):
                 shutil.copy(src, copy)
             else:
                 shutil.copytree(src, copy, ignore=shutil.ignore_patterns('.*'))
     for r, d, f in os.walk(cdv_dst):
         for files in filter(lambda x: x.endswith('.html'), f):
             p = os.path.join(r, files)
             self.replace_cordova_tag(p)
     self.copy_icons(dst)
     self.copy_splash(dst)
Example #13
def virtualenv(tmpdir, monkeypatch):
    """
    Return a virtual environment which is unique to each test function
    invocation created inside of a sub directory of the test function's
    temporary directory. The returned object is a
    ``tests.lib.venv.VirtualEnvironment`` object.
    """
    # Force shutil to use the older method of rmtree that didn't use the fd
    # functions. These seem to fail on Travis (and only on Travis).
    monkeypatch.setattr(shutil, "_use_fd_functions", False, raising=False)

    # Copy over our source tree so that each virtual environment is self
    # contained
    pip_src = tmpdir.join("pip_src").abspath
    shutil.copytree(
        SRC_DIR,
        pip_src,
        ignore=shutil.ignore_patterns(
            "*.pyc", "tests", "pip.egg-info", "build", "dist", ".tox",
        ),
    )

    # Create the virtual environment
    venv = VirtualEnvironment.create(
        tmpdir.join("workspace", "venv"),
        pip_source_dir=pip_src,
    )

    # Undo our monkeypatching of shutil
    monkeypatch.undo()

    return venv
Example #14
    def __init__(self, profile_directory=None):
        """
        Initialises a new instance of a Firefox Profile

        :args:
         - profile_directory: Directory of profile that you want to use.
           This defaults to None and will create a new
           directory when object is created.
        """
        if not FirefoxProfile.DEFAULT_PREFERENCES:
            with open(os.path.join(os.path.dirname(__file__),
                                   WEBDRIVER_PREFERENCES)) as default_prefs:
                FirefoxProfile.DEFAULT_PREFERENCES = json.load(default_prefs)

        self.default_preferences = copy.deepcopy(
            FirefoxProfile.DEFAULT_PREFERENCES['mutable'])
        self.native_events_enabled = True
        self.profile_dir = profile_directory
        self.tempfolder = None
        if self.profile_dir is None:
            self.profile_dir = self._create_tempfolder()
        else:
            self.tempfolder = tempfile.mkdtemp()
            newprof = os.path.join(self.tempfolder, "webdriver-py-profilecopy")
            shutil.copytree(self.profile_dir, newprof,
                            ignore=shutil.ignore_patterns("parent.lock", "lock", ".parentlock"))
            self.profile_dir = newprof
            self._read_existing_userjs(os.path.join(self.profile_dir, "user.js"))
        self.extensionsDir = os.path.join(self.profile_dir, "extensions")
        self.userPrefs = os.path.join(self.profile_dir, "user.js")
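A hedged usage sketch (Selenium 3-era API): passing an existing profile directory makes the constructor above copy it into a temporary folder while skipping the lock files.

from selenium import webdriver
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile

# "/path/to/existing/profile" is a placeholder; the copy goes to a temporary
# "webdriver-py-profilecopy" directory, so the original profile stays untouched.
profile = FirefoxProfile("/path/to/existing/profile")
driver = webdriver.Firefox(firefox_profile=profile)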
Example #15
def copyonly(dirpath, contents):
     ignore_list = set(contents) - set(shutil.ignore_patterns(*exts_to_keep)(dirpath, contents),)
     ignore_list2 = list(ignore_list)
     for item in ignore_list:
        if os.path.isdir(dirpath + '/' + item): ignore_list2.remove(item)

     return ignore_list2
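A hedged usage sketch for `copyonly`: it returns the names to *ignore*, so it plugs directly into `shutil.copytree`; `exts_to_keep` is assumed to be a module-level tuple of glob patterns worth keeping.

import shutil

exts_to_keep = ("*.py", "*.ini")  # assumed module-level patterns to keep

# Directories are never ignored (copyonly drops them from the ignore list),
# so the whole tree is walked but only files matching exts_to_keep are copied.
shutil.copytree("project_src", "project_copy", ignore=copyonly)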
Example #16
def packaging(src):
	"""
		Read install.rdf and package an xpi file,
		for example: 
			xxx-0.1.xpi
	"""
	work_copy = osp.dirname(src)
	
	addon_info = "".join(open(work_copy + osp.sep + "install.rdf"))
	addon_name = re.search("(?<=em\:name\=\").*(?=\")",addon_info).group(0)
	addon_version =  re.search("(?<=em\:version\=\").*(?=\")",addon_info).group(0)

	temp_copy_base = tempfile.mkdtemp()
	temp_copy = osp.join(temp_copy_base,addon_name)
	
	xpi_name = "%s-%s.xpi" % (addon_name,addon_version)
	xpi_fullpath = osp.join(work_copy,xpi_name);
	
	print """
	Add-on    : %s
	Version   : %s
	Work Copy : %s
	Temp Copy : %s
	XPI File  : %s
	""" % (addon_name,addon_version,work_copy,temp_copy, xpi_name)

	print "copying work to temp dir..."
	copytree(work_copy,temp_copy,ignore=ignore_patterns('scriptdemo','*.xpi','.*','*.bat','*.py','*LOG','*~','*.swp'))

	print "packaging xpi..."
	compress(temp_copy,xpi_fullpath);

	print "cleaning..."
	rmtree(temp_copy_base)
Example #17
    def __init__(self,profile_directory=None):
        """
        Initialises a new instance of a Firefox Profile

        :args:
         - profile_directory: Directory of profile that you want to use.
           This defaults to None and will create a new
           directory when object is created.
        """
        self.default_preferences = copy.deepcopy(
            FirefoxProfile.DEFAULT_PREFERENCES)
        self.profile_dir = profile_directory
        self.tempfolder = None
        if self.profile_dir is None:
            self.profile_dir = self._create_tempfolder()
        else:
            self.tempfolder = tempfile.mkdtemp()
            newprof = os.path.join(self.tempfolder,
                "webdriver-py-profilecopy")
            shutil.copytree(self.profile_dir, newprof,
                ignore=shutil.ignore_patterns("parent.lock", "lock", ".parentlock"))
            self.profile_dir = newprof
            self._read_existing_userjs()
        self.extensionsDir = os.path.join(self.profile_dir, "extensions")
        self.userPrefs = os.path.join(self.profile_dir, "user.js")
Example #18
    def _init(self):
        from natsort import natsorted
        folders = natsorted(glob(path.join(self.generatorspath, '*', ''))) # I need the extra ''  to add a finishing /
        if len(folders) == 0:
            logger.info('Generators folder has no subdirectories, using folder itself')
            folders.append(self.generatorspath)

        numF = len(folders)
        numCopies = np.ones(numF, dtype=int) * int(np.floor(self.nmax / numF))
        numExtra = np.mod(self.nmax, numF)
        extraChoices = np.random.choice(numF, numExtra, replace=False) # draw the extra
        numCopies[extraChoices] += 1
        # numCopies = numCopies + np.random.multinomial(numExtra, [1/numF]*numF)  # draw the extra equally from a flat distribution
        if not path.exists(self.inputpath):
            makedirs(self.inputpath)

        # Check if epoch 1 directories already exist in the input folder
        existing = glob(path.join(self.inputpath, 'e1s*'))
        if len(existing) != 0:
            raise NameError('Epoch 1 directories already exist.')

        k = 1
        for i in range(numF):
            for j in range(numCopies[i]):
                name = _simName(folders[i])
                inputdir = path.join(self.inputpath, 'e1s' + str(k) + '_' + name)
                #src = path.join(self.generatorspath, name, '*')
                src = folders[i]
                copytree(src, inputdir, symlinks=True, ignore=ignore_patterns(*_IGNORE_EXTENSIONS))
                k += 1
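A small worked example of the copy-count arithmetic used above, assuming nmax=10 and three generator folders: every folder gets floor(10/3)=3 copies and the single leftover copy goes to one randomly chosen folder.

import numpy as np

nmax, numF = 10, 3
numCopies = np.ones(numF, dtype=int) * int(np.floor(nmax / numF))  # [3, 3, 3]
numExtra = np.mod(nmax, numF)                                       # 1
extraChoices = np.random.choice(numF, numExtra, replace=False)      # e.g. [2]
numCopies[extraChoices] += 1                                         # e.g. [3, 3, 4]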
Example #19
def _writeInputsFunction(i, f, epoch, inputpath, coorname):
    regex = re.compile(r'(e\d+s\d+)_')
    frameNum = f.frame
    piece = f.piece
    if f.sim.parent is None:
        currSim = f.sim
    else:
        currSim = f.sim.parent

    traj = currSim.trajectory[piece]
    if currSim.input is None:
        raise NameError('Could not find input folder in simulation lists. Cannot create new simulations.')

    wuName = _simName(traj)
    res = regex.search(wuName)
    if res:  # If we are running on top of adaptive, use the first name part for the next sim name
        wuName = res.group(1)

    # create new job directory
    newName = 'e' + str(epoch) + 's' + str(i + 1) + '_' + wuName + 'p' + str(piece) + 'f' + str(frameNum)
    newDir = path.join(inputpath, newName, '')

    # copy previous input directory including input files
    copytree(currSim.input, newDir, symlinks=False, ignore=ignore_patterns('*.coor', '*.rst', '*.out', *_IGNORE_EXTENSIONS))

    # overwrite input file with new one. frameNum + 1 as catdcd does 1 based indexing

    mol = Molecule(currSim.molfile)  # Always read the mol file, otherwise it does not work if we need to save a PDB as coorname
    mol.read(traj)
    mol.dropFrames(keep=frameNum)  # Making sure only specific frame to write is kept
    mol.write(path.join(newDir, coorname))
Example #20
    def createXPI(self, outDir=None, xpiName=None):
        if outDir is None:
            outDir = "."
        if xpiName is None:
            xpiName = "%s-%s.xpi" % (self.basename, self.install.get('version'))

        ignore = shutil.ignore_patterns('.*')
        defaults = ['defaults', 'modules', 'install.rdf']
        if os.path.exists(os.path.join(self.dataDir, '.xpiinclude')):
            defaults += [ filename.rstrip() for filename in open(os.path.join(self.dataDir, '.xpiinclude')).readlines()]

        for item in defaults:
            src = os.path.join(self.dataDir, item)
            if os.path.isdir(src):
                shutil.copytree(src, os.path.join(self.buildDir, item), False, ignore)
            else:
                shutil.copy2(src, os.path.join(self.buildDir, item))

        self.updateManifest()

        xpiFile = zipfile.ZipFile(os.path.join(outDir, xpiName), 'w', zipfile.ZIP_DEFLATED, False)
        for dirpath, dirs, files in os.walk(os.path.join(self.buildDir)):
            for fileName in files:
                relPath = os.path.join(dirpath, fileName)
                zipPath = os.path.join(os.path.relpath(dirpath, self.buildDir), fileName)
                xpiFile.write(relPath, zipPath)
        xpiFile.close()
        return xpiName
Example #21
 def prepare_output(self):
     '''
     Copies static content to output and creates required directories.
     '''
     helper.log.dbg('Copying static content to output...')
     try:
         shutil.rmtree(OUTPUT)
     except OSError:
         pass
     copyignore = shutil.ignore_patterns('.git', '.svn', '*.swp', '_*')
     shutil.copytree(STATIC, OUTPUT, ignore=copyignore)
     imgdst = os.path.join(OUTPUT, 'images')
     shutil.copytree(IMAGES, imgdst, ignore=copyignore)
     try:
         os.mkdir(os.path.join(OUTPUT, 'security'))
     except OSError:
         pass
     try:
         os.mkdir(os.path.join(OUTPUT, 'css'))
     except OSError:
         pass
     try:
         os.mkdir(os.path.join(OUTPUT, 'js'))
     except OSError:
         pass
Example #22
def js():
    """Combine and minify RequireJS modules"""

    if (env.is_remote):
        abort('You cannot build js files remotely!\n'
              'This should be done in your local development env.')

    import os
    from shutil import copytree, rmtree, ignore_patterns
    collect_js()

    proj_path = os.path.join(os.path.dirname(__file__), '../mootiro_maps')
    build_path = os.path.join(proj_path, '../.build')
    local('r.js -o {}'.format(os.path.join(proj_path, '../app.build.js')))
    from_ = os.path.join(build_path, 'min')
    to = os.path.join(proj_path, 'static', 'js.build')
    try:
        rmtree(to)
    except OSError:
        pass
    logging.info('copying compiled javascripts to {}'.format(to))
    copytree(from_, to, ignore=ignore_patterns('*.coffee', '*~'))

    # Removes the build dir
    rmtree(build_path)

    test_js()
Example #23
    def createJar(self, jarName=None):
        if jarName is None:
            jarName = "%s.jar" % self.basename

        chromeDir = os.path.join(self.buildDir, 'chrome')
        if not os.path.exists(chromeDir):
            os.makedirs(chromeDir)
        jarFile = zipfile.ZipFile(os.path.join(chromeDir, jarName), 'w', zipfile.ZIP_STORED, False)

        ignore = shutil.ignore_patterns('.*')
        jarDirs = self.manifest.jarDirs()

        for dir in jarDirs:
            shutil.copytree(os.path.join(self.dataDir, dir), os.path.join(self.buildDir, dir), False, ignore)
            for dirpath, dirs, files in os.walk(os.path.join(self.buildDir, dir)):
                if dirpath != chromeDir:
                    for fileName in files:
                        relPath = os.path.join(dirpath, fileName)
                        zipPath = os.path.join(os.path.relpath(dirpath, self.buildDir), fileName)
                        jarFile.write(relPath, zipPath)
        jarFile.close()

        # Remove redundant directories
        for dir in jarDirs:
            shutil.rmtree(os.path.join(self.buildDir, dir))
        self.manifest.setJar(jarName)
Example #24
def copy_packages(packages_names, dest, create_links=False, extra_ignores=None):
    """Copy python packages ``packages_names`` to ``dest``, spurious data.

    Copy will happen without tests, testdata, mercurial data or C extension module source with it.
    ``py2app`` include and exclude rules are **quite** funky, and doing this is the only reliable
    way to make sure we don't end up with useless stuff in our app.
    """
    if ISWINDOWS:
        create_links = False
    if not extra_ignores:
        extra_ignores = []
    ignore = shutil.ignore_patterns('.hg*', 'tests', 'testdata', 'modules', 'docs', 'locale', *extra_ignores)
    for package_name in packages_names:
        if op.exists(package_name):
            source_path = package_name
        else:
            mod = __import__(package_name)
            source_path = mod.__file__
            if mod.__file__.endswith('__init__.py'):
                source_path = op.dirname(source_path)
        dest_name = op.basename(source_path)
        dest_path = op.join(dest, dest_name)
        if op.exists(dest_path):
            if op.islink(dest_path):
                os.unlink(dest_path)
            else:
                shutil.rmtree(dest_path)
        print("Copying package at {0} to {1}".format(source_path, dest_path))
        if create_links:
            os.symlink(op.abspath(source_path), dest_path)
        else:
            if op.isdir(source_path):
                shutil.copytree(source_path, dest_path, ignore=ignore)
            else:
                shutil.copy(source_path, dest_path)
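A hedged usage sketch with hypothetical package names and destination: local packages are copied by path, installed ones are resolved via `__import__`.

# Hypothetical call: "core" and "hscommon" are local source packages,
# "send2trash" is resolved from site-packages; *.po files are ignored as well.
copy_packages(
    ["core", "hscommon", "send2trash"],
    "build/py_deps",
    extra_ignores=["*.po"],
)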
Example #25
 def copy_files(scripts_sub_dir):
     src = os.path.join(self.pylucid_dir, "scripts", scripts_sub_dir)
     print "%s -> %s" % (src, self.dest_package_dir)
     try:
         copytree2(src, self.dest_package_dir, ignore=shutil.ignore_patterns(*COPYTREE_IGNORE_FILES))
     except OSError, why:
         print "copytree2 error: %s" % why
Example #26
    def prepublish(self, new_version, atom_semver):
        logging.info('Publishing %s to npm at version %s', self.get_package_name(), new_version)

        # Create temporary directory and copy package into it (without dependencies).
        package = self._config.package_directory
        logging.info('Copying %s to tmpdir', self.get_package_name())
        shutil.copytree(package, self._tmp_package, ignore=shutil.ignore_patterns('node_modules'))

        # Make sure that standard boilerplate files are included in the repo.
        for name, src in self._boilerplate_files.items():
            shutil.copyfile(
                src,
                os.path.join(self._tmp_package, name))

        # Load package.json and rewrite version number within it.
        package_file = os.path.join(self._tmp_package, 'package.json')
        package = json_load(package_file)
        package = update_package_json_versions(self.get_package_name(), package,
            self._config.nuclide_npm_package_names, new_version)

        # Specify the license if it is not already specified.
        if 'license' not in package:
            package['license'] = 'SEE LICENSE IN LICENSE'

        # Write the adjusted package file back to the temporary directory and publish it.
        json_dump(package, package_file)

        # Pre-transpile Babel files, as appropriate.
        self._transpiler.transpile_in_place(self.get_package_name(), self._tmp_package)

        rewrite_shrinkwrap_file(self._tmp_package,
            package, self._config.nuclide_npm_package_names, new_version)
Example #27
def svn_source(meta):
    """ Download a source from SVN repo. """

    def parse_bool(s):
        return str(s).lower().strip() in ("yes", "true", "1", "on")

    svn = external.find_executable("svn")
    if not svn:
        sys.exit("Error: svn is not installed")
    svn_url = meta["svn_url"]
    svn_revision = meta.get("svn_rev") or "head"
    svn_ignore_externals = parse_bool(meta.get("svn_ignore_externals") or "no")
    if not isdir(SVN_CACHE):
        os.makedirs(SVN_CACHE)
    svn_dn = svn_url.split(":", 1)[-1].replace("/", "_").replace(":", "_")
    cache_repo = join(SVN_CACHE, svn_dn)
    if svn_ignore_externals:
        extra_args = ["--ignore-externals"]
    else:
        extra_args = []
    if isdir(cache_repo):
        check_call([svn, "up", "-r", svn_revision] + extra_args, cwd=cache_repo)
    else:
        check_call([svn, "co", "-r", svn_revision] + extra_args + [svn_url, cache_repo])
        assert isdir(cache_repo)

    # now copy into work directory
    copytree(cache_repo, WORK_DIR, ignore=ignore_patterns(".svn"))
    return WORK_DIR
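A hedged sketch of the `meta` mapping the function above expects; the URL and revision are placeholders.

meta = {
    "svn_url": "https://svn.example.org/project/trunk",  # placeholder repository URL
    "svn_rev": "1234",             # optional; defaults to "head"
    "svn_ignore_externals": "yes"  # parsed with parse_bool()
}
work_dir = svn_source(meta)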
Example #28
    def add_ancestor(self, artifact):
        other_path = artifact._archiver.provenance_dir
        if other_path is None:
            # The artifact doesn't have provenance (e.g. version 0)
            # it would be possible to invent a metadata.yaml, but we won't know
            # the framework version for the VERSION file. Even if we did
            # it won't accomplish a lot and there shouldn't be enough
            # version 0 artifacts in the wild to be important in practice.
            # NOTE: this implies that it is possible for an action.yaml file to
            # contain an artifact UUID that is not in the artifacts/ directory.
            return NoProvenance(artifact.uuid)

        destination = self.ancestor_dir / str(artifact.uuid)
        # If it exists, then the artifact is already in the provenance
        # (and so are its ancestors)
        if not destination.exists():
            # Handle root node of ancestor
            shutil.copytree(
                str(other_path), str(destination),
                ignore=shutil.ignore_patterns(self.ANCESTOR_DIR + '*'))

            # Handle ancestral nodes of ancestor
            grandcestor_path = other_path / self.ANCESTOR_DIR
            if grandcestor_path.exists():
                for grandcestor in grandcestor_path.iterdir():
                    destination = self.ancestor_dir / grandcestor.name
                    if not destination.exists():
                        shutil.copytree(str(grandcestor), str(destination))

        return str(artifact.uuid)
Example #29
def install(opts):
    if os.path.exists("bin\\Win32_Release"):
        print ("Installing...")
        if os.path.exists(opts.mangos_destdir):
            for name in os.listdir("bin\\Win32_Release"):
                if fnmatch.fnmatch(name, '*.map'):
                    pass
                elif fnmatch.fnmatch(name, '*.pdb'):
                    pass
                elif fnmatch.fnmatch(name, '*.exp'):
                    pass
                else:
                    srcname = os.path.join("bin\\Win32_Release", name)
                    destname = os.path.join(opts.mangos_destdir, name)
                    shutil.copy2(srcname, destname)
        else:
            shutil.copytree("bin\\Win32_Release", opts.mangos_destdir, ignore=shutil.ignore_patterns('*.map', '*.pdb', '*.exp'))

        shutil.copyfile("src\\mangosd\\mangosd.conf.dist.in", opts.mangos_destdir+"\\mangosd.conf.dist")
        if not os.path.exists(opts.mangos_destdir+"\\mangosd.conf"):
            shutil.copyfile("src\\mangosd\\mangosd.conf.dist.in", opts.mangos_destdir+"\\mangosd.conf")
        shutil.copyfile("src\\realmd\\realmd.conf.dist.in", opts.mangos_destdir+"\\realmd.conf.dist")
        if not os.path.exists(opts.mangos_destdir+"\\realmd.conf"):
            shutil.copyfile("src\\realmd\\realmd.conf.dist.in", opts.mangos_destdir+"\\realmd.conf")
        shutil.copyfile("src\\bindings\\ScriptDev2\\scriptdev2.conf.dist.in", opts.mangos_destdir+"\\scriptdev2.conf.dist")
        if not os.path.exists(opts.mangos_destdir+"\\scriptdev2.conf"):
            shutil.copyfile("src\\bindings\\ScriptDev2\\scriptdev2.conf.dist.in", opts.mangos_destdir+"\\scriptdev2.conf")
Example #30
def buildPackageFolder(folderName):
    buildDir=tmpDir+folderName+'_build'
    buildBinDir=buildDir+'/usr/share/qtodotxt/bin/'
    debianDir=buildDir+'/DEBIAN/'

    # Tree structure
    os.makedirs(debianDir)
    os.makedirs(buildDir+'/usr/bin/')
    os.makedirs(buildDir+'/usr/share/doc/qtodotxt')
    os.makedirs(buildDir+'/usr/share/applications')

    #Copy tag folder to build folder except the windows script
    copytree(tmpDir+folderName,buildDir+'/usr/share/qtodotxt',False,ignore_patterns('qtodotxt.pyw'))
    #Fix execution rights on bin folder
    for file in os.listdir(buildBinDir):
        filePath=os.path.join(buildBinDir,file)
        if os.path.isfile(filePath):
            st = os.stat(filePath)
            os.chmod(filePath, st.st_mode | S_IEXEC)

    # Adding copyright file
    copy(scriptDir+'/copyright',buildDir+'/usr/share/doc/qtodotxt/copyright')
    # Adding desktop file
    copy(scriptDir+'/qtodotxt.desktop',buildDir+'/usr/share/applications/qtodotxt.desktop')
    # Adding changelog file
    f_in = open(scriptDir+'/changelog', 'rb')
    f_out = gzip.open(buildDir+'/usr/share/doc/qtodotxt/changelog.gz', 'wb')
    f_out.writelines(f_in)
    f_out.close()
    f_in.close()

    return (buildDir,debianDir)
Example #31
              'Delta1={average.delta1:.3f}\n'
              'REL={average.absrel:.3f}\n'
              'Lg10={average.lg10:.3f}\n'
              't_GPU={time:.3f}'.format(average=avg, time=avg.gpu_time))
        if is_best and mode == "val":
            print("New best model by %s (was %.3f)" %
                  (self.args.rank_metric,
                   self.get_ranking_error(self.old_best_result)))
        elif mode == "val":
            print("(best %s is %.3f)" %
                  (self.args.rank_metric,
                   self.get_ranking_error(self.best_result)))
        print("*\n")


ignore_hidden = shutil.ignore_patterns(".", "..", ".git*", "*pycache*",
                                       "*build", "*.fuse*", "*_drive_*")


def backup_source_code(backup_directory):
    if os.path.exists(backup_directory):
        shutil.rmtree(backup_directory)
    shutil.copytree('..', backup_directory, ignore=ignore_hidden)


def adjust_learning_rate(lr_init, optimizer, epoch):
    """Sets the learning rate to the initial LR decayed by 10 every 5 epochs"""
    lr = lr_init * (0.5**(epoch // 5))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
    return lr
Example #32
    def simulate(self):
        '''Simulates the model.

        This method
          1. Deletes dymola output files
          2. Copies the current directory, or the directory specified by the ``packagePath``
             parameter of the constructor, to a temporary directory.
          3. Writes a Modelica script to the temporary directory.
          4. Starts the Modelica simulation environment from the temporary directory.
          5. Translates and simulates the model.
          6. Closes the Modelica simulation environment.
          7. Copies output files and deletes the temporary directory.

        This method requires that the directory that contains the executable *dymola*
        is on the system PATH variable. If it is not found, the function returns with
        an error message.

        '''
        import os
        import shutil

        # Delete dymola output files
        self.deleteOutputFiles()

        # Get directory name. This ensures for example that if the directory is called xx/Buildings
        # then the simulations will be done in tmp??/Buildings
        worDir = self._create_worDir()
        self._simulateDir_ = worDir
        # Copy directory
        shutil.copytree(os.path.abspath(self._packagePath),
                        worDir,
                        ignore=shutil.ignore_patterns('*.svn', '*.git'))

        # Construct the model instance with all parameter values
        # and the package redeclarations
        dec = self._declare_parameters()
        dec.extend(self._modelModifiers_)

        mi = '"{mn}({dec})"'.format(mn=self.modelName, dec=','.join(dec))

        try:
            # Write the Modelica script
            runScriptName = os.path.join(worDir, "run.mos")
            with open(runScriptName, mode="w", encoding="utf-8") as fil:
                fil.write(
                    self._get_dymola_commands(working_directory=worDir,
                                              log_file="simulator.log",
                                              model_name=mi,
                                              translate_only=False))
            # Copy files to working directory

            # Run simulation
            self._runSimulation(runScriptName, self._simulator_.get('timeout'),
                                worDir)
            self._check_simulation_errors(worDir)
            self._copyResultFiles(worDir)
            self._deleteTemporaryDirectory(worDir)
        except Exception as e:  # Catch all possible exceptions
            em = "Simulation failed in '{worDir}'\n   Exception: {exc}.\n   You need to delete the directory manually.\n".format(
                worDir=worDir, exc=str(e))
            self._reporter.writeError(em)
Example #33
                sys.stdout = new_stdout

            # If we specified a relocated directory to run the test suite, do
            # the extra housekeeping to copy the testdirs to a configStringified
            # directory and to update sys.path before invoking the test runner.
            # The purpose is to separate the configuration-specific directories
            # from each other.
            if rdir:
                from shutil import copytree, ignore_patterns

                newrdir = "%s.%s" % (rdir, configPostfix)

                # Copy the tree to a new directory with postfix name configPostfix.
                copytree(rdir,
                         newrdir,
                         ignore=ignore_patterns('*.pyc', '*.o', '*.d'))

                # Update the LLDB_TEST environment variable to reflect new top
                # level test directory.
                #
                # See also lldbtest.TestBase.setUpClass(cls).
                if len(testdirs) == 1 and os.path.basename(
                        testdirs[0]) == 'test':
                    os.environ["LLDB_TEST"] = os.path.join(newrdir, 'test')
                else:
                    os.environ["LLDB_TEST"] = newrdir

                # And update the Python search paths for modules.
                sys.path = [x.replace(rdir, newrdir, 1) for x in old_sys_path]

            # Output the configuration.
Example #34
from os.path import join, exists, abspath
from shutil import copytree, ignore_patterns, move

import scrapy
from scrapy.commands import ScrapyCommand
from scrapy.utils.template import render_templatefile, string_camelcase
from scrapy.exceptions import UsageError

TEMPLATES_TO_RENDER = (
    ('scrapy.cfg', ),
    ('${project_name}', 'settings.py.tmpl'),
    ('${project_name}', 'items.py.tmpl'),
    ('${project_name}', 'pipelines.py.tmpl'),
)

IGNORE = ignore_patterns('*.pyc', '.svn')


class Command(ScrapyCommand):

    requires_project = False
    default_settings = {'LOG_ENABLED': False}

    def syntax(self):
        return "<project_name>"

    def short_desc(self):
        return "Create new project"

    def _is_valid_name(self, project_name):
        def _module_exists(module_name):
Example #35
def generate_gradle_build(build_path, json_config):
    """Generates a gradle project that can build from a set of dependencies.

  Args:
    build_path: The path to create the gradle build project. If it already
      exists, it will regenerate the project and preserve existing
      intermediates. This allows incremental builds.
    json_config: A json object containing:
      * config: A dictionary of string {var} macro replacements.
      * project_deps: An array of dependencies as
        [namespace, package, version] triplets.
      * m2paths: Optional array of extra m2repository search paths.
        The m2repositories are copied and support aars listed as .srcaar.
  """
    config = json_config.get("config")

    # Create the output folder.
    if not os.path.exists(build_path):
        os.makedirs(build_path)

    # Unzip the template files.
    template_zip_path = os.path.join(os.path.dirname(__file__), TEMPLATE_ZIP)
    zip_ref = zipfile.ZipFile(template_zip_path, "r")
    zip_ref.extractall(build_path)

    # Python's zipfile.extract doesn't preserve file mode, so we need to set the
    # attributes to preserve the execution mode of the gradle script.
    for f in zip_ref.infolist():
        path = os.path.join(build_path, f.filename)
        mode = f.external_attr >> 16 & 0xFFF
        os.chmod(path, mode)

    # Copy the m2 repositories locally, if there are any, and handle renaming
    # any packages with .srcaar extensions to .aar.
    m2paths = json_config.get("extra_m2repositories")
    dest_repo_path = ""
    if m2paths:
        # Build the local m2repository path, that we'll copy everything into.
        dest_repo_path = os.path.join(build_path, M2REPO_LOCAL_COPY)

        # Delete and recopy everything to prevent from accumulating stale packages.
        if os.path.exists(dest_repo_path):
            shutil.rmtree(dest_repo_path)
        for m2path in m2paths:
            merge_dir_tree(m2path,
                           dest_repo_path,
                           ignore=shutil.ignore_patterns("*.meta"))

        # Replace any packages with .srcaar extensions with .aar.
        _fix_package_names(dest_repo_path)

    # Merge all passed in proguard configs into a USER config
    extra_proguard_configs = json_config.get("extra_proguard_configs")
    if extra_proguard_configs:
        dest_proguard_config = os.path.join(build_path, PROGUARD_USER_FILE)
        with open(dest_proguard_config, "a") as output_config:
            for proguard_config in extra_proguard_configs:
                with open(proguard_config, "r") as input_config:
                    output_config.write(input_config.read())

    # Gradle doesn't seem to expand env vars in local.properties, so we'll just do
    # the expansion here.
    config["android_sdk_dir"] = os.path.expandvars(config["android_sdk_dir"])

    # This creates a handler for basic named variable substitution.
    replace_with_config_vars = lambda template: str.format(template, **config)
    patch_gradle_build_with_config = PatchGradleBuildTemplate(
        json_config, M2REPO_LOCAL_COPY)

    # Replace the variables in the templates.
    write_template_vars(os.path.join(build_path, MANIFEST_TEMPLATE),
                        replace_with_config_vars)
    write_template_vars(os.path.join(build_path, LOCAL_PROPS_TEMPLATE),
                        replace_with_config_vars)
    write_template_vars(os.path.join(build_path, BUILD_TEMPLATE),
                        patch_gradle_build_with_config)
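A hedged sketch of a `json_config` object matching the docstring above; every path, coordinate, and the `app_id` key are placeholders, and only `android_sdk_dir` is known from the code to be required in `config`.

json_config = {
    "config": {
        "android_sdk_dir": "$ANDROID_HOME",  # env vars are expanded by the function
        "app_id": "com.example.app",         # hypothetical extra {var} macro
    },
    "project_deps": [
        # [namespace, package, version] triplets, per the docstring
        ["com.example.sdk", "example-lib", "1.0.0"],
    ],
    "extra_m2repositories": ["path/to/m2repository"],         # optional
    "extra_proguard_configs": ["path/to/proguard-user.txt"],  # optional
}
generate_gradle_build("gradle_build", json_config)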
Example #36
    pkg_resources.require(dependencies)

    # Generate a unique id for this experiment.
    id = str(uuid.uuid4())

    # If the user provided an app name, use it everywhere that's user-facing.
    if app:
        id_long = id
        id = str(app)

    log("Experiment id is " + id + "")

    # Copy this directory into a temporary folder, ignoring .git
    dst = os.path.join(tempfile.mkdtemp(), id)
    to_ignore = shutil.ignore_patterns(".git/*", "*.db", "snapshots", "data",
                                       "server.log")
    shutil.copytree(os.getcwd(), dst, ignore=to_ignore)

    click.echo(dst)

    # Save the experiment id
    with open(os.path.join(dst, "experiment_id.txt"), "w") as file:
        if app:
            file.write(id_long)
        else:
            file.write(id)

    # Zip up the temporary directory and place it in the cwd.
    if not debug:
        log("Freezing the experiment package...")
        shutil.make_archive(os.path.join("snapshots", id + "-code"), "zip",
Example #37
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
   File Name:    shutil_m
   Description :
   Author :       CBH
   date:         2020/7/20 11: 05
   Ide:           PyCharm
-------------------------------------------------
   Change Activity:
                   2020/7/20 11: 05:
-------------------------------------------------
"""
import shutil
shutil.copy2(r'C:\Users\CBH\PycharmProjects\tensorflow\auto.log', r'C:\Users\CBH\Desktop\work_job\BI')  # copy a file to the target directory
# copy a directory tree
shutil.copytree(r'C:\Users\CBH\PycharmProjects\tensorflow\sbh\gshj\util',
                r'C:\Users\CBH\PycharmProjects\tensorflow\sbh\gshj\util2',
                ignore=shutil.ignore_patterns("*.pyc"))  # exclude matching files
# rmtree -- delete a directory tree
shutil.move('path', 'target_path', copy_function=shutil.copy2)

# get disk usage
total, use, free = shutil.disk_usage('.')
print('Disk total %iGB, used %iGB, free %iGB' % (total / 1073741824, use / 1073741824, free / 1073741824))


# create an archive
shutil.make_archive('archive_name', 'zip', 'directory_to_compress')
Example #38
def custom_copytree(src,
                    dst,
                    symlinks=True,
                    ignored_patterns=None,
                    ignore_dangling_symlinks=True,
                    logger=None,
                    log_copied_file=False,
                    relative_path="",
                    overwrite=False):
    """Recursively copy a directory tree.

    This function is basically the same as :any:`shutil.copytree`, but with the following
    differences:

    - It copies directories whether the destination directory exists or not.
    - It uses a custom function to copy symlinks (:any:`copy_create_symlink`) instead of just \
    calling :any:`os.symlink` directly.
    - Switched the *ignore* parameter (originally a callable) into *ignored_patterns* (now a list \
    of file name patterns). Just for the kick of it, not really needed.

    Parameters
    ----------
    src : str
        Source directory.
    dst : str
        Destination directory.
    symlinks : bool, optional
        Handle symlinks.
    ignored_patterns : None, optional
         A list of file name patterns to be ignored by the copy functions.
    ignore_dangling_symlinks : bool, optional
        Whether to ignore dangling symlinks.
    logger : LogSystem
        The logger.
    log_copied_file : bool, optional
        See :any:`custom_copy2` > log_copied_file parameter.
    relative_path : str, optional
        A relative path to extract from the path that's going to be logged.
    overwrite : bool, optional
        Overwrite existing files without doing any checks.

    Returns
    -------
    str
        The destination if no errors were raised.

    Raises
    ------
    exceptions.Error
        A list of errors after all items in a directory were processed.
    """
    names = os.listdir(src)

    try:
        if ignored_patterns is not None:
            ignored_names = ignore_patterns(*ignored_patterns)(src, names)
        else:
            ignored_names = set()

        if not os.path.exists(dst):
            os.makedirs(dst)
    except Exception as err:
        logger.error(err)

    errors = []

    for name in names:
        if name in ignored_names:
            continue

        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)

        try:
            if os.path.islink(srcname):
                linkto = os.readlink(srcname)

                if symlinks:
                    # os.symlink(linkto, dstname)
                    # copystat(srcname, dstname, follow_symlinks=not symlinks)
                    # Let :any:`copy_create_symlink` take care of symlinks. With the approach taken by
                    # the original :any:`shutil.copytree` function, I'm constantly spammed with
                    # useless errors thrown by the direct use of :any:`os.symlink`.
                    # I fixed this nuisance by simply getting rid of the destination.
                    # MOVING ON!!!
                    copy_create_symlink(srcname,
                                        dstname,
                                        source_is_symlink=True,
                                        logger=logger,
                                        follow_symlinks=not symlinks)
                else:
                    # Ignore dangling symlink if the flag is on
                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
                        continue
                    # Otherwise let the copy occur; copy2 will raise an error
                    if os.path.isdir(srcname):
                        custom_copytree(srcname,
                                        dstname,
                                        symlinks=symlinks,
                                        ignored_patterns=ignored_patterns,
                                        logger=logger,
                                        log_copied_file=log_copied_file,
                                        relative_path=relative_path)
                    else:
                        custom_copy2(srcname,
                                     dstname,
                                     logger=logger,
                                     log_copied_file=log_copied_file,
                                     relative_path=relative_path,
                                     overwrite=overwrite)
            elif os.path.isdir(srcname):
                custom_copytree(srcname,
                                dstname,
                                symlinks=symlinks,
                                ignored_patterns=ignored_patterns,
                                logger=logger,
                                log_copied_file=log_copied_file,
                                relative_path=relative_path,
                                overwrite=overwrite)
            else:
                # Will raise a SpecialFileError for unsupported file types
                custom_copy2(srcname,
                             dstname,
                             logger=logger,
                             log_copied_file=log_copied_file,
                             relative_path=relative_path,
                             overwrite=overwrite)
        # Catch the Error from the recursive custom_copytree so that we can
        # continue with other files
        except exceptions.Error as err:
            errors.extend(err.args[0])
        except OSError as why:
            errors.append((srcname, dstname, str(why)))

    try:
        copystat(src, dst)
    except OSError as why:
        # Copying file access times may fail on Windows
        if getattr(why, "winerror", None) is None:
            errors.append((src, dst, str(why)))

    if errors:
        raise exceptions.Error(errors)

    return dst
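A hedged usage sketch, assuming `logger` is an instance of the LogSystem class referenced above and the helper functions it depends on are importable from the same module.

# Hypothetical call: mirror a tree, skipping byte-code and VCS metadata,
# and overwrite anything already present at the destination.
custom_copytree(
    "/path/to/source",
    "/path/to/destination",
    ignored_patterns=["*.pyc", "__pycache__", ".git*"],
    logger=logger,          # a LogSystem instance from the host project
    log_copied_file=True,
    overwrite=True,
)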
Example #39
    def test_copytree_with_exclude(self):
        def write_data(path, data):
            f = open(path, "w")
            f.write(data)
            f.close()

        def read_data(path):
            f = open(path)
            data = f.read()
            f.close()
            return data

        # creating data
        join = os.path.join
        exists = os.path.exists
        src_dir = tempfile.mkdtemp()
        try:
            dst_dir = join(tempfile.mkdtemp(), 'destination')
            write_data(join(src_dir, 'test.txt'), '123')
            write_data(join(src_dir, 'test.tmp'), '123')
            os.mkdir(join(src_dir, 'test_dir'))
            write_data(join(src_dir, 'test_dir', 'test.txt'), '456')
            os.mkdir(join(src_dir, 'test_dir2'))
            write_data(join(src_dir, 'test_dir2', 'test.txt'), '456')
            os.mkdir(join(src_dir, 'test_dir2', 'subdir'))
            os.mkdir(join(src_dir, 'test_dir2', 'subdir2'))
            write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'), '456')
            write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'), '456')

            # testing glob-like patterns
            try:
                patterns = shutil.ignore_patterns('*.tmp', 'test_dir2')
                shutil.copytree(src_dir, dst_dir, ignore=patterns)
                # checking the result: some elements should not be copied
                self.assertTrue(exists(join(dst_dir, 'test.txt')))
                self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
                self.assertTrue(not exists(join(dst_dir, 'test_dir2')))
            finally:
                if os.path.exists(dst_dir):
                    shutil.rmtree(dst_dir)
            try:
                patterns = shutil.ignore_patterns('*.tmp', 'subdir*')
                shutil.copytree(src_dir, dst_dir, ignore=patterns)
                # checking the result: some elements should not be copied
                self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
                self.assertTrue(
                    not exists(join(dst_dir, 'test_dir2', 'subdir2')))
                self.assertTrue(
                    not exists(join(dst_dir, 'test_dir2', 'subdir')))
            finally:
                if os.path.exists(dst_dir):
                    shutil.rmtree(dst_dir)

            # testing callable-style
            try:

                def _filter(src, names):
                    res = []
                    for name in names:
                        path = os.path.join(src, name)

                        if (os.path.isdir(path)
                                and os.path.basename(path) == 'subdir'):
                            res.append(name)
                        elif os.path.splitext(path)[-1] in ('.py',):
                            res.append(name)
                    return res

                shutil.copytree(src_dir, dst_dir, ignore=_filter)

                # checking the result: some elements should not be copied
                self.assertTrue(not exists(
                    join(dst_dir, 'test_dir2', 'subdir2', 'test.py')))
                self.assertTrue(
                    not exists(join(dst_dir, 'test_dir2', 'subdir')))

            finally:
                if os.path.exists(dst_dir):
                    shutil.rmtree(dst_dir)
        finally:
            shutil.rmtree(src_dir)
            shutil.rmtree(os.path.dirname(dst_dir))
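Condensing what the test above exercises: shutil.ignore_patterns builds a glob-matching callback for you, while a hand-written callable receives the directory being visited and the list of names in it, and returns the subset to skip. A short sketch of both forms, with illustrative directory names:

import os
import shutil

# Glob form: skip temporary files and any entry named 'build'.
shutil.copytree('project', 'copy_a',
                ignore=shutil.ignore_patterns('*.tmp', 'build'))


# Callable form: same (directory, names) -> names-to-skip signature that
# copytree calls once per visited directory.
def skip_large_files(directory, names):
    skipped = []
    for name in names:
        path = os.path.join(directory, name)
        if os.path.isfile(path) and os.path.getsize(path) > 1024 * 1024:
            skipped.append(name)
    return skipped


shutil.copytree('project', 'copy_b', ignore=skip_large_files)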
Beispiel #40
0
def generate_cert_request(collaborator_name, data_path, silent, skip_package):
    """
    Create collaborator certificate key pair.

    Then create a package with the CSR to send for signing.
    """
    from openfl.cryptography.participant import generate_csr
    from openfl.cryptography.io import write_crt, write_key

    common_name = f'{collaborator_name}'.lower()
    subject_alternative_name = f'DNS:{common_name}'
    file_name = f'col_{common_name}'

    echo(
        f'Creating COLLABORATOR certificate key pair with the following settings: '
        f'CN={style(common_name, fg="red")},'
        f' SAN={style(subject_alternative_name, fg="red")}')

    client_private_key, client_csr = generate_csr(common_name, server=False)

    (PKI_DIR / 'client').mkdir(parents=True, exist_ok=True)

    echo('  Moving COLLABORATOR certificate to: ' +
         style(f'{PKI_DIR}/{file_name}', fg='green'))

    # Write collaborator csr and key to disk
    write_crt(client_csr, PKI_DIR / 'client' / f'{file_name}.csr')
    write_key(client_private_key, PKI_DIR / 'client' / f'{file_name}.key')

    if not skip_package:
        from shutil import make_archive, copytree, ignore_patterns
        from tempfile import mkdtemp
        from os.path import join, basename
        from os import remove
        from glob import glob

        archiveType = 'zip'
        archiveName = f'col_{common_name}_to_agg_cert_request'
        archiveFileName = archiveName + '.' + archiveType

        # Collaborator certificate signing request
        tmpDir = join(mkdtemp(), 'openfl', archiveName)

        ignore = ignore_patterns('__pycache__', '*.key', '*.srl', '*.pem')
        # Copy the current directory into the temporary directory
        copytree(f'{PKI_DIR}/client', tmpDir, ignore=ignore)

        for f in glob(f'{tmpDir}/*'):
            if common_name not in basename(f):
                remove(f)

        # Create Zip archive of directory
        make_archive(archiveName, archiveType, tmpDir)

        echo(f'Archive {archiveFileName} with certificate signing'
             f' request created')
        echo('This file should be sent to the certificate authority'
             ' (typically hosted by the aggregator) for signing')

    # TODO: There should be some association with the plan made here as well
    RegisterDataPath(common_name, data_path=data_path, silent=silent)
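One detail worth calling out in the packaging step above: shutil.make_archive has no ignore or filter hook of its own, which is why the CSR is first copied into a temporary directory with copytree(..., ignore=...) and the archive is built from that staging copy. A minimal sketch of the same stage-then-archive idea, with illustrative names and patterns:

from os.path import join
from shutil import copytree, ignore_patterns, make_archive, rmtree
from tempfile import mkdtemp


def archive_without_secrets(src_dir, archive_base):
    """Zip src_dir while excluding key material, via a filtered staging copy."""
    staging_parent = mkdtemp()
    staging = join(staging_parent, 'staging')
    # make_archive() cannot filter files itself, so filter during the copy.
    copytree(src_dir, staging,
             ignore=ignore_patterns('__pycache__', '*.key', '*.pem', '*.srl'))
    try:
        return make_archive(archive_base, 'zip', staging)
    finally:
        rmtree(staging_parent, ignore_errors=True)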
Beispiel #41
0
parser.add_argument('-s',
                    action='store',
                    dest='src',
                    metavar='SRC',
                    required=True)
parser.add_argument('-d', action='store', dest='dst', metavar='DST')
parser.add_argument('-i',
                    action='store',
                    dest='ignore',
                    nargs='*',
                    metavar='IGNORE')
parser.add_argument('-u',
                    action='store',
                    dest='usbPath',
                    metavar='USB_PATH',
                    default=usbPath)

args = parser.parse_args()
src = pathlib.Path('~', args.src).expanduser()
if args.dst:
    dst = pathlib.Path(args.usbPath, args.dst)
else:
    dst = pathlib.Path(args.usbPath, args.src)

if dst.exists():
    shutil.rmtree(dst)

if args.ignore:
    shutil.copytree(src, dst, ignore=shutil.ignore_patterns(*args.ignore))
else:
    shutil.copytree(src, dst)
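The rmtree() call above is needed because, before Python 3.8, shutil.copytree refuses to copy into an existing destination. On 3.8 and later the dirs_exist_ok=True flag lets it merge into an existing directory instead; note that files already present in the destination but deleted from the source are left in place, so deleting first is still the way to get an exact mirror. A sketch reusing src, dst and args from the script above:

import shutil

# Python 3.8+: merge into an existing destination instead of deleting it first.
shutil.copytree(
    src,
    dst,
    ignore=shutil.ignore_patterns(*args.ignore) if args.ignore else None,
    dirs_exist_ok=True,
)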
Beispiel #42
0
    from shutil import copytree, rmtree, ignore_patterns

    ## Get the project base path
    proj_path = os.path.join(os.path.dirname(__file__), '..')
    build_path = os.path.join(proj_path, '.build')

    try:
        logging.info('cleaning build path ... ')
        rmtree(build_path)
    except OSError as e:
        logging.info(e)

    logging.info('copying javascript files ... ')
    from_ = os.path.join(proj_path, 'static', 'js')
    to = build_path
    copytree(from_, to, ignore=ignore_patterns('*.coffee', '*~'))


def build_js():
    """Combine and minify RequireJS modules"""
    import os
    from shutil import copytree, rmtree, ignore_patterns
    collect_js()

    proj_path = os.path.join(os.path.dirname(__file__), '..')
    build_path = os.path.join(proj_path, '.build')
    local('r.js -o app.build.js')
    from_ = os.path.join(build_path, 'min')
    to = os.path.join(proj_path, 'static', 'js.build')
    rmtree(to)
    logging.info('copying compiled javascripts to {}'.format(to))
Beispiel #43
0
                    (def_opt, def_value))
    else:
        print("Adding additional flag: %s=%s" % (ext_opt, ext_value))
        extra_command_options[ext_opt] = ext_value

# Start the timer
start = time.time()

# Change to the build directory
os.chdir(build_dir)

# Copy the source tree to the temporary folder - makes build 3x faster than building against the mount on Windows
print("")
print("Copying source from " + output_dir + " to " + source_dir + " ...")
print("")
shutil.copytree(output_dir, source_dir, ignore=shutil.ignore_patterns(".git"))
# Since we have not copied the .git folder (which saves a lot of time), cmake cannot determine GIT_STR.
# We thus fake it by copying over what is needed and making it a valid git repo.
shutil.copytree(output_dir + "/.git/refs/heads",
                source_dir + "/.git/refs/heads")
shutil.copyfile(output_dir + "/.git/HEAD", source_dir + "/.git/HEAD")
proc = subprocess.Popen(["git", "init", source_dir])
proc.wait()

# Prepare the cmake command
cmd = ["cmake"]

# Append the default flags
for opt, value in firmware_options.items():
    cmd.append("-D%s=%s" % (opt, value))
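An alternative to reconstructing a minimal .git directory, sketched here only as a hedged aside: capture the revision once with git rev-parse and hand it to the build explicitly. The GIT_STR name is taken from the comment above; whether the project's CMake files would accept it as a cache variable is an assumption.

import subprocess


def git_head_revision(repo_dir):
    """Return the current HEAD commit hash of the repository at repo_dir."""
    return subprocess.check_output(
        ["git", "rev-parse", "HEAD"], cwd=repo_dir).decode().strip()


# Hypothetical use with the script above:
# cmd.append("-DGIT_STR=%s" % git_head_revision(output_dir))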
Beispiel #44
0
regex_dot = re.compile(r'^\.')
regex_z = re.compile(r'^z-')

filtered_artist_source = [i for i in artist_source if not regex_dot.match(i)]
filtered_artist_dest = [i for i in artist_dest if not regex_dot.match(i)]

for artist in filtered_artist_source:
    copy_artist_source = '%s/%s' % (music_source, artist)
    copy_artist_dest = '%s/%s' % (music_dest, artist)
    
    if artist not in filtered_artist_dest:
        copytree(
            copy_artist_source, 
            copy_artist_dest, 
            symlinks=False, 
            ignore=ignore_patterns('z-*')
        )
        print "Copying artist %s ..." % artist.upper() 
    else:
        album_source = os.listdir(copy_artist_source)
        album_dest = os.listdir(copy_artist_dest)
        filtered_album_source = [i for i in album_source if not regex_z.match(i)]
        filtered_album_dest = [i for i in album_dest if not regex_z.match(i)]
        
        for album in filtered_album_source:
            copy_album_source = '%s/%s' % (copy_artist_source, album)
            copy_album_dest = '%s/%s' % (copy_artist_dest, album)
            
            if album not in filtered_album_dest:
                copytree(
                    copy_album_source, 
Beispiel #45
0
import os
import datetime
import shutil
from shutil import copytree, ignore_patterns

files = os.listdir('/media/optimus/')

destination = '/home/<username>/Backup/back_%s' % datetime.datetime.now()
try:
    for f in files:
        source = '/media/optimus/%s' % f
        # Copy each entry into its own folder under the dated backup directory;
        # copytree() fails if the destination directory already exists.
        copytree(source, os.path.join(destination, f),
                 ignore=ignore_patterns('*.pyc', 'tmp*'))
except Exception as e:
    print(e)
Beispiel #46
0
            # Get directories from previous phase to this directory
            # This is for MOSS calculations
            # For phase 1-2, get code from phase 1
            # For phase 2-3, 3-4 get code from previous cross-phase
            if curPhase == 2:
                for prevPhaseProject in os.listdir(
                        os.path.join(homedir, prevPhaseDirectory)):
                    if os.path.isdir(os.path.join(homedir, prevPhaseDirectory, prevPhaseProject)) \
                     and not prevPhaseProject.startswith('.'):
                        src = os.path.join(homedir, prevPhaseDirectory,
                                           prevPhaseProject)
                        dest = os.path.join(os.getcwd(), prevPhaseProject)
                        print("Copying directory {0} to {1}".format(src, dest))
                        shutil.copytree(src,
                                        dest,
                                        ignore=shutil.ignore_patterns('.*'))
            else:
                prevPhaseDirectoryComplete = os.path.join(
                    homedir, "cross-phase", prevCrossPhaseDirectory)
                print(prevPhaseDirectoryComplete)
                for prevPhaseProject in os.listdir(prevPhaseDirectoryComplete):
                    if os.path.isdir(os.path.join(prevPhaseDirectoryComplete, prevPhaseProject))\
                     and not prevPhaseProject.startswith('.') and prevPhaseProject.endswith('-cp'):
                        src = os.path.join(prevPhaseDirectoryComplete,
                                           prevPhaseProject)
                        dest = os.path.join(os.getcwd(), prevPhaseProject[:-3])
                        print("Copying directory {0} to {1}".format(src, dest))
                        shutil.copytree(src,
                                        dest,
                                        ignore=shutil.ignore_patterns('.*'))
Beispiel #47
0
def export_():
    """Export federated learning workspace."""
    from shutil import make_archive, copytree, copy2, ignore_patterns
    from tempfile import mkdtemp
    from os import getcwd, makedirs
    from os.path import basename, join
    from plan import FreezePlan

    # TODO: Does this need to freeze all plans?
    planFile = 'plan/plan.yaml'
    try:
        FreezePlan(planFile)
    except Exception:
        echo(f'Plan file "{planFile}" not found. No freeze performed.')

    requirements_filename = 'requirements.txt'
    workspace_reqs = _get_requirements_dict(requirements_filename)
    prefix = getcwd()

    export_requirements_filename = 'requirements.export.txt'
    with open(export_requirements_filename, "w") as f:
        check_call([executable, "-m", "pip", "freeze"], shell=False, stdout=f)
    workspace_hash = _get_dir_hash(prefix)
    origin_dict = _get_requirements_dict(OPENFL_USERDIR /
                                         f'requirements.{workspace_hash}.txt')
    current_dict = _get_requirements_dict(export_requirements_filename)
    with open(export_requirements_filename, "w") as f:
        for package, version in current_dict.items():
            if (package in workspace_reqs or package not in origin_dict
                    or version != origin_dict[package]):
                # we save only the difference between original workspace after
                # 'fx create workspace' and current one.
                echo(f'Writing {package}=={version} '
                     f'to {export_requirements_filename}...')
                f.write(f'{package}=={version}\n')
            elif version is None:  # local dependency
                warn(f'Could not generate requirements for {package}.'
                     f' Consider installing it manually after workspace'
                     f' import.')
    echo(f'{export_requirements_filename} written.')

    archiveType = 'zip'
    archiveName = basename(getcwd())
    archiveFileName = archiveName + '.' + archiveType

    # Aggregator workspace
    tmpDir = join(mkdtemp(), 'openfl', archiveName)

    ignore = ignore_patterns('__pycache__', '*.crt', '*.key', '*.csr', '*.srl',
                             '*.pem', '*.pbuf')

    # We only export the minimum required files to set up a collaborator
    makedirs(f'{tmpDir}/save', exist_ok=True)
    makedirs(f'{tmpDir}/logs', exist_ok=True)
    makedirs(f'{tmpDir}/data', exist_ok=True)
    copytree('./code', f'{tmpDir}/code', ignore=ignore)  # code
    copytree('./plan', f'{tmpDir}/plan', ignore=ignore)  # plan
    copy2(export_requirements_filename,
          f'{tmpDir}/requirements.txt')  # requirements

    try:
        copy2('.workspace', tmpDir)  # .workspace
    except FileNotFoundError:
        echo('\'.workspace\' file not found.')
        if confirm('Create a default \'.workspace\' file?'):
            copy2(WORKSPACE / 'workspace' / '.workspace', tmpDir)
        else:
            echo('To proceed, you must have a \'.workspace\' '
                 'file in the current directory.')
            raise

    # Create Zip archive of directory
    make_archive(archiveName, archiveType, tmpDir)

    echo(f'Workspace exported to archive: {archiveFileName}')
Beispiel #48
0
def main():
    global VERSION
    print("\nIPlug Project Duplicator v" + VERSION +
          " by Oli Larkin ------------------------------\n")

    numargs = len(sys.argv) - 1

    if not (numargs == 3 or numargs == 4):
        print(
            "Usage: duplicate.py inputprojectname outputprojectname manufacturername (outputprojectpath)"
        )
        sys.exit(1)
    else:
        inputprojectname = sys.argv[1]
        outputprojectname = sys.argv[2]
        manufacturer = sys.argv[3]

    if numargs == 4:
        outputbasepath = os.path.abspath(sys.argv[4])
    else:
        outputbasepath = os.getcwd()

    if not (os.path.isdir(outputbasepath)):
        print("error: Output path does not exist")
        sys.exit(1)

    outputpath = os.path.join(outputbasepath, outputprojectname)

    if ' ' in inputprojectname:
        print("error: input project name has spaces")
        sys.exit(1)

    if inputprojectname not in os.listdir(os.curdir):
        print("error: input project " + inputprojectname +
              " doesn't exist, check spelling/case?")
        sys.exit(1)

    if ' ' in outputprojectname:
        print("error: output project name has spaces")
        sys.exit(1)

    if ' ' in manufacturer:
        print("error: manufacturer name has spaces")
        sys.exit(1)

    # remove a trailing slash if it exists
    if inputprojectname[-1:] == "/":
        inputprojectname = inputprojectname[0:-1]

    if outputprojectname[-1:] == "/":
        outputprojectname = outputprojectname[0:-1]

    #check that the folders are OK
    if not os.path.isdir(inputprojectname):
        print("error: input project not found")
        sys.exit(1)

    if os.path.isdir(outputpath):
        print("error: output project allready exists")
        sys.exit(1)
    # rmtree(output)

    print("copying " + inputprojectname + " folder to " + outputpath)
    copytree(inputprojectname, outputpath, ignore=ignore_patterns(*DONT_COPY))

    oldroot = ""
    newroot = ""

    if numargs == 4:
        configpath = os.path.join(inputprojectname, "config")
        xcconfig = parse_xcconfig(configpath + "/" + inputprojectname +
                                  "-mac.xcconfig")
        oldroot = xcconfig["IPLUG2_ROOT"]
        iplug2folder = os.path.abspath(os.path.join(configpath, oldroot))
        newroot = os.path.relpath(iplug2folder,
                                  os.path.join(outputpath, "config"))
    else:
        newroot = ""

    #replace manufacturer name strings
    for dir in dirwalk(outputpath, inputprojectname, outputprojectname,
                       "DJLastword", manufacturer, oldroot, newroot):
        pass

    #replace project name in root
    for dir in dirwalk(scriptpath, inputprojectname, outputprojectname,
                       "DJLastword", manufacturer, oldroot, newroot):
        pass

    #replace project name in github
    for dir in dirwalk(scriptpath + "/.github/workflows", inputprojectname,
                       outputprojectname, "DJLastword", manufacturer, oldroot,
                       newroot):
        pass

    #replace project name in vscode
    for dir in dirwalk(scriptpath + "/.vscode", inputprojectname,
                       outputprojectname, "DJLastword", manufacturer, oldroot,
                       newroot):
        pass

    # print("\ncopying gitignore template into project folder\n")

    # copy('gitignore_template', outputpath + "/.gitignore")

    config = parse_config(outputpath)

    config["PLUG_UNIQUE_ID"] = randomFourChar()

    set_uniqueid(outputpath, config["PLUG_UNIQUE_ID"])

    pp = pprint.PrettyPrinter(indent=4)
    pp.pprint(config)

    print("\ndone - don't forget to change PLUG_MFR_UID in config.h")
Beispiel #49
0
def create_portable_app(web2py_source, copy_database=False, copy_uploads=False):
    """Function to create the portable app based on the parameters"""

    from gluon.admin import apath
    import shutil,tempfile,os
    import zipfile
    import contenttype

    cachedir = os.path.join(apath("%s" % appname, r=request), "cache")
    tempdir = tempfile.mkdtemp("", "eden-", cachedir)
    workdir = os.path.join(tempdir, "web2py")
    if copy_uploads:
        ignore = shutil.ignore_patterns("*.db", "*.log", "*.table", "errors", "sessions", "compiled" , "cache", ".bzr", "*.pyc")
    else:
        ignore = shutil.ignore_patterns("*.db", "*.log", "*.table", "errors", "sessions", "compiled" , "uploads", "cache", ".bzr", "*.pyc")

    appdir = os.path.join(workdir, "applications", appname)
    shutil.copytree(apath("%s" % appname, r=request),\
                    appdir, \
                    ignore = ignore)
    os.mkdir(os.path.join(appdir, "errors"))
    os.mkdir(os.path.join(appdir, "sessions"))
    os.mkdir(os.path.join(appdir, "cache"))
    if not copy_uploads:
        os.mkdir(os.path.join(appdir, "uploads"))

    shutil.copy(os.path.join(appdir, "deployment-templates", "cron", "crontab"),\
            os.path.join(appdir, "cron", "crontab"))

    if copy_database:
        # Copy the db for the portable app
        s3db.load_all_models() # Load all modules to copy everything

        portable_db = DAL("sqlite://storage.db", folder=os.path.join(appdir, "databases"))
        for table in db:
            portable_db.define_table(table._tablename, *[field for field in table])

        portable_db.commit()

        temp_csv_file=tempfile.mkstemp()
        db.export_to_csv_file(open(temp_csv_file[1], "wb"))
        portable_db.import_from_csv_file(open(temp_csv_file[1], "rb"))
        os.unlink(temp_csv_file[1])
        portable_db.commit()

    # Replace the following with a more specific config
    config_template = open(os.path.join(appdir, "deployment-templates", "models", "000_config.py"), "r")
    new_config = open(os.path.join(appdir, "models", "000_config.py"), "w")
    # Replace the first occurrence of False with True
    new_config.write(config_template.read().replace("False", "True", 1))
    new_config.close()

    # Embed the web2py source with eden for download
    shutil.copy(os.path.join(cachedir, web2py_source), os.path.join(cachedir, "download.zip"))
    portable_app = os.path.join(cachedir, "download.zip")
    zip = zipfile.ZipFile(portable_app, "a", zipfile.ZIP_DEFLATED)
    tozip = os.path.join(tempdir, "web2py")
    rootlen = len(tempdir) + 1

    for base, dirs, files in os.walk(tozip):
        for directory in dirs:
            directory = os.path.join(base, directory)
            zip.write(directory, directory[rootlen:]) # Create empty directories
        for file in files:
            fn = os.path.join(base, file)
            zip.write(fn, fn[rootlen:])

    zip.close()
    shutil.rmtree(tempdir)
    response.headers["Content-Type"] = contenttype.contenttype(portable_app)
    response.headers["Content-Disposition"] = \
                            "attachment; filename=portable-sahana.zip"

    return response.stream(portable_app)
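The two near-identical ignore_patterns(...) calls in create_portable_app differ only in whether "uploads" is excluded. Since ignore_patterns takes its patterns as *args, the list can be built conditionally and splatted once, a small rewrite of that branch:

import shutil

patterns = ["*.db", "*.log", "*.table", "errors", "sessions",
            "compiled", "cache", ".bzr", "*.pyc"]
if not copy_uploads:
    patterns.append("uploads")
ignore = shutil.ignore_patterns(*patterns)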
Beispiel #50
0
def copyFrameworkToApp(framework):
	'''
	Copy the Qt framework into the bundle.
	
	NOTE: OS X 10.11 changes things, and the framework now has @rpath, not the absolute path.
	'''
	# print('%s' % framework)
	if '@rpath' in framework:
		frameworkRoot = os.path.split(os.path.split(os.path.split(framework)[0])[0])[0]
		beginPosition = framework.index('/') + 1
		endPosition = framework.index('/', beginPosition)
		frameworkName = framework[beginPosition:endPosition]
		# print('====> %s : %s' % (frameworkName, qtFrameworksDirectory))
	else:
		frameworkRoot = os.path.split(os.path.split(os.path.split(framework)[0])[0])[0]
		frameworkName = os.path.split(frameworkRoot)[1]
	
	if frameworkName != 'Qt':
		target = os.path.join(installDirectory, 'Frameworks', frameworkName)
		if not os.path.exists(target):
			shutil.copytree(os.path.join(qtFrameworksDirectory, frameworkName), target, symlinks=True, ignore=shutil.ignore_patterns('*debug*', 'Headers', '*.prl'))
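Note the symlinks=True in the call above: a macOS .framework bundle is largely symlinks (Versions/Current, the top-level binary and Resources links), so copying with the default symlinks=False would materialise duplicate copies instead of preserving the bundle layout. A minimal sketch of the same call shape with illustrative paths:

import shutil

shutil.copytree(
    "/Library/Frameworks/Example.framework",            # illustrative source
    "MyApp.app/Contents/Frameworks/Example.framework",  # illustrative target
    symlinks=True,   # keep Versions/Current and friends as links
    ignore=shutil.ignore_patterns("*debug*", "Headers", "*.prl"),
)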
Beispiel #51
0
def Main():
    # Pull in all of the gypi files which will be munged into the sdk.
    HOME = dirname(dirname(realpath(__file__)))

    (options, args) = GetOptions()

    SDK = options.sdk_output_dir
    SDK_tmp = '%s.tmp' % SDK

    SNAPSHOT = options.snapshot_location

    # TODO(dgrove) - deal with architectures that are not ia32.

    if exists(SDK):
        rmtree(SDK)

    if exists(SDK_tmp):
        rmtree(SDK_tmp)

    os.makedirs(SDK_tmp)

    # Create and populate sdk/bin.
    BIN = join(SDK_tmp, 'bin')
    os.makedirs(BIN)

    os.makedirs(join(BIN, 'snapshots'))

    # Copy the Dart VM binary and the Windows Dart VM link library
    # into sdk/bin.
    #
    # TODO(dgrove) - deal with architectures that are not ia32.
    build_dir = os.path.dirname(SDK)
    dart_file_extension = ''
    if HOST_OS == 'win32':
        dart_file_extension = '.exe'
        dart_import_lib_src = join(HOME, build_dir, 'dart.lib')
        dart_import_lib_dest = join(BIN, 'dart.lib')
        copyfile(dart_import_lib_src, dart_import_lib_dest)
    dart_src_binary = join(HOME, build_dir, 'dart' + dart_file_extension)
    dart_dest_binary = join(BIN, 'dart' + dart_file_extension)
    copyfile(dart_src_binary, dart_dest_binary)
    copymode(dart_src_binary, dart_dest_binary)
    # Strip the binaries on platforms where that is supported.
    if HOST_OS == 'linux':
        subprocess.call(['strip', dart_dest_binary])
    elif HOST_OS == 'macos':
        subprocess.call(['strip', '-x', dart_dest_binary])

    #
    # Create and populate sdk/include.
    #
    INCLUDE = join(SDK_tmp, 'include')
    os.makedirs(INCLUDE)
    copyfile(join(HOME, 'runtime', 'include', 'dart_api.h'),
             join(INCLUDE, 'dart_api.h'))
    copyfile(join(HOME, 'runtime', 'include', 'dart_mirrors_api.h'),
             join(INCLUDE, 'dart_mirrors_api.h'))
    copyfile(join(HOME, 'runtime', 'include', 'dart_native_api.h'),
             join(INCLUDE, 'dart_native_api.h'))
    copyfile(join(HOME, 'runtime', 'include', 'dart_tools_api.h'),
             join(INCLUDE, 'dart_tools_api.h'))

    #
    # Create and populate sdk/lib.
    #

    LIB = join(SDK_tmp, 'lib')
    os.makedirs(LIB)

    #
    # Create and populate lib/{async, core, isolate, ...}.
    #

    os.makedirs(join(LIB, 'html'))

    for library in [
            join('_blink', 'dartium'),
            join('_chrome', 'dart2js'),
            join('_chrome', 'dartium'),
            join('_internal', 'js_runtime'),
            join('_internal', 'sdk_library_metadata'), 'async', 'collection',
            'convert', 'core', 'developer', 'internal', 'io', 'isolate',
            join('html', 'dart2js'),
            join('html', 'dartium'),
            join('html', 'html_common'),
            join('indexed_db', 'dart2js'),
            join('indexed_db', 'dartium'), 'js', 'js_util', 'math', 'mirrors',
            'profiler', 'typed_data',
            join('svg', 'dart2js'),
            join('svg', 'dartium'),
            join('web_audio', 'dart2js'),
            join('web_audio', 'dartium'),
            join('web_gl', 'dart2js'),
            join('web_gl', 'dartium'),
            join('web_sql', 'dart2js'),
            join('web_sql', 'dartium')
    ]:
        copytree(join(HOME, 'sdk', 'lib', library),
                 join(LIB, library),
                 ignore=ignore_patterns('*.svn', 'doc', '*.py', '*.gypi',
                                        '*.sh', '.gitignore'))

    # Copy the platform descriptors.
    for file_name in [
            "dart_client.platform", "dart_server.platform",
            "dart_shared.platform"
    ]:
        copyfile(join(HOME, 'sdk', 'lib', file_name), join(LIB, file_name))

    # Copy libraries.dart to lib/_internal/libraries.dart for backwards
    # compatibility.
    #
    # TODO(sigmund): stop copying libraries.dart. Old versions (<=0.25.1-alpha.4)
    # of the analyzer package do not support the new location of this file. We
    # should be able to remove the old file once we release a newer version of
    # analyzer and popular frameworks have migrated to use it.
    copyfile(
        join(HOME, 'sdk', 'lib', '_internal', 'sdk_library_metadata', 'lib',
             'libraries.dart'), join(LIB, '_internal', 'libraries.dart'))

    # Create and copy tools.
    UTIL = join(SDK_tmp, 'util')
    os.makedirs(UTIL)

    RESOURCE = join(SDK_tmp, 'lib', '_internal', 'pub', 'asset')
    os.makedirs(os.path.dirname(RESOURCE))
    copytree(join(HOME, 'third_party', 'pkg', 'pub', 'lib', 'src', 'asset'),
             join(RESOURCE),
             ignore=ignore_patterns('.svn'))

    # Copy in 7zip for Windows.
    if HOST_OS == 'win32':
        copytree(join(HOME, 'third_party', '7zip'),
                 join(RESOURCE, '7zip'),
                 ignore=ignore_patterns('.svn'))

    # Copy dart2js/pub.
    CopyDartScripts(HOME, SDK_tmp)

    CopySnapshots(SNAPSHOT, SDK_tmp)
    CopyDartdocResources(HOME, SDK_tmp)
    CopyAnalyzerSources(HOME, LIB)
    CopyAnalysisSummaries(SNAPSHOT, LIB)
    CopyDevCompilerSdk(HOME, LIB)

    # Write the 'version' file
    version = utils.GetVersion()
    versionFile = open(os.path.join(SDK_tmp, 'version'), 'w')
    versionFile.write(version + '\n')
    versionFile.close()

    # Write the 'revision' file
    revision = utils.GetGitRevision()

    if revision is not None:
        with open(os.path.join(SDK_tmp, 'revision'), 'w') as f:
            f.write('%s\n' % revision)

    Copy(join(HOME, 'README.dart-sdk'), join(SDK_tmp, 'README'))
    Copy(join(HOME, 'LICENSE'), join(SDK_tmp, 'LICENSE'))
    Copy(join(HOME, 'sdk', 'api_readme.md'),
         join(SDK_tmp, 'lib', 'api_readme.md'))

    move(SDK_tmp, SDK)
Beispiel #52
0
    def update(self, archiveURL, path):
        print("Starting the update process...")
        error = False
        print("Backing up folder before updating...")
        shutil.copytree(path,
                        "backup",
                        ignore=shutil.ignore_patterns(*IGNORED))

        if "Temp" in os.listdir():
            shutil.rmtree("Temp")
        os.mkdir("Temp")

        try:
            print("Downloading the update...")
            urllib.request.urlretrieve(archiveURL, filename="Temp/update.zip")
        except Exception as err:
            error = True
            print(err)
            print(
                "An error occured when trying to retrieve the update...\nPlease verify the URL used."
            )

        if not error:
            try:
                print("Removing old version...")
                for root, dirs, files in os.walk(path, topdown=True):
                    dirs[:] = [d for d in dirs if d not in IGNORED]
                    if not os.path.abspath(root) == os.path.abspath(path):
                        shutil.rmtree(root)
                    else:
                        for file in files:
                            # Remove by full path; the walk root need not be the cwd.
                            os.remove(os.path.join(root, file))

                print("Extracting new files...")
                with zipfile.ZipFile("Temp/update.zip", 'r') as zip_ref:
                    zip_ref.extractall("Temp/update")

                print("Copying new files...")
                copy_tree("Temp/update/" + os.listdir("Temp/update")[0], path)

            except Exception as err:
                error = True
                print(err)
                print(
                    'An error occurred when updating...\nThe previous version can be found in the "backup" folder.'
                )

            if "Temp" in os.listdir():
                try:
                    print("Removing temporary files...")
                    shutil.rmtree("Temp/")
                except Exception as err:
                    print(err)

            if not error:
                try:
                    print("Cleaning the folder...")
                    shutil.rmtree("backup")
                    print(
                        "\nUpdating finished!\nPlease restart the script to apply changes!"
                    )
                except Exception as err:
                    error = True
                    print(err)
                    print(
                        'An error occurred while deleting the following folder. Please do it manually.\n- "backup"'
                    )

        return not error
Beispiel #53
0
if __name__ == "__main__":
    if not args.deterministic:
        cudnn.benchmark = True
        cudnn.deterministic = False
    else:
        cudnn.benchmark = False
        cudnn.deterministic = True

    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    torch.cuda.manual_seed(args.seed)

    snapshot_path = "../model/{}_{}/{}".format(args.exp, args.labeled_num,
                                               args.model)
    if not os.path.exists(snapshot_path):
        os.makedirs(snapshot_path)
    if os.path.exists(snapshot_path + '/code'):
        shutil.rmtree(snapshot_path + '/code')
    # Pass the filter via the ignore= keyword (the third positional argument is
    # symlinks) and give ignore_patterns the patterns as separate arguments.
    shutil.copytree('.', snapshot_path + '/code',
                    ignore=shutil.ignore_patterns('.git', '__pycache__'))

    logging.basicConfig(filename=snapshot_path + "/log.txt",
                        level=logging.INFO,
                        format='[%(asctime)s.%(msecs)03d] %(message)s',
                        datefmt='%H:%M:%S')
    logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
    logging.info(str(args))
    train(args, snapshot_path)
Beispiel #54
0
def CopyAnalyzerSources(home, lib_dir):
    for library in ['analyzer', 'analysis_server']:
        copytree(join(home, 'pkg', library),
                 join(lib_dir, library),
                 ignore=ignore_patterns('*.svn', 'doc', '*.py', '*.gypi',
                                        '*.sh', '.gitignore', 'packages'))
Beispiel #55
0
def execute_deployment():
    """Executes the deployment process after doing the prerequisite checks.

    Raises:
        Exception. App name is invalid.
        Exception. Custom version is used with production app.
        Exception. App name is not specified.
        Exception. The deployment script is not run from a release or test
            branch.
        Exception. The deployment script is run for prod server from a test
            branch.
        Exception. Current release version has '.' character.
        Exception. Last commit message is invalid.
        Exception. The mailgun API key is not added before deployment.
        Exception. Could not find third party directory.
        Exception. Invalid directory accessed during deployment.
    """
    parsed_args = _PARSER.parse_args()
    custom_version = None
    if parsed_args.app_name:
        app_name = parsed_args.app_name
        if app_name not in [APP_NAME_OPPIASERVER, APP_NAME_OPPIATESTSERVER
                            ] and ('migration' not in app_name):
            raise Exception('Invalid app name: %s' % app_name)
        if parsed_args.version and app_name == APP_NAME_OPPIASERVER:
            raise Exception('Cannot use custom version with production app.')
        # Note that custom_version may be None.
        custom_version = parsed_args.version
    else:
        raise Exception('No app name specified.')

    current_branch_name = common.get_current_branch_name()

    release_dir_name = 'deploy-%s-%s-%s' % (
        '-'.join('-'.join(app_name.split('.')).split(':')),
        current_branch_name, CURRENT_DATETIME.strftime('%Y%m%d-%H%M%S'))
    release_dir_path = os.path.join(os.getcwd(), '..', release_dir_name)

    deploy_data_path = os.path.join(os.getcwd(), os.pardir, 'release-scripts',
                                    'deploy_data', app_name)

    install_third_party_libs.main()

    if not (common.is_current_branch_a_release_branch() or
            (common.is_current_branch_a_test_branch())):
        raise Exception(
            'The deployment script must be run from a release or test branch.')
    if common.is_current_branch_a_test_branch() and (app_name in [
            APP_NAME_OPPIASERVER, APP_NAME_OPPIATESTSERVER
    ]):
        raise Exception('Test branch can only be deployed to backup server.')
    if custom_version is not None:
        current_release_version = custom_version.replace(DOT_CHAR, HYPHEN_CHAR)
    else:
        current_release_version = current_branch_name[
            len(common.RELEASE_BRANCH_NAME_PREFIX):].replace(
                DOT_CHAR, HYPHEN_CHAR)

    # This is required to compose the release_version_library_url
    # (defined in switch_version function) correctly.
    if '.' in current_release_version:
        raise Exception('Current release version has \'.\' character.')

    assert len(current_release_version) <= 25, (
        'The length of the "version" arg should be less than or '
        'equal to 25 characters.')

    # Do prerequisite checks.
    common.require_cwd_to_be_oppia()
    common.ensure_release_scripts_folder_exists_and_is_up_to_date()
    gcloud_adapter.require_gcloud_to_be_available()
    try:
        if app_name == APP_NAME_OPPIASERVER:
            check_release_doc()
            release_version_number = common.get_current_release_version_number(
                current_branch_name)
            last_commit_message = subprocess.check_output(
                'git log -1 --pretty=%B'.split())
            personal_access_token = common.get_personal_access_token()
            if not common.is_current_branch_a_hotfix_branch():
                if not last_commit_message.startswith(
                        'Update authors and changelog for v%s' %
                    (release_version_number)):
                    raise Exception('Invalid last commit message: %s.' %
                                    (last_commit_message))
                g = github.Github(personal_access_token)
                repo = g.get_organization('oppia').get_repo('oppia')
                common.check_blocking_bug_issue_count(repo)
                common.check_prs_for_current_release_are_released(repo)

            check_travis_and_circleci_tests(current_branch_name)
            update_configs.main(personal_access_token)
            with python_utils.open_file(common.FECONF_PATH, 'r') as f:
                feconf_contents = f.read()
                if ('MAILGUN_API_KEY' not in feconf_contents
                        or 'MAILGUN_API_KEY = None' in feconf_contents):
                    raise Exception(
                        'The mailgun API key must be added before deployment.')
        if not os.path.exists(THIRD_PARTY_DIR):
            raise Exception(
                'Could not find third_party directory at %s. Please run '
                'install_third_party_libs.py prior to running this script.' %
                THIRD_PARTY_DIR)

        current_git_revision = subprocess.check_output(
            ['git', 'rev-parse', 'HEAD']).strip()

        # Create a folder in which to save the release candidate.
        python_utils.PRINT('Ensuring that the release directory parent exists')
        common.ensure_directory_exists(os.path.dirname(release_dir_path))

        # Copy files to the release directory. Omits the .git subfolder.
        python_utils.PRINT('Copying files to the release directory')
        shutil.copytree(os.getcwd(),
                        release_dir_path,
                        ignore=shutil.ignore_patterns('.git'))

        # Change the current directory to the release candidate folder.
        with common.CD(release_dir_path):
            if not os.getcwd().endswith(release_dir_name):
                raise Exception(
                    'Invalid directory accessed during deployment: %s' %
                    os.getcwd())

            python_utils.PRINT('Changing directory to %s' % os.getcwd())

            python_utils.PRINT('Preprocessing release...')
            preprocess_release(app_name, deploy_data_path)

            update_and_check_indexes(app_name)
            build_scripts(parsed_args.maintenance_mode)
            deploy_application_and_write_log_entry(app_name,
                                                   current_release_version,
                                                   current_git_revision)

            python_utils.PRINT('Returning to oppia/ root directory.')

        switch_version(app_name, current_release_version)
        flush_memcache(app_name)
        check_breakage(app_name, current_release_version)

        python_utils.PRINT('Done!')
    finally:
        common.run_cmd([
            'git', 'checkout', '--', update_configs.LOCAL_FECONF_PATH,
            update_configs.LOCAL_CONSTANTS_PATH
        ])
Beispiel #56
0
##    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
##    GNU General Public License for more details.
##
##    You should have received a copy of the GNU General Public License
##    along with this program.  If not, see <http://www.gnu.org/licenses/>.
##
##    you can contact me [email protected]

''' % (FULL_TITLE, COPYRIGHT)

import py2exe, shutil, sys, os
from  distutils.core import setup
from zipfile import ZipFile

shutil.copytree('.', srcpath,
                ignore=shutil.ignore_patterns(
                    '*.pyc', '*.pp', 'error.txt', 'log.txt', 'downloaded', '*.zip'))
z = ZipFile(srcpath+'.zip', 'w')
for root, dirs, files in os.walk(srcpath):
    for name in files:
        path = os.path.join(root, name)
        if os.path.splitext(name)[1] == '.py':
            text = open(path).read()
            f = open(path, 'w')
            f.write(gplcomment)
            f.write(text)
            f.close()
        z.write(path)
z.close()

shutil.rmtree(srcpath)
Beispiel #57
0
        zip_path.unlink()
        st_rerun.rerun()

    st.stop()

if st.button("Copy and Anonymise"):
    with tempfile.TemporaryDirectory() as temp_dir:
        pl_temp_dir = pathlib.Path(temp_dir)
        new_temp_location = pl_temp_dir.joinpath(patient_directory.name)

        "Copying to temp directory, skipping DICOM files..."

        shutil.copytree(
            patient_directory,
            new_temp_location,
            ignore=shutil.ignore_patterns("*.DCM", "demographic.*"),
        )

        "Creating anonymised demographic file..."

        new_demographic_file = new_temp_location.joinpath(
            f"demographic.{patient_id}")

        shutil.copy2(ANON_DEMOGRAPHIC_FILE, new_demographic_file)
        with open(new_demographic_file, "r") as f:
            demographic_data = f.readlines()

        demographic_data[3] = demographic_data[3].replace("000000", patient_id)

        with open(new_demographic_file, "w") as f:
            f.writelines(demographic_data)
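One caveat with the "*.DCM" pattern above: ignore_patterns matches names with fnmatch.fnmatch, which is case-insensitive only on platforms whose paths are case-insensitive (such as Windows or the default macOS filesystem). On a case-sensitive filesystem, lower-case .dcm files would slip through, so listing both spellings is the safer option:

import shutil

# Cover both spellings so DICOM files are skipped on case-sensitive filesystems too.
ignore = shutil.ignore_patterns("*.DCM", "*.dcm", "demographic.*")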
Beispiel #58
0
def parseOptionsAndInitTestdirs():
    """Initialize the list of directories containing our unittest scripts.

    '-h/--help as the first option prints out usage info and exit the program.
    """

    global dont_do_python_api_test
    global just_do_python_api_test
    global just_do_benchmarks_test
    global blacklist
    global blacklistConfig
    global configFile
    global archs
    global compilers
    global count
    global delay
    global dumpSysPath
    global bmExecutable
    global bmBreakpointSpec
    global bmIterationCount
    global failfast
    global filters
    global fs4all
    global ignore
    global progress_bar
    global runHooks
    global skip_build_and_cleanup
    global skip_long_running_test
    global noHeaders
    global regexp
    global rdir
    global sdir_name
    global verbose
    global testdirs

    do_help = False

    if len(sys.argv) == 1:
        return

    # Process possible trace and/or verbose flag, among other things.
    index = 1
    while index < len(sys.argv):
        if sys.argv[index].startswith('-') or sys.argv[index].startswith('+'):
            # We should continue processing...
            pass
        else:
            # End of option processing.
            break

        if sys.argv[index].find('-h') != -1:
            index += 1
            do_help = True
        elif sys.argv[index].startswith('-A'):
            # Increment by 1 to fetch the ARCH spec.
            index += 1
            if index >= len(sys.argv) or sys.argv[index].startswith('-'):
                usage()
            archs = sys.argv[index].split('^')
            index += 1
        elif sys.argv[index].startswith('-C'):
            # Increment by 1 to fetch the CC spec.
            index += 1
            if index >= len(sys.argv) or sys.argv[index].startswith('-'):
                usage()
            compilers = sys.argv[index].split('^')
            index += 1
        elif sys.argv[index].startswith('-D'):
            dumpSysPath = True
            index += 1
        elif sys.argv[index].startswith('-a'):
            dont_do_python_api_test = True
            index += 1
        elif sys.argv[index].startswith('+a'):
            just_do_python_api_test = True
            index += 1
        elif sys.argv[index].startswith('+b'):
            just_do_benchmarks_test = True
            index += 1
        elif sys.argv[index].startswith('-b'):
            # Increment by 1 to fetch the blacklist file name option argument.
            index += 1
            if index >= len(sys.argv) or sys.argv[index].startswith('-'):
                usage()
            blacklistFile = sys.argv[index]
            if not os.path.isfile(blacklistFile):
                print "Blacklist file:", blacklistFile, "does not exist!"
                usage()
            index += 1
            # Now read the blacklist contents and assign it to blacklist.
            execfile(blacklistFile, globals(), blacklistConfig)
            blacklist = blacklistConfig.get('blacklist')
        elif sys.argv[index].startswith('-c'):
            # Increment by 1 to fetch the config file name option argument.
            index += 1
            if index >= len(sys.argv) or sys.argv[index].startswith('-'):
                usage()
            configFile = sys.argv[index]
            if not os.path.isfile(configFile):
                print "Config file:", configFile, "does not exist!"
                usage()
            index += 1
        elif sys.argv[index].startswith('-d'):
            delay = True
            index += 1
        elif sys.argv[index].startswith('-e'):
            # Increment by 1 to fetch the full path of the benchmark executable.
            index += 1
            if index >= len(sys.argv) or sys.argv[index].startswith('-'):
                usage()
            bmExecutable = sys.argv[index]
            if not is_exe(bmExecutable):
                usage()
            index += 1
        elif sys.argv[index].startswith('-F'):
            failfast = True
            index += 1
        elif sys.argv[index].startswith('-f'):
            # Increment by 1 to fetch the filter spec.
            index += 1
            if index >= len(sys.argv) or sys.argv[index].startswith('-'):
                usage()
            filters.append(sys.argv[index])
            index += 1
        elif sys.argv[index].startswith('-g'):
            fs4all = False
            index += 1
        elif sys.argv[index].startswith('-i'):
            ignore = True
            index += 1
        elif sys.argv[index].startswith('-k'):
            # Increment by 1 to fetch the runhook lldb command.
            index += 1
            if index >= len(sys.argv) or sys.argv[index].startswith('-'):
                usage()
            runHooks.append(sys.argv[index])
            index += 1
        elif sys.argv[index].startswith('-l'):
            skip_long_running_test = False
            index += 1
        elif sys.argv[index].startswith('-n'):
            noHeaders = True
            index += 1
        elif sys.argv[index].startswith('-p'):
            # Increment by 1 to fetch the reg exp pattern argument.
            index += 1
            if index >= len(sys.argv) or sys.argv[index].startswith('-'):
                usage()
            regexp = sys.argv[index]
            index += 1
        elif sys.argv[index].startswith('-r'):
            # Increment by 1 to fetch the relocated directory argument.
            index += 1
            if index >= len(sys.argv) or sys.argv[index].startswith('-'):
                usage()
            rdir = os.path.abspath(sys.argv[index])
            if os.path.exists(rdir):
                print "Relocated directory:", rdir, "must not exist!"
                usage()
            index += 1
        elif sys.argv[index].startswith('-S'):
            skip_build_and_cleanup = True
            index += 1
        elif sys.argv[index].startswith('-s'):
            # Increment by 1 to fetch the session dir name.
            index += 1
            if index >= len(sys.argv) or sys.argv[index].startswith('-'):
                usage()
            sdir_name = sys.argv[index]
            index += 1
        elif sys.argv[index].startswith('-t'):
            os.environ["LLDB_COMMAND_TRACE"] = "YES"
            index += 1
        elif sys.argv[index].startswith('-v'):
            verbose = 2
            index += 1
        elif sys.argv[index].startswith('-w'):
            os.environ["LLDB_WAIT_BETWEEN_TEST_CASES"] = 'YES'
            index += 1
        elif sys.argv[index].startswith('-x'):
            # Increment by 1 to fetch the breakpoint specification of the benchmark executable.
            index += 1
            if index >= len(sys.argv):
                usage()
            bmBreakpointSpec = sys.argv[index]
            index += 1
        elif sys.argv[index].startswith('-y'):
            # Increment by 1 to fetch the benchmark iteration count.
            index += 1
            if index >= len(sys.argv) or sys.argv[index].startswith('-'):
                usage()
            bmIterationCount = int(sys.argv[index])
            index += 1
        elif sys.argv[index].startswith('-#'):
            # Increment by 1 to fetch the repeat count argument.
            index += 1
            if index >= len(sys.argv) or sys.argv[index].startswith('-'):
                usage()
            count = int(sys.argv[index])
            index += 1
        else:
            print "Unknown option: ", sys.argv[index]
            usage()

    if do_help:
        usage()

    # Do not specify both '-a' and '+a' at the same time.
    if dont_do_python_api_test and just_do_python_api_test:
        usage()

    # The simple progress bar is turned on only if verbose == 0 and LLDB_COMMAND_TRACE is not 'YES'
    if ("LLDB_COMMAND_TRACE" not in os.environ
            or os.environ["LLDB_COMMAND_TRACE"] != "YES") and verbose == 0:
        progress_bar = True

    # Gather all the dirs passed on the command line.
    if len(sys.argv) > index:
        testdirs = map(os.path.abspath, sys.argv[index:])

    # If '-r dir' is specified, the tests should be run under the relocated
    # directory.  Let's copy the testdirs over.
    if rdir:
        from shutil import copytree, ignore_patterns

        tmpdirs = []
        for srcdir in testdirs:
            dstdir = os.path.join(rdir, os.path.basename(srcdir))
            # Don't copy the *.pyc and .svn stuffs.
            copytree(srcdir, dstdir, ignore=ignore_patterns('*.pyc', '.svn'))
            tmpdirs.append(dstdir)

        # This will be our modified testdirs.
        testdirs = tmpdirs

        # With '-r dir' specified, there's no cleanup of intermediate test files.
        os.environ["LLDB_DO_CLEANUP"] = 'NO'

        # If testdirs is ['test'], the make directory has already been copied
        # recursively and is contained within the rdir/test dir.  For anything
        # else, we would need to copy over the make directory and its contents,
        # so that, os.listdir(rdir) looks like, for example:
        #
        #     array_types conditional_break make
        #
        # where the make directory contains the Makefile.rules file.
        if len(testdirs) != 1 or os.path.basename(testdirs[0]) != 'test':
            # Don't copy the .svn stuffs.
            copytree('make',
                     os.path.join(rdir, 'make'),
                     ignore=ignore_patterns('.svn'))

    #print "testdirs:", testdirs

    # Source the configFile if specified.
    # The side effect, if any, will be felt from this point on.  An example
    # config file may be these simple two lines:
    #
    # sys.stderr = open("/tmp/lldbtest-stderr", "w")
    # sys.stdout = open("/tmp/lldbtest-stdout", "w")
    #
    # which will reassign the two file objects to sys.stderr and sys.stdout,
    # respectively.
    #
    # See also lldb-trunk/example/test/usage-config.
    global config
    if configFile:
        # Pass config (a dictionary) as the locals namespace for side-effect.
        execfile(configFile, globals(), config)
Beispiel #59
0
    def make_output_dirs(self):
        self.output_err = ''
        try:
            self.progress_text = 'Removing old output directory...\n'

            output_dir = os.path.join(self.output_dir(), self.project_name())
            temp_dir = os.path.join(TEMP_DIR, 'webexectemp')
            if os.path.exists(temp_dir):
                shutil.rmtree(temp_dir)

            self.progress_text = 'Making new directories...\n'

            if not os.path.exists(output_dir):
                os.makedirs(output_dir)

            os.makedirs(temp_dir)

            self.copy_files_to_project_folder()

            json_file = os.path.join(self.project_dir(), 'package.json')

            if self.output_package_json:
                with codecs.open(json_file, 'w+', encoding='utf-8') as f:
                    f.write(self.generate_json())

            zip_file = os.path.join(temp_dir, self.project_name() + '.nw')

            app_nw_folder = os.path.join(temp_dir,
                                         self.project_name() + '.nwf')

            shutil.copytree(self.project_dir(),
                            app_nw_folder,
                            ignore=shutil.ignore_patterns(output_dir))

            zip_files(zip_file, self.project_dir(), exclude_paths=[output_dir])
            for ex_setting in self.settings['export_settings'].values():
                if ex_setting.value:
                    self.progress_text = '\n'
                    name = ex_setting.display_name
                    self.progress_text = u'Making files for {}...'.format(name)
                    export_dest = os.path.join(output_dir, ex_setting.name)
                    versions = re.findall(r'(\d+)\.(\d+)\.(\d+)',
                                          self.selected_version())[0]

                    minor = int(versions[1])
                    if minor >= 12:
                        export_dest = export_dest.replace(
                            'node-webkit', 'nwjs')

                    if os.path.exists(export_dest):
                        shutil.rmtree(export_dest)

                    # shutil will make the directory for us
                    shutil.copytree(
                        get_data_path('files/' + ex_setting.name),
                        export_dest,
                        ignore=shutil.ignore_patterns('place_holder.txt'))
                    shutil.rmtree(get_data_path('files/' + ex_setting.name))
                    self.progress_text += '.'

                    if 'mac' in ex_setting.name:
                        uncomp_setting = self.get_setting(
                            'uncompressed_folder')
                        uncompressed = uncomp_setting.value
                        app_path = os.path.join(export_dest,
                                                self.project_name() + '.app')

                        try:
                            shutil.move(os.path.join(export_dest, 'nwjs.app'),
                                        app_path)
                        except IOError:
                            shutil.move(
                                os.path.join(export_dest, 'node-webkit.app'),
                                app_path)

                        plist_path = os.path.join(app_path, 'Contents',
                                                  'Info.plist')

                        plist_dict = plistlib.readPlist(plist_path)

                        plist_dict['CFBundleDisplayName'] = self.project_name()
                        plist_dict['CFBundleName'] = self.project_name()
                        version_setting = self.get_setting('version')
                        plist_dict[
                            'CFBundleShortVersionString'] = version_setting.value
                        plist_dict['CFBundleVersion'] = version_setting.value

                        plistlib.writePlist(plist_dict, plist_path)

                        self.progress_text += '.'

                        app_nw_res = os.path.join(app_path, 'Contents',
                                                  'Resources', 'app.nw')

                        if uncompressed:
                            shutil.copytree(app_nw_folder, app_nw_res)
                        else:
                            shutil.copy(zip_file, app_nw_res)
                        self.create_icns_for_app(
                            os.path.join(app_path, 'Contents', 'Resources',
                                         'nw.icns'))

                        self.progress_text += '.'
                    else:
                        ext = ''
                        windows = False
                        if 'windows' in ex_setting.name:
                            ext = '.exe'
                            windows = True

                        nw_path = os.path.join(export_dest,
                                               ex_setting.dest_files[0])

                        if windows:
                            self.replace_icon_in_exe(nw_path)

                        self.compress_nw(nw_path)

                        dest_binary_path = os.path.join(
                            export_dest,
                            self.project_name() + ext)
                        if 'linux' in ex_setting.name:
                            self.make_desktop_file(dest_binary_path,
                                                   export_dest)

                        join_files(dest_binary_path, nw_path, zip_file)

                        sevenfivefive = (stat.S_IRWXU | stat.S_IRGRP
                                         | stat.S_IXGRP | stat.S_IROTH
                                         | stat.S_IXOTH)
                        os.chmod(dest_binary_path, sevenfivefive)

                        self.progress_text += '.'

                        if os.path.exists(nw_path):
                            os.remove(nw_path)

        except Exception:
            exc = traceback.format_exception(sys.exc_info()[0],
                                             sys.exc_info()[1],
                                             sys.exc_info()[2])
            self.output_err += u''.join(exc)
            self.logger.error(exc)
        finally:
            shutil.rmtree(temp_dir)
Beispiel #60
0
def main():
    parser = argparse.ArgumentParser(
        description='Prepare pub packages for upload')
    parser.add_argument('--config', type=str, default='android_Release')
    parser.add_argument('--sdk-root', type=str, default='.')
    parser.add_argument('--packages',
                        default=['mojo', 'mojom', 'mojo_services', 'sky'])
    parser.add_argument('--out-dir', default=None)
    parser.add_argument('build_dir', type=str)
    args = parser.parse_args()

    rel_build_dir = os.path.join(args.build_dir, args.config)
    build_dir = os.path.abspath(rel_build_dir)
    sdk_dir = os.path.abspath(args.sdk_root)
    print('Using SDK in %s' % sdk_dir)
    print('Using build in %s' % build_dir)

    preparing_sky_package = 'sky' in args.packages

    apks_dir = os.path.join(build_dir, 'apks')
    sky_apk_filename = 'SkyDemo.apk'
    sky_apk = os.path.join(apks_dir, sky_apk_filename)
    if preparing_sky_package and (not os.path.exists(sky_apk)):
        print('Required file %s not found.' % sky_apk)
        return -1

    temp_dir = args.out_dir
    if temp_dir:
        try:
            shutil.rmtree(temp_dir)
        except OSError:
            pass
        os.makedirs(temp_dir)
    else:
        # Create a temporary directory to copy files into.
        temp_dir = tempfile.mkdtemp(prefix='pub_packages-')

    print('Packages ready to be uploaded in %s' % temp_dir)

    # Copy packages
    dart_pkg_dir = os.path.join(build_dir, 'gen', 'dart-pkg')
    for package in args.packages:
        print('Preparing package %s' % package)
        src_dir = os.path.join(dart_pkg_dir, package)
        dst_dir = os.path.join(temp_dir, package)
        ignore = None
        # Special case 'mojom' package to not copy generated mojom.dart files.
        if package == 'mojom':
            ignore = shutil.ignore_patterns('*.mojom.dart')
        copy_package(src_dir, dst_dir, ignore)
        # Special case 'mojom' package to remove empty directories.
        if package == 'mojom':
            remove_empty_dirs(dst_dir)
        install_mojo_license_and_authors_files(sdk_dir, dst_dir)

    # Copy Sky apk.
    if preparing_sky_package:
        prepared_apks_dir = os.path.join(temp_dir, 'sky', 'apks')
        os.makedirs(prepared_apks_dir)
        shutil.copyfile(sky_apk,
                        os.path.join(prepared_apks_dir, sky_apk_filename))