def write_permacache_from_dir(dirname):
    """Feed every file under `dirname` into the permacache, deleting each
    file once consumed, then prune the directories left empty.

    The complete file list is materialised up front so progress() can show
    accurate totals; with tens of millions of files this approach would
    need to be rethought (bail out or stream instead).
    """
    allfiles = [os.path.join(base, name)
                for base, _, names in os.walk(dirname)
                for name in names]

    for fname in progress(allfiles, persec=True):
        try:
            write_permacache_from_file(fname)
            os.unlink(fname)
        except:
            # report which file broke, then let the error propagate
            mr_tools.status("failed on %r" % fname)
            raise

    mr_tools.status("Removing empty directories")
    # bottom-up walk so children are removed before their parents
    for base, subdirs, _ in os.walk(dirname, topdown=False):
        for sub in subdirs:
            dname = os.path.join(base, sub)
            try:
                os.rmdir(dname)
            except OSError as e:
                if e.errno != errno.ENOTEMPTY:
                    raise
                mr_tools.status("%s not empty" % (dname,))
Example #2
0
def walkDir(path, formats=False):
    """Recursively list all files and directories under each given path.

    @param path: iterable of directory paths to walk
    @param formats: "absolute" for full joined paths, "relative" for paths
        with the walked root prefix stripped, anything else for bare names
    @return: dict with keys "files" and "dirs", each a list of entries
    """
    result = {"files": [], "dirs": []}

    for item in path:
        for root, dirs, files in os.walk(item):
            if formats == "absolute":
                for f in files:
                    result["files"].append(os.path.join(root, f))
                for d in dirs:
                    result["dirs"].append(os.path.join(root, d))
            elif formats == "relative":
                # BUG FIX: escape the path before using it as a regex --
                # unescaped path characters (e.g. backslashes) corrupt the
                # pattern.
                prefix = re.sub_pattern = re.escape(item + "/")
                for f in files:
                    result["files"].append(re.sub(prefix, "", os.path.join(root, f)))
                for d in dirs:
                    # BUG FIX: directories were appended to "files" here
                    result["dirs"].append(re.sub(prefix, "", os.path.join(root, d)))
            else:
                for f in files:
                    result["files"].append(f)
                for d in dirs:
                    result["dirs"].append(d)
    return result
Example #3
0
def _fswalk_follow_symlinks(path):
    '''
    Walk filesystem, following symbolic links (but without recursion), on python2.4 and later

    If a symlink directory loop is detected, emit a warning and skip.
    E.g.: dir1/dir2/sym-dir -> ../dir2
    '''
    assert os.path.isdir(path) # only designed for directory argument
    walkdirs = set([path])
    # First pass: collect every symlinked directory that is safe to walk --
    # i.e. whose real target is not the containing directory itself or one
    # of its ancestors (that would recurse forever).
    for dirpath, dirnames, filenames in os.walk(path):
        handle_exclude_include_walk(dirpath, dirnames, [])
        real_dirpath = os.path.realpath(dirpath)
        for dirname in dirnames:
            current = os.path.join(dirpath, dirname)
            real_current = os.path.realpath(current)
            if os.path.islink(current):
                # loop check: link resolves to dirpath or an ancestor of it
                if (real_dirpath == real_current or
                    real_dirpath.startswith(real_current + os.path.sep)):
                    warning("Skipping recursively symlinked directory %s" % dirname)
                else:
                    walkdirs.add(current)
    # Second pass: walk the original root plus each accepted symlink dir.
    # NOTE(review): entries reachable both directly and via a symlink may be
    # yielded more than once -- confirm callers tolerate duplicates.
    for walkdir in walkdirs:
        for dirpath, dirnames, filenames in os.walk(walkdir):
            handle_exclude_include_walk(dirpath, dirnames, [])
            yield (dirpath, dirnames, filenames)
Example #4
0
    def test_tmp_dir_normal_2(self):
        """An exception escaping a @with_tempdir function must still clean
        up all temp artifacts and restore the original temp directory."""
        tempdir = tempfile.gettempdir()
        # assert temp directory is empty
        self.assertListEqual(list(os.walk(tempfile.tempdir)),
            [(tempfile.tempdir, [], [])])


        class WitnessException(Exception):
            pass

        @with_tempdir
        def createfile():
            # create several temp files/dirs, then raise so the decorator's
            # error path (not the normal return path) is exercised
            fd1, fn1 = tempfile.mkstemp()
            fd2, fn2 = tempfile.mkstemp()
            dir = tempfile.mkdtemp()
            fd3, fn3 = tempfile.mkstemp(dir=dir)
            tempfile.mkdtemp()
            for fd in (fd1, fd2, fd3):
                os.close(fd)
            raise WitnessException()

        self.assertRaises(WitnessException, createfile)

        # assert tempdir didn't change
        self.assertEqual(tempfile.gettempdir(), tempdir)

        # assert temp directory is empty
        self.assertListEqual(list(os.walk(tempdir)),
            [(tempdir, [], [])])
Example #5
0
    def nuke(self, all=False, certs=False, reinit=True):
        """Cleanup local registry DB, plus various additional filesystem cleanups optionally

        reinit -- re-create an empty schema after nuking (skip for upgrades)
        certs  -- also purge *.gid files and server.cert under
                  /var/lib/sfa/authorities
        all    -- also purge every cached file under the registry basedir
                  except server.key / server.cert
        """
        from sfa.storage.dbschema import DBSchema
        from sfa.util.sfalogging import _SfaLogger
        logger = _SfaLogger(logfile='/var/log/sfa_import.log', loggername='importlog')
        logger.setLevelFromOptVerbose(self.api.config.SFA_API_LOGLEVEL)
        logger.info("Purging SFA records from database")
        dbschema=DBSchema()
        dbschema.nuke()

        # for convenience we re-create the schema here, so there's no need for an explicit
        # service sfa restart
        # however in some (upgrade) scenarios this might be wrong
        if reinit:
            logger.info("re-creating empty schema")
            dbschema.init_or_upgrade()

        # remove the server certificate and all gids found in /var/lib/sfa/authorities
        if certs:
            logger.info("Purging cached certificates")
            for (dir, _, files) in os.walk('/var/lib/sfa/authorities'):
                for file in files:
                    if file.endswith('.gid') or file == 'server.cert':
                        path=dir+os.sep+file
                        os.unlink(path)

        # just remove all files that do not match 'server.key' or 'server.cert'
        if all:
            logger.info("Purging registry filesystem cache")
            preserved_files = [ 'server.key', 'server.cert']
            for (dir,_,files) in os.walk(Hierarchy().basedir):
                for file in files:
                    if file in preserved_files: continue
                    path=dir+os.sep+file
                    os.unlink(path)
Example #6
0
def test_arc_from_dir_re5(tmpdir, arc_file):
    """get an arc file (ideally from the game), unpack it, repackit, unpack it again
    compare the 2 arc files and the 2 output folders"""
    arc_original = Arc(file_path=arc_file)
    arc_original_out = os.path.join(str(tmpdir), os.path.basename(arc_file).replace('.arc', ''))
    arc_original.unpack(arc_original_out)

    # repack the unpacked tree into a fresh .arc blob
    arc_from_dir = Arc.from_dir(arc_original_out)
    arc_from_dir_out = os.path.join(str(tmpdir), 'arc-from-dir.arc')
    with open(arc_from_dir_out, 'wb') as w:
        w.write(arc_from_dir)

    # round-trip: unpack the repacked archive a second time
    arc_from_arc_from_dir = Arc(file_path=arc_from_dir_out)
    arc_from_arc_from_dir_out = os.path.join(str(tmpdir), 'arc-from-arc-from-dir')
    arc_from_arc_from_dir.unpack(arc_from_arc_from_dir_out)

    # flat lists of file names produced by each unpack pass
    files_extracted_1 = [f for _, _, files in os.walk(arc_original_out) for f in files]
    files_extracted_2 = [f for _, _, files in os.walk(arc_from_arc_from_dir_out) for f in files]

    # Assumming zlib default compression used in all original arc files.
    assert os.path.getsize(arc_file) == os.path.getsize(arc_from_dir_out)
    # The hashes would be different due to the file_paths ordering
    assert arc_original.files_count == arc_from_arc_from_dir.files_count
    assert sorted(files_extracted_1) == sorted(files_extracted_2)
    # NOTE(review): 32768 is presumably the format's fixed first-entry data
    # offset -- confirm against the Arc implementation.
    assert arc_from_arc_from_dir.file_entries[0].offset == 32768
def function0(base='C:\\Users\\Та\\Desktop\\универр'):
    """Map file-count -> directory name for directories found under `base`.

    For every directory name `i` seen anywhere in the walk of `base`, the
    tree `base/i` is walked and each walked level stores d[len(files)] = i;
    equal counts overwrite (last walked wins) -- semantics preserved from
    the original.

    Generalization: the directory is now a parameter; the default keeps the
    original hard-coded path, so existing no-argument calls are unchanged.
    """
    d = {}
    for root, dirs, files in os.walk(base):
        for i in dirs:
            # os.path.join replaces the original manual '\\' concatenation;
            # like the original, the inner walk is always rooted at base+i
            for root1, dirs1, files1 in os.walk(os.path.join(base, i)):
                d[len(files1)] = i
    return d
Example #8
0
def find_data_files(srcdir, destdir, *wildcards, **kw):
    """
    get a list of all files under the srcdir matching wildcards,
    returned in a format to be used for install_data
    """
    # NOTE(review): `walk` here is not os.walk -- the helper signature
    # (arg, dirname, files) matches the legacy os.path.walk callback
    # protocol; confirm which `walk` is imported at module level.
    def walk_helper(arg, dirname, files):
        # skip anything inside Subversion metadata directories
        if '.svn' in dirname:
            return
        names = []
        lst, wildcards, dirnameconverter, destdir = arg
        for wc in wildcards:
            wc_name = os.path.normpath(os.path.join(dirname, wc))
            for f in files:
                filename = os.path.normpath(os.path.join(dirname, f))

                if fnmatch.fnmatch(filename, wc_name) and not os.path.isdir(filename):
                    names.append(filename)
        if names:
            # rewrite the srcdir prefix of this directory into destdir
            destdirname = dirnameconverter.sub(destdir, dirname)
            lst.append( (destdirname, names ) )

    file_list = []
    recursive = kw.get('recursive', True)
    # anchored so only the leading srcdir portion of a path is rewritten
    converter = re.compile('^({0})'.format(srcdir))
    if recursive:
        walk(srcdir, walk_helper, (file_list, wildcards, converter, destdir))
    else:
        # single level: hand the helper the direct children of srcdir
        walk_helper((file_list, wildcards, converter, destdir),
                    srcdir,
                    [os.path.basename(f) for f in glob.glob(os.path.join(srcdir, '*'))])
    return file_list
def _get_files_matching_extensions(paths, extensions=[]):
    """Scan each directory in the os.pathsep-separated `paths` string and
    bucket the extension-stripped file names found there by extension.

    Returns {ext: sorted unique base names}; when no extensions are given,
    every file lands under the '*' key.
    """
    if isinstance(extensions, strbase):
        extensions = [extensions]

    matched_files = defaultdict(lambda: [])

    for path in paths.split(os.pathsep):
        # '.' would make results depend on an ill-defined cwd -- skip it
        if path == '.':
            continue

        # strip the '!!' artifacts that sometimes appear on POSIX results
        path = os.path.normpath(path.replace(u'!!', u''))
        if not os.path.exists(path):
            continue

        for _, _, files in os.walk(path):
            if len(extensions) > 0:
                for name in files:
                    for ext in extensions:
                        if name.endswith(u''.join((os.extsep, ext))):
                            matched_files[ext].append(os.path.splitext(name)[0])
            else:
                for name in files:
                    matched_files['*'].append(os.path.splitext(name)[0])

    return {key: sorted(set(value), key=lambda s: s.lower())
            for key, value in matched_files.items()}
Example #10
0
def test_imports():
    u"""Vérifie qu'il n'existe pas d'imports relatifs implicites."""
    # On liste les modules locaux
    locaux = set()
    def test(line):
        assert not re.search('(from|import) (' + '|'.join(locaux) + ')[. ]', line)

    for root, dirs, files in walk(WXGEODIR):
        if 'sympy' in dirs:
            dirs.remove('sympy')
        if 'sympy_OLD' in dirs:
            dirs.remove('sympy_OLD')
        for name in files:
            if name.endswith('.py'):
                locaux.add(name[:-3])
        for name in dirs:
            if isfile(join(root, name, '__init__.py')):
                locaux.add(name)
    assert 'sympy' not in locaux and 'trigonometry' not in locaux
    # on teste les imports
    for root, dirs, files in walk(WXGEODIR):
        for name in files:
            if name.endswith('.py'):
                with open(join(root, name)) as f:
                    for n, line in enumerate(f):
                        if 'from ' in line or 'import ' in line:
                            assert test(line), join(root, name) + ' L' + str(n + 1)
Example #11
0
    def __config_unix__(self):
        """Choose and prepare the log directory on Unix, record the current
        total size of existing logs (wiping them past 10 MB), and compute
        the new timestamped log file name.

        Raises lexcep on permission or filesystem errors.
        """
        # BUG FIX: "~" is a literal path to os.path -- expand it explicitly
        home = os.path.expanduser("~")
        if os.path.isdir("/var/log") and bool(os.stat("/var/log").st_mode & stat.S_IRWXG):
            self._path = "/var/log/Neofelis"
        elif os.path.isdir(home) and bool(os.stat(home).st_mode & stat.S_IRWXG):
            self._path = os.path.join(home, "log")
        else:
            # outside an except block sys.exc_info() is empty, so the old
            # .with_traceback(...) call was a no-op and is dropped
            raise lexcep("Permission Error: Unable to access log directory")

        try:
            if not os.path.isdir(self._path):
                os.mkdir(self._path)
        except (IOError, OSError) as e:
            # BUG FIX: os.mkdir raises OSError on Python 3 (IOError kept
            # for older interpreters; they are aliases on py3)
            raise lexcep(str(e)).with_traceback(sys.exc_info()[2])

        # Sum the sizes of all existing log files; wipe them past 10 MB.
        self._size = 0
        try:
            for (path, dirs, files) in os.walk(self._path):
                for file in files:
                    f = os.path.join(path, file)
                    self._size += os.path.getsize(f)

            if self._size / (1024 * 1024.0) > 10:
                for (path, dirs, files) in os.walk(self._path):
                    for file in files:
                        f = os.path.join(path, file)
                        os.remove(f)
        except (IOError, OSError) as e:
            raise lexcep(str(e)).with_traceback(sys.exc_info()[2])
        except Exception as e:
            raise lexcep(str(e)).with_traceback(sys.exc_info()[2])

        # BUG FIX: os.path.join -- the old `self._path + strftime(...)`
        # concatenation dropped the path separator entirely
        self._fn = os.path.join(self._path, strftime("%Y%m%d%H%M%S", gmtime()) + ".log")
Example #12
0
def main(root, componentsToMask, dryRun):
    """Strip an installer tree: empty the throwaway folders, then mask the
    files of each listed component (and repeat for build outputs)."""
    buildRoot = os.path.abspath(os.path.join(root, '../build'))
    emptyFolder(root, 'buildtools', dryRun)
    emptyFolder(root, 'boost', dryRun)
    emptyFolder(root, 'sample-data', dryRun)
    # each entry is "name <rest>"; keep only the first word
    componentsToMask = [x[0:x.find(' ')] for x in componentsToMask]
    for c in componentsToMask:
        path = os.path.join(root, c)
        print('Masking %s...' % path)
        for folder, dirs, files in os.walk(path):
            print('folder = %s' % folder)
            # prune .svn from the walk and delete it from disk
            if '.svn' in dirs:
                dirs.remove('.svn')
                svnPath = os.path.join(folder, '.svn')
                print(svnPath)
                if not dryRun:
                    shutil.rmtree(svnPath)
            for f in files:
                if shouldMask(folder, f):
                    mask(folder, f, dryRun)
    if os.path.isdir(buildRoot):
        emptyFolder(buildRoot, 'Testing', dryRun)
        for c in componentsToMask:
            # NOTE(review): this joins against `root`, not `buildRoot`, so
            # the second pass re-masks the source tree rather than the
            # build tree -- confirm that is intended.
            path = os.path.join(root, c)
            for folder, dirs, files in os.walk(path):
                for f in files:
                    if shouldMask(folder, f):
                        mask(folder, f, dryRun)
Example #13
0
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True,
                 mode='w'):
    """Create a zip file from all the files under 'base_dir'.  The output
    zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
    Python module (if available) or the InfoZIP "zip" utility (if installed
    and found on the default search path).  If neither tool is available,
    raises DistutilsExecError.  Returns the name of the output zip file.
    """
    import zipfile

    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

    def visit(z, dirname, names):
        # Archive each regular file under its path relative to base_dir.
        for name in names:
            path = os.path.normpath(os.path.join(dirname, name))
            if os.path.isfile(path):
                p = path[len(base_dir) + 1:]
                if not dry_run:
                    z.write(path, p)
                log.debug("adding '%s'" % p)

    compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    if not dry_run:
        z = zipfile.ZipFile(zip_filename, mode, compression=compression)
        # BUG FIX: close the archive even if a write raises, so the file
        # handle is not leaked and the central directory is flushed
        try:
            for dirname, dirs, files in os.walk(base_dir):
                visit(z, dirname, files)
        finally:
            z.close()
    else:
        for dirname, dirs, files in os.walk(base_dir):
            visit(None, dirname, files)
    return zip_filename
Example #14
0
    def _detect_treestyle(self):
        """Guess the project tree layout: "gnu" (translation files at the
        top level), "nongnu" (per-language subdirectories), or None when
        undecidable.

        NOTE(review): Python 2 idioms -- `dirlisting.next()` and truthiness
        of `filter(...)` results -- on Python 3 filter() returns an
        always-truthy iterator, so this method would misdetect; confirm the
        target interpreter before reuse.
        """
        try:
            dirlisting = os.walk(self.get_real_path())
            # the first walk entry describes the project root itself
            dirpath, dirnames, filenames = dirlisting.next()

            if not dirnames:
                # No subdirectories
                if filter(self.file_belongs_to_project, filenames):
                    # Translation files found, assume gnu
                    return "gnu"

            # There are subdirectories
            if filter(lambda dirname: dirname == 'templates' or
                      langcode_re.match(dirname), dirnames):
                # Found language dirs assume nongnu
                return "nongnu"

            # No language subdirs found, look for any translation file
            for dirpath, dirnames, filenames in os.walk(self.get_real_path()):
                if filter(self.file_belongs_to_project, filenames):
                    return "gnu"
        except:
            # deliberately best-effort: any error means "unknown layout"
            pass

        # Unsure
        return None
Example #15
0
    def getMeasurement(self, l):
        """
        Return the list of files of measurements.
        @type l: C{String}
        @param l: label of a study
        @rtype: C{List}
        @return: C{List} of list of nodes <plot>, and C{List} of files of measurements
        """
        nodes = []
        files = []

        for node in self.getStudyNode(l).getElementsByTagName("measurement"):
            nodes.append(node.getElementsByTagName("plot"))
            fileName = node.attributes["file"].value
            filePath = node.attributes["path"].value

            # Pick where to search for fileName: the study root when no
            # path is given, otherwise under POST/<path> (Code_Saturne
            # convention for experimental data).
            if filePath == "":
                search_root = os.path.join(self.getRepository(), l)
            else:
                search_root = os.path.join(self.getRepository(), l, 'POST', filePath)

            for root, dirs, fs in os.walk(search_root):
                if fileName in fs:
                    filePath = root
                    break

            files.append(os.path.join(filePath, fileName))

        return nodes, files
    def generate_file_map(self):
        # Read all the files in the given folder.
        # We gather them all and then send them up to GAE.
        # We do this rather than processing template locally. Because local processing
        #
        # Returns {key: contents}: template-like files under self.path are
        # keyed "<fdir>/<name>"; images and parent-dir files by bare name
        # (later walks overwrite earlier keys on collision).
        file_map = dict()
        # folder of the currently open file, relative to the parent path
        fdir = os.path.dirname(self.view.file_name()).replace(self.parent_path+'/', '')
        for root, dirs, files in os.walk(self.path):
            for filename in files:
                if any(filename.endswith(postfix) for postfix in ['.tracking', '.html', '.txt', '.yaml', '.js']):
                    contents = read_file(os.path.join(root, filename))
                    file_map['%s/%s' % (fdir, filename)] = contents
                    # file_map[filename] = contents
        # image files go through encode_image and are keyed by bare name
        for root, dirs, files in os.walk(self.image_path):
            for filename in files:
                image_path = os.path.abspath(os.path.join(root, filename))
                contents = encode_image(image_path)
                file_map[filename] = contents
        for root, dirs, files in os.walk(self.parent_path):
            for filename in files:
                if any(filename.endswith(postfix) for postfix in ['.tracking', '.html', '.txt', '.yaml', '.js']):
                    contents = read_file(os.path.join(root, filename))
                    file_map[filename] = contents
        print(file_map.keys())

        return file_map
Example #17
0
def findReplace(directory, find, replace, filePattern):
    """Recursively rename directories and files and rewrite file contents,
    replacing every occurrence of `find` with `replace`.

    Only files matching `filePattern` (fnmatch style) have their names and
    contents rewritten; directory renames apply everywhere.
    """
    # Rename directories first; restart the walk after each rename because
    # os.walk's pending state is invalid once a path changes under it.
    restart = True
    while restart:
        restart = False
        for path, dirs, files in os.walk(os.path.abspath(directory)):
            newpath = path.replace(find, replace)
            if newpath != path:
                os.rename(path, newpath)
                restart = True
                break

    for path, dirs, files in os.walk(os.path.abspath(directory)):
        for filename in fnmatch.filter(files, filePattern):
            # NOTE(review): __file__ is normally a path while filename is a
            # bare name, so this guard probably never fires -- confirm intent.
            if filename == __file__:
                continue

            newfilename = filename.replace(find, replace)
            if newfilename != filename:
                os.rename(os.path.join(path, filename), os.path.join(path, newfilename))
                filename = newfilename

            filepath = os.path.join(path, filename)
            with open(filepath) as f:
                s = f.read()

            # BUG FIX: the old test `if(s.find(find)):` was truthy for a
            # missing match (-1) and falsy when the content *starts* with
            # `find` (index 0), so such files were silently skipped.
            if find in s:
                s = s.replace(find, replace)
                filepath = filepath.replace(find, replace)

                with open(filepath, "w") as f:
                    f.write(s)
Example #18
0
def get_package_data():  # pragma: no cover
    """Collect asdf-standard schema and reference-file paths (relative to
    their respective roots) in the shape expected for package_data."""
    ASDF_STANDARD_ROOT = os.environ.get("ASDF_STANDARD_ROOT", "asdf-standard")

    def _collect(root, suffixes):
        # Every file under `root` whose name ends with one of `suffixes`,
        # expressed relative to `root`.
        found = []
        for node, dirs, files in os.walk(root):
            for fname in files:
                if fname.endswith(suffixes):
                    found.append(os.path.relpath(os.path.join(node, fname), root))
        return found

    schemas = _collect(os.path.join(ASDF_STANDARD_ROOT, "schemas"), ('.yaml',))
    reference_files = _collect(
        os.path.join(ASDF_STANDARD_ROOT, "reference_files"), ('.yaml', '.asdf'))

    return {
        str('asdf.schemas'): schemas,
        str('asdf.reference_files'): reference_files
    }
def findModulesInPackage(package, name, fileNameFilters=[]):
    """
    Returns a dictionary where the key is the path to the package or
    subpackage. The value is the list of modules in which the string 'name'
    was found. Name can be a regular expression. Using '^' as a first
    symbol to match string at the beginning of the lines is faster.
    """
    # Directories never worth descending into.
    skip_dirs = ['CVS', 'regression', 'Tutorial', 'test', 'Doc', 'doc',
                 'Icons', 'Tests']

    # Build a per-line matcher once, outside the walk.
    if name[0] == '^':
        prefix = name[1:]
        def matcher(line):
            return line.startswith(prefix)
    else:
        import re
        pat = re.compile(name)
        def matcher(line):
            return pat.search(line) is not None

    candidates = {}
    for root, dirs, files in os.walk(package):
        # remove directories not to visit
        for rem in skip_dirs:
            if rem in dirs:
                dirs.remove(rem)
        for fi in files:
            if fi[-3:] == '.py' and fi[0] not in ['#', '.']:
                # BUG FIX: the old inner loop's `continue` only skipped to
                # the next filter string, never the file, so the filters
                # were silently ignored.
                if any(flt in fi for flt in fileNameFilters):
                    continue
                # BUG FIX: `with` closes the handle (the old code leaked
                # it); has_key() (Python-2-only) replaced by `in`/setdefault.
                with open(os.path.join(root, fi)) as f:
                    data = f.readlines()
                # BUG FIX: the old `filter(...) != []` test is always true
                # on Python 3 (filter returns an iterator); any() keeps the
                # intended Python 2 semantics on both versions.
                if any(matcher(line) for line in data):
                    candidates.setdefault(root, []).append(fi)
    return candidates
Example #20
0
def convert_to_ascii(inputName, dirName):
    # Rename the download directory and everything inside it via
    # CharReplace when the ASCII convert option is enabled; returns the
    # (possibly updated) inputName and dirName.
    # NOTE(review): Python 2 code (print statement, dict.has_key).
    ascii_convert = int(nzbtomedia.CFG["ASCII"]["convert"])
    if ascii_convert == 0 or os.name == 'nt':  # just return if we don't want to convert or on windows os and "\" is replaced!.
        return inputName, dirName

    encoded, inputName = CharReplace(inputName)

    # split off the final path component; handle a trailing "/"
    dir, base = os.path.split(dirName)
    if not base:  # ended with "/"
        dir, base = os.path.split(dir)

    encoded, base2 = CharReplace(base)
    if encoded:
        dirName = os.path.join(dir, base2)
        logger.info("Renaming directory to: %s." % (base2), 'ENCODER')
        os.rename(os.path.join(dir,base), dirName)
        if os.environ.has_key('NZBOP_SCRIPTDIR'):
            print "[NZB] DIRECTORY=%s" % (dirName)  # Return the new directory to NZBGet.

    # bottom-up walk: deepest subdirectories are renamed before their
    # parents' paths can change
    for dirname, dirnames, filenames in os.walk(dirName, topdown=False):
        for subdirname in dirnames:
            encoded, subdirname2 = CharReplace(subdirname)
            if encoded:
                logger.info("Renaming directory to: %s." % (subdirname2), 'ENCODER')
                os.rename(os.path.join(dirname, subdirname), os.path.join(dirname, subdirname2))

    # second pass renames the files themselves
    for dirname, dirnames, filenames in os.walk(dirName):
        for filename in filenames:
            encoded, filename2 = CharReplace(filename)
            if encoded:
                logger.info("Renaming file to: %s." % (filename2), 'ENCODER')
                os.rename(os.path.join(dirname, filename), os.path.join(dirname, filename2))

    return inputName, dirName
Example #21
0
def index_json(request):
  """Return (optionally as JSONP) the sorted list of all metric names found
  in the whisper and ceres storage trees."""
  jsonp = request.REQUEST.get('jsonp', False)
  matches = []

  # Whisper: every *.wsp file, recorded relative to its storage dir.
  for whisper_dir in settings.WHISPER_DIRS:
    for root, dirs, files in os.walk(whisper_dir):
      relative_root = root.replace(whisper_dir, '')
      for basename in files:
        if fnmatch.fnmatch(basename, '*.wsp'):
          matches.append(os.path.join(relative_root, basename))

  # Ceres: a directory is a node iff it contains a .ceres-node marker.
  for root, dirs, files in os.walk(settings.CERES_DIR):
    relative_root = root.replace(settings.CERES_DIR, '')
    for filename in files:
      if filename == '.ceres-node':
        matches.append(relative_root)

  # Turn filesystem paths into dotted metric names.
  matches = [
    m.replace('.wsp', '').replace('.rrd', '').replace('/', '.').lstrip('.')
    for m in sorted(matches)
  ]
  return json_response_for(request, matches, jsonp=jsonp)
Example #22
0
def create_dependency_tree(research_dir):
    # Walk research_dir twice: once to count files for the progress
    # display, once to analyze() each regular file; symlinks are skipped.
    # NOTE(review): Python 2 code (print statements).
    #print subprocess.check_output("find "+search_dir, shell=True)

    print "going.. " + research_dir
    total_file_num = 0
    file_num = 0
    for root, dirs, files in os.walk(research_dir):
        for new_file in files:
          total_file_num = total_file_num + 1 
  
    for root, dirs, files in os.walk(research_dir):
        #Analyze only files, not links
        for new_file in files:
          file_num = file_num + 1 
          print ("Analyze " + str(file_num) +"/"+ str(total_file_num) ) 
          # ANSI escape: move the cursor up so the counter overwrites itself
          sys.stdout.write("\033[F")
          pathname = os.path.join(root, new_file)
          # lstat (not stat) so symlinks are seen as links, not targets
          mode = os.lstat(pathname).st_mode
          #links are skipped
          if S_ISLNK(mode): 
              #print "link " + pathname + " " + str(mode)
              pass
          elif S_ISREG(mode):
              # It's a file, call the recursive function to analyze it
              #print "analyze " + pathname            
              analyze(pathname, "nobody")
          else:
              # Unknown file type, print a message
              print 'Skipping %s' % pathname
              pass
def extract_rollouts(dataset='train', n_folders=20, n_images=1):
	"""
	Extracts rollout images from input folder,
	and copies it to dataset folder.

	dataset: 'templates' copies n_images random .jpg per rollout folder,
	'test' copies (and labels) the lexically last image of each folder;
	any other value copies nothing.
	NOTE(review): Python 2 code; rollout_dir / templates_dir / test_dir
	are module-level globals -- confirm before reuse.
	"""
	for root, dirs, files in os.walk(rollout_dir):
		# shuffle in place so the selected folders differ between runs
		np.random.shuffle(dirs)
		for i in range(min(n_folders, len(dirs))):
			print "Folder {}".format(i)
			rollout_folder = dirs[i]
			for _, _, files in  os.walk(os.path.join(rollout_dir, rollout_folder)):
				images =  [x for x in files if x.endswith('.jpg')]
				if dataset == 'templates':
					np.random.shuffle(images)
					for im in images[:n_images]:
						src = os.path.join(rollout_dir, rollout_folder, im)
						dst = os.path.join(templates_dir, im)
						copy_processed_image(src, dst)
				elif dataset == 'test':
					im = sorted(images)[-1]
					print "Image: {}".format(im)
					src = os.path.join(rollout_dir, rollout_folder, im)
					dst, new_im_name = label_image(src, im, test_dir)
					copy_processed_image(src, dst)
					copy_state_label(rollout_dir, rollout_folder, im, new_im_name)
		# only the top level of rollout_dir is scanned
		break
Example #24
0
  def _read_descriptor_files(self):
    # Process every target path (file or directory), skipping missing
    # targets, until the list is drained or a stop is requested; then
    # publish the processed-files map and signal completion.
    new_processed_files = {}
    remaining_files = list(self._targets)

    while remaining_files and not self._is_stopped.isSet():
      target = remaining_files.pop(0)

      if not os.path.exists(target):
        self._notify_skip_listeners(target, FileMissing())
        continue

      if os.path.isdir(target):
        # os.walk gained followlinks in python 2.6; fall back without it
        if stem.prereq.is_python_26():
          walker = os.walk(target, followlinks = self._follow_links)
        else:
          walker = os.walk(target)

        self._handle_walker(walker, new_processed_files)
      else:
        self._handle_file(target, new_processed_files)

    self._processed_files = new_processed_files

    # FINISHED sentinel tells queue consumers there is nothing more coming
    # (only emitted when we were not interrupted by a stop request)
    if not self._is_stopped.isSet():
      self._unreturned_descriptors.put(FINISHED)

    self._iter_notice.set()
Example #25
0
    def test_tmp_dir_normal_1(self):
        """A @with_tempdir function returning normally must leave the
        global temp directory unchanged and empty."""
        tempdir = tempfile.gettempdir()
        # assert temp directory is empty
        self.assertListEqual(list(os.walk(tempdir)),
            [(tempdir, [], [])])

        witness = []

        @with_tempdir
        def createfile(list):
            # create several temp files/dirs inside the decorator's sandbox
            fd1, fn1 = tempfile.mkstemp()
            fd2, fn2 = tempfile.mkstemp()
            dir = tempfile.mkdtemp()
            fd3, fn3 = tempfile.mkstemp(dir=dir)
            tempfile.mkdtemp()
            list.append(True)
            for fd in (fd1, fd2, fd3):
                os.close(fd)

        self.assertFalse(witness)
        createfile(witness)
        self.assertTrue(witness)  # proves the decorated body actually ran

        self.assertEqual(tempfile.gettempdir(), tempdir)

        # assert temp directory is empty
        self.assertListEqual(list(os.walk(tempdir)),
            [(tempdir, [], [])])
Example #26
0
def EmptyDir(d):
    """Recursively delete every file and subdirectory under d, keeping d
    itself; if d does not exist it is created instead.

    A None argument is a no-op.
    """
    if debugMode():
        print("EmptyDir", d)

    # idiom fix: identity test for None instead of `== None`
    if d is None:
        return

    if os.path.isdir(d):
        # delete all the files; the recursive EmptyDir calls duplicate
        # os.walk's own recursion but are harmless, so they are kept
        for item in os.walk(d):
            for sdir in item[1]:
                EmptyDir(os.path.join(item[0], sdir))
            for f in item[2]:
                ff = os.path.join(item[0], f)
                os.remove(ff)
                if debugMode():
                    print("  removed", ff)
    else:
        os.mkdir(d)
        print("created", d)

    # delete any (now empty) subdirectories
    for dd in os.walk(d):
        for ddir in dd[1]:
            EmptyDir(os.path.join(dd[0], ddir))
            os.rmdir(os.path.join(dd[0], ddir))

    if debugMode():
        print("all files deleted from", d)
Example #27
0
    def copy_template():
        """Instantiate the template directory as `name`, renaming the inner
        template directory and substituting {{ var }} / __config_var__
        placeholders in every copied file."""
        config_prompt(template)
        shutil.copytree(template, name)

        if os.path.exists('%s/%s' % (name, 'config.yaml')):
            os.remove('%s/%s' % (name, 'config.yaml'))

        # rename any directory matching the template's own name
        for dirname, dirnames, files in os.walk(name):
            for d in dirnames:
                if d == options.template:
                    shutil.copytree('%s/%s' % (dirname, d), '%s/%s' % (dirname, name))
                    shutil.rmtree('%s/%s' % (dirname, d))

        for dirname, dirnames, files in os.walk(name):
            for filename in files:
                # BUG FIX: with-statements close the handles (the old code
                # leaked them on error) and the regexes are now raw strings
                # -- '\s'/'\w' are invalid escapes in plain literals on
                # modern Python.
                with open('%s/%s' % (dirname, filename), 'r') as f:
                    lines = f.readlines()

                first_pass = [re.sub(r'{{\s*(\w+)\s*}}', replace_variable, line) for line in lines]
                new_lines = [re.sub(r'__config_(\w+)__', replace_variable, line) for line in first_pass]

                with open('%s/%s' % (dirname, filename), 'w') as f:
                    f.write(''.join(new_lines))
    def generate_file_map(self):
        # Read all the files in the given folder.
        # We gather them all and then send them up to GAE.
        # We do this rather than processing template locally. Because local processing
        #
        # Returns {bare filename: contents} for template-like files under
        # self.path plus encoded images under self.image_path (image keys
        # overwrite template keys on collision).
        file_map = dict()
        for root, dirs, files in os.walk(self.path):
            for filename in files:
                if any(filename.endswith(postfix) for postfix in ['.tracking', '.html', '.txt', '.yaml', '.js']):
                    contents = read_file(os.path.join(root, filename))
                    file_map[filename] = contents

        # Read all the image files for this partner. Obviously, this is inefficient, and we should probably
        # only read the files that are used in the html file.
        # But we have no facilities for this kind of processing here, since it is a PITA to install pip
        # packages through a sublimetext plugin.
        # But we might have to figure this out if it becomes a performance bottleneck. I think it is ok
        # as long as you are on a fast connection.
        # image_path = os.path.abspath(os.path.join(self.path, "img"))

        for root, dirs, files in os.walk(self.image_path):
            for filename in files:
                image_path = os.path.abspath(os.path.join(root, filename))
                contents = encode_image(image_path)
                file_map[filename] = contents

        return file_map
def native_report2(src):
    """For each version directory directly under src, count the *.log files
    in every subdirectory and append an html_report section; write the
    combined report to src/index.html."""
    c = ""
    for root, versions, ds in os.walk(src):
        if root != src:
            continue  # only the top level: versions = direct children of src
        for version in sorted(versions, key=str.lower, reverse=True):
            total = 0
            data = {}
            dd = os.path.join(root, version)
            for d_version, dirs, files in os.walk(dd):
                for d in dirs:
                    p = os.path.join(d_version, d) + os.sep + "*.log"
                    #p = os.path.join(root, d) + os.sep + "*"
                    s = len(glob.glob(p))
                    total += s
                    # report key: version-level path + name relative to src
                    name = os.path.join(root, d)
                    if name.startswith(src):
                        name = name[len(src):]
                    if name.startswith("/"):
                        name = name[1:]
                    name = d_version + os.sep + name
                    data[name] = s
            c += html_report(data, total, version) + "<br/><br/>"
    # BUG FIX: the old bare open(...).write(c) leaked the file handle and,
    # on non-CPython interpreters, risked an unflushed write
    with open(os.path.join(src, "index.html"), "w") as out:
        out.write(c)
Example #30
0
def test_coerce_pycache_to_old_style():
    """coerce_pycache_to_old_style must move __pycache__/*.pyc files next
    to their sources as old-style test.pyc, leaving no __pycache__ dirs."""
    cwd = os.getcwd()
    with TemporaryDirectory() as tmp:
        # Layout: test.py + __pycache__ at the top level and in testdir/
        os.makedirs(os.path.join(tmp, '__pycache__'))
        os.makedirs(os.path.join(tmp, 'testdir', '__pycache__'))
        pyc = 'test.cpython-{0}{1}.pyc'.format(sys.version_info.major,
                                               sys.version_info.minor)
        for rel in ('test.py',
                    os.path.join('__pycache__', pyc),
                    os.path.join('testdir', 'test.py'),
                    os.path.join('testdir', '__pycache__', pyc)):
            with open(os.path.join(tmp, rel), 'w') as f:
                f.write("\n")

        os.chdir(tmp)
        # BUG FIX: the walk/coerce calls are now inside the try so the cwd
        # is restored even if they raise; the old no-op `except: raise`
        # wrapper is dropped.
        try:
            for root, dirs, files in os.walk(tmp):
                fs = [os.path.join(root, _) for _ in files]
                post.coerce_pycache_to_old_style(fs, cwd=tmp)
            assert os.path.isfile(os.path.join(tmp, 'test.pyc')), os.listdir(tmp)
            assert os.path.isfile(os.path.join(tmp, 'testdir', 'test.pyc')), \
                os.listdir(os.path.join(tmp, 'testdir'))
            for root, dirs, files in os.walk(tmp):
                assert '__pycache__' not in dirs
        finally:
            os.chdir(cwd)