Example #1
def remove_old_build_files(build_asset_info, build_path):
    old_build_files = []
    excludes = [
        path_join(build_path, 'sourcehashes.json'),
        path_join(build_path, 'cgfx2json.version'),
        path_join(build_path, 'json2json.version'),
        path_join(build_path, 'obj2json.version'),
        path_join(build_path, 'tga2png.version'),
        path_join(build_path, 'bmfont2json.version'),
        path_join(build_path, 'dae2json.version'),
        path_join(build_path, 'material2json.version')
    ]
    for base, _, files in os_walk(build_path):
        dir_files = [path_join(base, filename) for filename in files]
        old_build_files.extend(f for f in dir_files if f not in excludes)

    for asset_info in build_asset_info:
        try:
            old_build_files.remove(asset_info.build_path)
        except ValueError:
            pass

    for path in old_build_files:
        print 'Removing old build file ' + path
        remove_file(path)

    for base, _, _ in os_walk(build_path, topdown=False):
        try:
            rmdir(base)
        except OSError:
            pass
        else:
            print 'Removed old build directory ' + base
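The bottom-up sweep at the end of this example is what makes the directory cleanup work: with topdown=False, os.walk yields child directories before their parents, so a parent emptied by earlier rmdir calls can itself be removed later in the same pass. A minimal sketch of just that pattern (the root argument here is hypothetical):

from os import walk, rmdir

def prune_empty_dirs(root):
    # topdown=False: leaves are visited first, parents last
    for base, _, _ in walk(root, topdown=False):
        try:
            rmdir(base)   # succeeds only if the directory is now empty
        except OSError:
            pass          # still contains files or subdirectories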
Example #2
   def run(self):
      need_normal_clean = True
      exclude_files = []
      remove_files = []
      remove_dirs = []

      # remove also: DIRS: `build, dist, cover, *._pyxbld, *.egg-info`
      # and FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
      if self.all:
         need_normal_clean = True
         for dir_ in {'build', 'dist', 'cover'}:
            dir_path = path_join(ROOT_PACKAGE_PATH, dir_)
            if path_exists(dir_path):
               remove_dirs.append(dir_path)
         for root, dirs, files in os_walk(ROOT_PACKAGE_PATH):
            for dir_ in dirs:
               if '_pyxbld' in dir_ or 'egg-info' in dir_:
                  remove_dirs.append(path_join(root, dir_))

         # remove FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
         for root, dirs, files in os_walk(MAIN_PACKAGE_PATH):
            for file_ in files:
               if file_ not in exclude_files:
                  if path_splitext(file_)[-1] in {'.so', '.c'}:
                     remove_files.append(path_join(root, file_))

                  tmp_name, tmp_ext = path_splitext(file_)
                  if tmp_ext == '.pyx':
                     # Check if we have a html with the same name
                     check_html_path = path_join(root, tmp_name + '.html')
                     if isfile(check_html_path):
                        remove_files.append(check_html_path)

      # do the general clean
      if need_normal_clean:
         for file_ in {'.coverage', 'MANIFEST'}:
            if path_exists(file_):
               remove_files.append(file_)

         for root, dirs, files in os_walk(ROOT_PACKAGE_PATH):
            for file_ in files:
               if file_ not in exclude_files:
                  if path_splitext(file_)[-1] in {'.pyc', '.pyo', '.pyd', '.o', '.orig'}:
                     remove_files.append(path_join(root, file_))
            for dir_ in dirs:
               if '__pycache__' in dir_:
                  remove_dirs.append(path_join(root, dir_))

      # REMOVE ALL SELECTED
      # noinspection PyBroadException
      try:
         for file_ in remove_files:
            if path_exists(file_):
               os_remove(file_)
         for dir_ in remove_dirs:
            if path_exists(dir_):
               rmtree(dir_)
      except Exception:
         pass
Example #3
File: purge.py Project: subakva/velocette
def purgeDirs(path=os_curdir, verbose=True, dry_run=False):
    """Purge orphaned .pyc/.pyo files and remove emptied directories"""

    for dirname, dirs, files in os_walk(path, topdown=False):
        to_purge = [
            f for f in files
            if (f.endswith('.pyc') or f.endswith('.pyo'))
            and f[:-1] not in files     # don't purge if it has a .py
        ]
        for f in to_purge:
            filename = os_path.join(dirname, f)
            if verbose:
                print "deleting", filename
            if not dry_run:
                os_unlink(filename)

        if to_purge and files==to_purge:
            for d in dirs:
                # Do any of the subdirectories still exist?
                if os_path.exists(os_path.join(dirname, d)):
                    # If so, we've done all we can
                    break
            else:
                # Go ahead and remove the current directory
                if verbose:
                    print "removing ", dirname
                if not dry_run:
                    os_removedirs(dirname)
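The loop over the subdirectories above relies on Python's for/else: the else block runs only when the loop finishes without hitting break, i.e. when none of the subdirectories still exists. A tiny sketch of that control flow, independent of this project:

items = ['a', 'b', 'c']
for item in items:
    if item == 'x':      # never true here
        break
else:
    # reached only because the loop completed without break
    print('no item triggered break')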
Example #4
def test_all_imports_pyx():
   """ Tests: test_all_imports_pyx: for rebuild, syntax correctness and internal imports
   """
   print('::: TEST: test_all_imports_pyx()')
   remove_files = []
   remove_dirs = []
   all_modules_path = []
   for root, dirnames, filenames in os_walk(ROOT_PACKAGE_PATH):
      all_modules_path.extend(glob(root + '/*.pyx'))
   for pyx_module_file_path in all_modules_path:
      module_filename = path_basename(pyx_module_file_path)
      module_filename_no_ext = path_splitext(module_filename)[0]

      cython_extension_module_path, cython_module_c_file_path, cython_build_dir_path = build_cython_extension(
         pyx_module_file_path,
         cython_force_rebuild=True
      )

      so_loader = ExtensionFileLoader(module_filename_no_ext, cython_extension_module_path)
      so_loader.load_module(module_filename_no_ext)
      # add for cleanup
      remove_files.append(cython_module_c_file_path)
      remove_dirs.append(cython_build_dir_path)

   # Cleanup
   try:
      for file_ in remove_files:
         if path_exists(file_):
            os_remove(file_)
      for dir_ in remove_dirs:
         if path_exists(dir_):
            rmtree(dir_)
   except Exception as err:
      raise Exception('test_all_imports_pyx', 'Could not cython_clean_up: Exception: <{}>'.format(err))
Example #5
def get_all_files( directories, ext_list ):
    for directory in directories:
        if os_path.isdir( directory ):
            for dirpath, dir, filenames in os_walk( directory ):
                for file in filenames:
                    path = os_path.abspath( os_path.join( dirpath, file ) )
                    junk, ext = os_path.splitext( path )
                    if ext in ext_list:
                        yield path
Example #6
	def searchMusic(self):
		midx = 0
		for root, dirs, files in os_walk(self.MusicDir ):
			for name in files:
				name = name.lower()
				if name.endswith(".mp3") or name.endswith(".mp2") or name.endswith(".ogg") or name.endswith(".wav") or name.endswith(".flac") or name.endswith(".m4a"):
					self.musicList.append(name)
					if self.curFile in name:
						self.Mindex = midx
					midx = midx + 1
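The chain of endswith calls here (and in the later picture-scanning examples) can be collapsed, since str.endswith also accepts a tuple of suffixes; a minimal sketch, with the extension tuple and helper name introduced only for illustration:

AUDIO_EXTS = ('.mp3', '.mp2', '.ogg', '.wav', '.flac', '.m4a')

def is_audio_file(name):
    # str.endswith accepts a tuple, so one call replaces the chained checks
    return name.lower().endswith(AUDIO_EXTS)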
Example #7
    def execute(self, cmd_args):

        config = self.config

        level = 'INFO'
        logger = get_logger(config, streaming=True, level=level)
        logger.info('Updating task list...')

        task_dir = config['taskdir']

        # get python files in task folder
        filepaths = []
        for dir_name, _, file_list in os_walk(task_dir):
            for fname in file_list:
                if fname.endswith('.py'):
                    filepaths.append(path.join(dir_name, fname))

        # get list of tasks
        task_list = []
        for filepath in filepaths:

            with open(filepath, 'r', encoding='utf-8') as file:
                text = file.read()

            parsed = ast.parse(text)

            classname = None
            method_name = None

            for node in ast.walk(parsed):

                if isinstance(node, ast.ClassDef):

                    CandidateTask = load_class(filepath, node.name)
                    if issubclass(CandidateTask, Task):
                        classname = node.name

                elif isinstance(node, ast.FunctionDef) and node.name == 'run':
                    method_name = node.name

            if classname and method_name:
                task_data = dict(
                    classname=classname,
                    filepath=filepath,
                    description='Task automatically parsed by Drummer')
                task_list.append(task_data)

                logger.info(f'Added task {classname} from file {filepath}')

        tasks_filename = path.join(config['base_dir'],
                                   'config/drummer-tasks.yml')
        write_yaml(tasks_filename, task_list)

        logger.info(f'Task update completed.')
Example #8
def scanDevice(mountpoint):
	scanner = [ ]

	for p in plugins.getPlugins(PluginDescriptor.WHERE_FILESCAN):
		l = p()
		if not isinstance(l, list):
			l = [l]
		scanner += l

	print "scanner:", scanner

	res = { }

	# merge all to-be-scanned paths, with priority to 
	# with_subdirs.

	paths_to_scan = set()

	# first merge them all...
	for s in scanner:
		paths_to_scan.update(set(s.paths_to_scan))

	# ...then remove with_subdir=False when same path exists
	# with with_subdirs=True
	for p in list(paths_to_scan):
		if p.with_subdirs == True and ScanPath(path=p.path) in paths_to_scan:
			paths_to_scan.remove(ScanPath(path=p.path))

	from Components.Harddisk import harddiskmanager	
	blockdev = mountpoint.rstrip("/").rsplit('/',1)[-1]
	error, blacklisted, removable, is_cdrom, partitions, medium_found = harddiskmanager.getBlockDevInfo(blockdev)

	# now scan the paths
	for p in paths_to_scan:
		path = os_path.join(mountpoint, p.path)

                cmd = "ls " + path
                system(cmd)
		for root, dirs, files in os_walk(path):
			for f in files:
				path = os_path.join(root, f)
				if is_cdrom and path.endswith(".wav") and path[-13:-6] == ("/track-"):
					sfile = ScanFile(path,"audio/x-cda")
				else:
					sfile = ScanFile(path)
				for s in scanner:
					s.handleFile(res, sfile)

			# if we really don't want to scan subdirs, stop here.
			if not p.with_subdirs:
				del dirs[:]

	# res is a dict with scanner -> [ScanFiles]
	return res
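Several of these scanners stop os.walk from descending by clearing dirs in place (del dirs[:]); mutating the list that os.walk hands back is the supported way to prune the traversal, and it only works with the default topdown=True. A minimal sketch of the idiom on its own (list_top_level_only is a hypothetical name):

from os import walk
from os.path import join

def list_top_level_only(path):
    found = []
    for root, dirs, files in walk(path):
        found.extend(join(root, f) for f in files)
        del dirs[:]   # prune: do not descend into any subdirectory
    return found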
Example #9
def getFileList(dirName, fileType='xml'):
    listOfFiles = list()
    for (dirpath, _, filenames) in os_walk(dirName[0]):
        listOfFiles += [
            os_join(dirpath, file) for file in filenames
            if fileType in file.split('.')[-1]
        ]
    if LOGGING:
        message = '\n'.join(listOfFiles)
        with open('./log.txt', 'a') as f:
            f.write('\n\nFILE LIST:\n')
            f.write(message)
    return listOfFiles
Example #10
 def size(self):
     """
     Return the total size (bytes) of all files in the directory.
     Courtesy: https://stackoverflow.com/a/1392549
     """
     total_size = 0
     for dpath, dnames, filenames in os_walk(self.path):
         for f in filenames:
             fp = os_path.join(dpath, f)
             # skip if it is symbolic link
             if not os_path.islink(fp):
                 total_size += os_path.getsize(fp)
     return total_size
Example #11
def test_all_imports_py():
   """ Tests: test_all_imports_py: for syntax correctness and internal imports
   """
   print('::: TEST: test_all_imports_py()')
   all_modules_path = []
   for root, dirnames, filenames in os_walk(ROOT_PACKAGE_PATH):
      all_modules_path.extend(glob(root + '/*.py'))
   for py_module_file_path in all_modules_path:
      module_filename = path_basename(py_module_file_path)
      module_filename_no_ext = path_splitext(module_filename)[0]

      py_loader = SourceFileLoader(module_filename_no_ext, py_module_file_path)
      py_loader.load_module(module_filename_no_ext)
Example #12
    def __init__(self, directoryToScan, outputFile, prefix):
        self.templates = set()

        # NOTE: This literal must be absent in all the log files.
        self.VARIABLE_IN_TEMPLATE = chr(40960)
        self.outputFile = outputFile

        self.filesToExamine = []
        for subdir, dirs, files in os_walk(directoryToScan):
            for f in files:
                if f.startswith(prefix):
                    self.filesToExamine.append(subdir + OS_PATH_SEP + f)
        self.outputFile = outputFile
Example #13
def main():
    from os import walk as os_walk
    from os.path import splitext
    from utils import mkpath

    ROOT = "../"
    EXTENSIONS = (".py", ".md")

    print("Replacing tabs with spaces...")
    for path, folders, files in os_walk(mkpath(ROOT)):
        for filename in files:
            if splitext(filename)[1] in EXTENSIONS:
                print(mkpath(path, filename))
                replace_tabs(mkpath(path, filename))
Example #14
 def files_ls(path, extensions=None, kind=None) -> OrderedDict:
     _files = OrderedDict()
     if path is not None:
         for cur_path, _, files in os_walk(path):
             for f in files:
                 if extensions is None or op.splitext(f)[1] in extensions:
                     _files[Local._common_name(
                         op.join(cur_path, f),
                         path)] = Item(path=op.join(cur_path, f),
                                       kind=kind,
                                       size=op.getsize(op.join(cur_path,
                                                               f)),
                                       is_dir=False)
     return _files
Example #15
File: plugin.py Project: satdreamgr/Panel
	def Show_Picture(self):
		if self.whatPic is not None:
			self.EXpicload.setPara([self["Picture"].instance.size().width(), self["Picture"].instance.size().height(), self.EXscale[0], self.EXscale[1], 0, 1, "#002C2C39"])
			self.EXpicload.startDecode(self.whatPic)
		if self.whatDir is not None:
			pidx = 0
			for root, dirs, files in os_walk(self.whatDir ):
				for name in files:
					if name.endswith(".jpg") or name.endswith(".jpeg") or name.endswith(".Jpg") or name.endswith(".Jpeg") or name.endswith(".JPG") or name.endswith(".JPEG"):
						self.picList.append(name)
						if name in self.whatPic:
							self.Pindex = pidx
						pidx = pidx + 1
			files.sort()
Example #16
def test_all_imports_py():
    """ Tests: test_all_imports_py: for syntax correctness and internal imports
   """
    print('::: TEST: test_all_imports_py()')
    all_modules_path = []
    for root, dirnames, filenames in os_walk(ROOT_PACKAGE_PATH):
        all_modules_path.extend(glob(root + '/*.py'))
    for py_module_file_path in all_modules_path:
        module_filename = path_basename(py_module_file_path)
        module_filename_no_ext = path_splitext(module_filename)[0]

        py_loader = SourceFileLoader(module_filename_no_ext,
                                     py_module_file_path)
        py_loader.load_module(module_filename_no_ext)
Example #17
    def draw_elements(self):
        self.frame_tree = ttk.Frame(master=self)
        self.tree = ttk.Treeview(master=self.frame_tree,
                                 columns='name start finish keywords',
                                 height=20)
        self.scroll_tree_y = ttk.Scrollbar(master=self.frame_tree,
                                           orient='vertical',
                                           command=self.tree.yview)
        self.tree.configure(yscroll=self.scroll_tree_y.set)
        self.tree.column('#0', width=70)
        self.tree.heading('#0', text=get_name('project_num'))
        self.tree.heading('name', text=get_name('name'))
        self.tree.column('start', width=110)
        self.tree.heading('start', text=get_name('start'))
        self.tree.column('finish', width=110)
        self.tree.heading('finish', text=get_name('finish'))
        self.tree.heading('keywords', text=get_name('keywords'))

        self.projects = []
        self.selected_proj = None
        for root, _, files in os_walk(settings["projects_dir"]):
            if project_file in files:
                self.projects.append(os_path.join(root, project_file))

        for ix, proj in enumerate(self.projects, start=1):
            self.tree.insert('', 'end', ix, text=ix)

            with open(proj, encoding='utf-8') as f:
                pd = json_load(f)

            self.tree.set(ix, 'name', pd['name'])
            self.tree.set(
                ix, 'start', ' '.join((pd['timeslot']['start']['date'],
                                       pd['timeslot']['start']['time'])))
            self.tree.set(
                ix, 'finish', ' '.join((pd['timeslot']['finish']['date'],
                                        pd['timeslot']['finish']['time'])))
            self.tree.set(ix, 'keywords', ' '.join(pd['keywords']))

        self.lbl = ttk.Label(master=self,
                             text='{0}: {1}'.format(
                                 get_name("total_num_of_proj"),
                                 len(self.projects)))

        self.lbl.pack(fill=X)
        self.frame_tree.pack(fill=BOTH, expand=1)
        self.tree.pack(fill=Y, expand=1, side=LEFT)
        self.scroll_tree_y.pack(fill=Y, expand=1, side=RIGHT)
        self.tree.bind('<Double-ButtonRelease-1>', self.choose_proj)
Example #18
File: Scanner.py Project: aitchala/enigma2
def scanDevice(mountpoint):
    scanner = []

    for p in plugins.getPlugins(PluginDescriptor.WHERE_FILESCAN):
        l = p()
        if not isinstance(l, list):
            l = [l]
        scanner += l

    print "scanner:", scanner

    res = {}

    # merge all to-be-scanned paths, with priority to
    # with_subdirs.

    paths_to_scan = set()

    # first merge them all...
    for s in scanner:
        paths_to_scan.update(set(s.paths_to_scan))

    # ...then remove with_subdir=False when same path exists
    # with with_subdirs=True
    for p in list(paths_to_scan):
        if p.with_subdirs == True and ScanPath(path=p.path) in paths_to_scan:
            paths_to_scan.remove(ScanPath(path=p.path))

    cdfsRegex = re_compile('^track-[0-9]{2}.wav$')

    # now scan the paths
    for p in paths_to_scan:
        path = os_path.join(mountpoint, p.path)

        for root, dirs, files in os_walk(path):
            for f in files:
                mimeType = None
                if cdfsRegex.match(f):
                    mimeType = "audio/x-cda"
                sfile = ScanFile(os_path.join(root, f), mimeType)
                for s in scanner:
                    s.handleFile(res, sfile)

            # if we really don't want to scan subdirs, stop here.
            if not p.with_subdirs:
                del dirs[:]

    # res is a dict with scanner -> [ScanFiles]
    return res
Example #19
File: Scanner.py Project: popazerty/12
def scanDevice(mountpoint):
	scanner = [ ]

	for p in plugins.getPlugins(PluginDescriptor.WHERE_FILESCAN):
		l = p()
		if not isinstance(l, list):
			l = [l]
		scanner += l

	print "scanner:", scanner

	res = { }

	# merge all to-be-scanned paths, with priority to 
	# with_subdirs.

	paths_to_scan = set()

	# first merge them all...
	for s in scanner:
		paths_to_scan.update(set(s.paths_to_scan))

	# ...then remove with_subdir=False when same path exists
	# with with_subdirs=True
	for p in list(paths_to_scan):
		if p.with_subdirs == True and ScanPath(path=p.path) in paths_to_scan:
			paths_to_scan.remove(ScanPath(path=p.path))

	cdfsRegex = re_compile('^track-[0-9]{2}.wav$')

	# now scan the paths
	for p in paths_to_scan:
		path = os_path.join(mountpoint, p.path)

		for root, dirs, files in os_walk(path):
			for f in files:
				mimeType = None
				if cdfsRegex.match(f):
					mimeType = "audio/x-cda"
				sfile = ScanFile(os_path.join(root, f), mimeType)
				for s in scanner:
					s.handleFile(res, sfile)

			# if we really don't want to scan subdirs, stop here.
			if not p.with_subdirs:
				del dirs[:]

	# res is a dict with scanner -> [ScanFiles]
	return res
Example #20
def copy_files(source_dir, dest_dir, dest_only=False, ext_only=None):
    """ Copy files from @source_dir to @dest_dir.

        @param source_dir Directory from where to take files.
        @param dest_dir Directory where to copy files.
        @param dest_only If True, copy only those files that already exist in @dest_dir.
        @param ext_only Extension (or an iterable of extensions) that files must have to be copied.
    """
    source_dir = os_path.expanduser(source_dir)
    dest_dir = os_path.expanduser(dest_dir)

    if not os_path.isdir(source_dir):
        print(" ".join((">>>", source_dir, "is not a directory or there is no such directory")))
        return

    elif not os_path.isdir(dest_dir):
        print(" ".join((">>>", dest_dir, "is not a directory or there is no such directory")))
        return

    #determine files to copy
    if dest_only:
        files_to_copy = (file_name for file_name in next(os_walk(source_dir))[2] if file_name in set(next(os_walk(dest_dir))[2]))
    else:
        files_to_copy = (file_name for file_name in next(os_walk(source_dir))[2])

    #filter by extensions
    if ext_only:
        ext_only = set([ext_only]) if isinstance(ext_only, str) else set(ext_only)
        files_to_copy = (file_name for file_name in files_to_copy if file_name.rsplit(".", 1)[-1] in ext_only)

    for file_name in files_to_copy:
        try:
            copy(os_path.join(source_dir, file_name), dest_dir)
        except IOError as errno:
            print(" ".join(("Cannot copy", file_name, "from", source_dir, "to", dest_dir)))
            print(" ".join((">>>Exception:", str(errno))))
Example #21
def main(directory, out_file, secondary=False):
    (_, _, fnames) = os_walk(directory).next()

    descriptions = {}
    sys.stderr.write("\n")

    for ix, fname in enumerate(fnames):
        with open(directory + "/" + fname, 'rU') as f:
            descriptions[fname] = describe_file(f)
            sys.stderr.write("\rProcessed %d" % (ix + 1))
            sys.stderr.flush()

    sys.stderr.write("\n")

    MIN_SIZE = 0
    file_scores = [
        f for f, (line_count, uniqs, size) in descriptions.items()
        if (uniqs * size) >= MIN_SIZE
    ]
    #    file_scores.sort(reverse = True)
    #    top_files = [ f for (_, f) in file_scores[:20] ]

    uniqs = dict([(f, uniqs) for f, (_, uniqs, _) in descriptions.items()])
    max_sizes = dict([(f, max_size)
                      for f, (_, _, max_size) in descriptions.items()])

    trace_files_infos = {}
    trace_files_infos['fnames'] = file_scores
    trace_files_infos['uniqs'] = uniqs
    trace_files_infos['max_item_szs'] = max_sizes

    cache_sizes = {}

    if secondary:
        with open(secondary, 'r') as read_allotments:
            for line in read_allotments:
                if line == "":
                    continue
                line = line.split(' --> ')
                name = "app_%s.traces" % line[0].strip()
                allotted = line[1].strip()
                if name in file_scores:
                    cache_sizes[name] = allotted

    trace_files_infos['cache_sizes'] = cache_sizes

    with open(out_file, 'w') as out:
        json.dump(trace_files_infos, out)
Example #22
 def compressDirectory(self, output, format, target):
     if format == "zip":
         try:
             zipf = ZipFile(output + ".zip", 'w', ZIP_DEFLATED)
             for root, dirs, files in os_walk(target):
                 for file in files:
                     zipf.write(os_path.join(root, file),
                                os_path.basename(os_path.join(root, file)))
             zipf.close()
         except Exception as e:
             try:
                 shutil_make_archive(output, "zip", target)
             except Exception as e:
                 raise Exception("Failed while compressing directory")
     else:
         raise Exception(
             "Failed while compressing directory. Format not supported")
Example #23
    def listar_arquivos2(self, alvo):
        """Lista os arquivos de um diretório para o backup"""

        lista_arquivos = []
        for folder, _, file in os_walk(alvo):
            for f in file:

                # File to be moved
                arquivo = os_path.join(folder, f)

                # Do not update the update file
                if 'Upgrade' in arquivo or 'backups/' in arquivo or '.git/' in arquivo or 'github/ISSUE_TEMPLATE' in arquivo or '__pycache__' in arquivo:
                    #print("Arquivo update ignorado")
                    continue

                lista_arquivos.append(arquivo)
        return lista_arquivos
Example #24
def gen_file_list(args):
    """ Generate list of picutres """
    allowed_ext = {"png", "jpeg", "jpg"}

    for arg in args:
        if isfile(arg):
            if not arg.rsplit(".", 1)[-1] in allowed_ext:
                print("".join((">>>File ", arg, " is not png or jpg/jpeg")))
            yield arg

        elif isdir(arg):
            for pic in (file_name for file_name in next(os_walk(arg))[2] if file_name.rsplit(".", 1)[-1] in allowed_ext):
                yield path_join(arg, pic)

        else:
            print("".join((">>>", arg, " no such file or directory")))
            continue
Example #25
def loadFramesFromFrames(path):
    global frames, frame_count, source_width, source_height, result, count, ones, cap
    fc = 0
    for root, dirs, files in os_walk(path):
        fc = len(files)
        files.sort()
        for file in files:
            fp = os_path_join(root, file)
            print(f"Reading {fp}")
            frames.append(cv2.imread(fp, cv2.IMREAD_COLOR))

    cap = frames[0]
    source_height, source_width, _ = cap.shape
    result = np.zeros((source_height // 4 * 3, source_width // 4 * 3, 3))
    count = np.zeros((source_height // 4 * 3, source_width // 4 * 3))
    ones = np.ones(((source_height // 4, source_width // 4)))
    frame_count = fc
Example #26
 def dbfs_ls(path) -> OrderedDict:
     _objects = OrderedDict()
     if path is not None:
         for cur_path, dirs, files in os_walk(path):
             for f in files:
                 _objects[Local._common_dbfs_name(
                     op.join(cur_path, f),
                     path)] = Item(path=op.join(cur_path, f),
                                   kind='dbfs file',
                                   size=op.getsize(op.join(cur_path, f)),
                                   is_dir=False)
             for d in dirs:
                 _objects[Local._common_dbfs_name(op.join(
                     cur_path, d), path)] = Item(path=op.join(cur_path, d),
                                                 kind='dbfs directory',
                                                 is_dir=True)
     return _objects
Example #27
 def prepareListForSelector(self, myPath, myFilter = "" , SelectorTitle = "Select item" ):
     self.myList = []
     for root, dirs, files in os_walk(myPath):
         files.sort()
         for file in files:
             if file.endswith(".png"):
                 #print os_path.join(root, file)
                 if myFilter == "":
                     self.myList.append( ( file[:-4] , os_path.join(root, file) ) )
                 elif not file.find(myFilter):
                     self.myList.append( ( file[:-4] , os_path.join(root, file) ) )
                   
     #print self.myList
     if len(self.myList) >= 1:
         self.session.openWithCallback(self.SelectorCallback, SelectorWidget, list = self.myList, CurIdx = self.curIndex, Mytitle = SelectorTitle )
     else:
         self.close()
     return
Example #28
def gen_file_list(args):
    """ Generate list of picutres """
    allowed_ext = {"png", "jpeg", "jpg"}

    for arg in args:
        if isfile(arg):
            if not arg.rsplit(".", 1)[-1] in allowed_ext:
                print("".join((">>>File ", arg, " is not png or jpg/jpeg")))
            yield arg

        elif isdir(arg):
            for pic in (file_name for file_name in next(os_walk(arg))[2]
                        if file_name.rsplit(".", 1)[-1] in allowed_ext):
                yield path_join(arg, pic)

        else:
            print("".join((">>>", arg, " no such file or directory")))
            continue
Example #29
 def workspace_ls(path) -> OrderedDict:
     _objects = OrderedDict()
     if path is not None:
         for cur_path, dirs, files in os_walk(path):
             for f in files:
                 if op.splitext(f)[1] in NOTEBOOK_EXTENSIONS:
                     _objects[Local._common_name(
                         op.join(cur_path, f),
                         path)] = Item(path=op.join(cur_path, f),
                                       kind='workspace notebook',
                                       language=NOTEBOOK_EXTENSIONS[
                                           op.splitext(f)[1]],
                                       is_dir=False)
             for d in dirs:
                 _objects[Local._common_name(op.join(
                     cur_path, d), path)] = Item(path=op.join(cur_path, d),
                                                 kind='workspace directory',
                                                 is_dir=True)
     return _objects
Example #30
def ci_cmd_exec(cam, baseName, expectedStrings, unexpectedStrings=None):
    print('Executing %s script...' % baseName)
    scriptRoot = os_path.realpath(
        os_path.dirname(inspect.getfile(inspect.currentframe())))
    scriptPath = [
        os_path.join(dp, f) for dp, dn, filenames in os_walk(scriptRoot)
        for f in filenames if os_path.splitext(f)[0] == baseName
    ][0]
    try:
        result = cam.launchScript(scriptPath).replace('\0', '')
    except Exception as e:
        print('#ERROR: Script was not executed. Reason: ', e)
        return False
    print('CAM response:')
    print('---------------')
    print(result)
    print('---------------')
    testResult = cam.checkResponse(result, expectedStrings, unexpectedStrings)
    return testResult
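The single [0] index on the list comprehension above raises IndexError when no file under scriptRoot matches baseName; a slightly more defensive variant (a sketch reusing the example's scriptRoot and baseName, not this project's code) makes that failure explicit:

from os import walk as os_walk
from os import path as os_path

matches = [
    os_path.join(dp, f) for dp, dn, filenames in os_walk(scriptRoot)
    for f in filenames if os_path.splitext(f)[0] == baseName
]
if not matches:
    raise FileNotFoundError('no script named %r under %r' % (baseName, scriptRoot))
scriptPath = matches[0]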
Example #31
 def filter(self, inputFilePrefix, directoryToScan, outputDirectory, word):
     for subdir, dirs, files in os_walk(directoryToScan):
         for file in files:
             if file.startswith(inputFilePrefix):
                 # Valid file.
                 infile = subdir + OS_PATH_SEP + file
                 if outputDirectory == None:
                     outputDirectory = subdir + OS_PATH_SEP + 'Filtered_Files'
                 if not check_directory_exists(outputDirectory):
                     make_directory(outputDirectory)
                 outfile = outputDirectory + OS_PATH_SEP + file
                 with open(infile, 'r', encoding = "utf8") as inf, open(outfile, 'w+', encoding = 'utf8') as of:
                     for line in inf:
                         if word in line:
                             # Skip the line.
                             continue
                         else:
                             # Write to the new file.
                             of.write(line)
Example #32
    def load_tests(_, tests, __):  # pylint: disable=redefined-outer-name,unused-argument
        finder = DocTestFinder(exclude_empty=False)

        for root_mod in roots:
            if isinstance(root_mod, ModuleType):
                root_mod_path, root_mod_name = root_mod.__file__, root_mod.__name__
            else:
                root_mod_path, root_mod_name = root_mod

            if splitext(basename(root_mod_path))[0] == "__init__":
                root_mod_path = dirname(root_mod_path)

            if isfile(root_mod_path):
                root_mod_iter = ((dirname(root_mod_path), None, (basename(root_mod_path),)),)
            else:
                root_mod_iter = os_walk(root_mod_path)

            for dir_name, _, file_names in root_mod_iter:
                if not re_match(re_escape(root_mod_path) + _PATH_RE, dir_name):
                    continue

                mod_name = dir_name[len(root_mod_path) :].replace(ospath_sep, ".").strip(".")

                if mod_name:
                    mod_name = root_mod_name + "." + mod_name
                else:
                    mod_name = root_mod_name

                for file_name in file_names:
                    if not file_name.endswith(".py"):
                        continue

                    if file_name == "__init__.py":
                        test_mod_name = mod_name
                    else:
                        test_mod_name = mod_name + "." + splitext(file_name)[0]

                    try:
                        tests.addTest(DocTestSuite(test_mod_name, test_finder=finder))
                    except Exception as err:  # pylint: disable=broad-except
                        _LOGGER.warning("unable to load doctests from %s (%s)", test_mod_name, err, exc_info=True)

        return tests
Example #33
def ZieldateienFinden(zielordner, dateimuster, ignoriereOrdnernamen=['alt']):
    """Ermittle im zielordner und allen Unterordnern alle Dateien, deren Namen dateimuster enthaelt.
   Falls ignoriereOrdnernamen keine leere Liste ist, werden alle Unterordner in der Liste ignoriert
   (deren Name einem der Eintraege entspricht).
   Gibt eine Liste aller Dateinamen der so ausgewaehlten Dateien zurueck.
   
   Kann auch zum Testen von Bodenmusterdatei-Eintraegen verwendet werden. Wenn Pfad und regulaerer
   Ausdruck richtig gewaehlt sind, sollten exakt die gewuenschten Dateien aufgelistet/eingelesen
   werden. Fuer die Wahl von sinnvollen regulaeren Ausdrucken sei auf das Python-Modul re und
   die offizielle Dokumentation verwiesen (Standard Library Reference unter https://docs.python.org)
   """
    from os import walk as os_walk
    from os import sep as os_sep
    from os.path import join as os_join
    from re import compile as re_compile
    from re import search as re_search
    #
    if ((dateimuster == '*') or (dateimuster == '*.*')):
        dateimuster = ''
    #
    if (zielordner == ''):
        zielordner = '.'
    #
    if (zielordner[-1] != os_sep):
        zielordner += os_sep
    #
    remuster = re_compile(dateimuster)
    zieldateiliste = []
    for (pfad, _, dateiliste) in os_walk(zielordner):
        # Ignore all files in (sub)folders that match an entry in ignoriereOrdnernamen
        unterpfad = pfad[len(zielordner):].split(os_sep)
        if (any([pfadteil in ignoriereOrdnernamen for pfadteil in unterpfad])):
            continue
        #
        for datei in dateiliste:
            if (re_search(remuster, datei)):
                zieldateiliste += [os_join(pfad, datei)]
    #
    if (len(zieldateiliste) > 0):
        zieldateiliste.sort()
    #
    return zieldateiliste
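A hypothetical call to ZieldateienFinden, to make the regex-based filtering concrete (the directory name and pattern below are made up for illustration):

# Collect every file under ./daten whose name contains "boden" followed by
# digits and ends in ".csv", skipping any subfolder named "alt" (the default).
dateien = ZieldateienFinden('daten', r'boden\d+\.csv$')
for name in dateien:
    print(name)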
Example #34
def test_all_py_to_cython_compiled():
   """ Tests: test_all_py_to_cython_compiled: for syntax correctness and internal imports: all .py files compiled with
   cython: except '__init__'
   """
   print('::: TEST: test_all_py_to_cython_compiled()')
   remove_files = []
   remove_dirs = []

   all_modules_path = []
   for root, dirnames, filenames in os_walk(ROOT_PACKAGE_PATH):
      all_modules_path.extend(glob(root + '/*.py'))
   for py_module_file_path in all_modules_path:
      module_filename = path_basename(py_module_file_path)
      module_filename_no_ext = path_splitext(module_filename)[0]
      if '__init__' in module_filename:
         continue

      cython_extension_module_path, cython_module_c_file_path, cython_build_dir_path = build_cython_extension(
         py_module_file_path,
         cython_force_rebuild=True
      )

      # noinspection PyUnusedLocal
      so_loader = ExtensionFileLoader(module_filename_no_ext, cython_extension_module_path)

      # sometimes (if an extension module was built previously) the loading does not work with 'nose tests'
      # so_loader.load_module(module_filename_no_ext)

      # add for cleanup : inclusive the .so extension file
      remove_files.extend([cython_module_c_file_path, cython_extension_module_path])
      remove_dirs.append(cython_build_dir_path)

   # Cleanup
   try:
      for file_ in remove_files:
         if path_exists(file_):
            os_remove(file_)
      for dir_ in remove_dirs:
         if path_exists(dir_):
            rmtree(dir_)
   except Exception as err:
      raise Exception('test_all_py_to_cython_compiled', 'Could not cython_clean_up: Exception: <{}>'.format(err))
Example #35
    def listar_arquivos(self, alvo, versao):
        """Lista os arquivos de um diretório baixado"""

        # Eventually these functions will be unified; there is a lot of duplicated code
        lista_arquivos = []
        for folder, _, file in os_walk(alvo):
            for f in file:

                # File to be moved
                arquivo = os_path.join(folder, f)

                # Do not update the update file
                if 'Upgrade' in arquivo or 'backups/' in arquivo:
                    print("Arquivo update ignorado")
                    continue

                arquivo = arquivo.split('safira-ide-{}'.format(versao))[1]

                lista_arquivos.append(arquivo)
        return lista_arquivos
Example #36
def get_files(file_list):
    """ Returns a list of all the files in filename.

    """

    from os.path import isdir as os_isdir
    from os.path import isfile as os_isfile
    from os.path import join as os_join
    from os import walk as os_walk

    out_list = []

    for name in file_list:
        if os_isdir(name):
            for root, sub, files in os_walk(name):
                join_list = [os_join(root, f) for f in files]
                out_list.extend(join_list)
        else:
            out_list.append(name)

    return out_list
Example #37
File: plugin.py Project: torac/enigma2
 def execBegin(self):
     self.text.setText(_("Scanning %s...") % (self.providerName))
     self.progressbar.setValue(0)
     self.scan = eFastScan(self.scanPid, self.providerName, self.keepNumbers, self.keepSettings)
     self.scan.scanCompleted.get().append(self.scanCompleted)
     self.scan.scanProgress.get().append(self.scanProgress)
     fstfile = None
     fntfile = None
     for root, dirs, files in os_walk("/tmp/"):
         for f in files:
             if f.endswith(".bin"):
                 if "_FST" in f:
                     fstfile = os_path.join(root, f)
                 elif "_FNT" in f:
                     fntfile = os_path.join(root, f)
     if fstfile and fntfile:
         self.scan.startFile(fntfile, fstfile)
         os_unlink(fstfile)
         os_unlink(fntfile)
     else:
         self.scan.start(self.scanTuner)
Example #38
File: plugin.py Project: OpenLD/enigma2
	def doServiceScan(self):
		self["scan_state"].setText(_('Scanning %s...') % (self.providerName))
		self["scan_progress"].setValue(0)
		self.scan = eFastScan(self.scanPid, self.providerName, self.transponderParameters, self.keepNumbers, self.keepSettings)
		self.scan.scanCompleted.get().append(self.scanCompleted)
		self.scan.scanProgress.get().append(self.scanProgress)
		fstfile = None
		fntfile = None
		for root, dirs, files in os_walk('/tmp/'):
			for f in files:
				if f.endswith('.bin'):
					if '_FST' in f:
						fstfile = os_path.join(root, f)
					elif '_FNT' in f:
						fntfile = os_path.join(root, f)
		if fstfile and fntfile:
			self.scan.startFile(fntfile, fstfile)
			os_unlink(fstfile)
			os_unlink(fntfile)
		else:
			self.scan.start(self.scanTuner)
Example #39
	def doServiceScan(self):
		self["scan_state"].setText(_('Scanning %s...') % (self.providerName))
		self["scan_progress"].setValue(0)
		self.scan = eFastScan(self.scanPid, self.providerName, self.transponderParameters, self.keepNumbers, self.keepSettings)
		self.scan.scanCompleted.get().append(self.scanCompleted)
		self.scan.scanProgress.get().append(self.scanProgress)
		fstfile = None
		fntfile = None
		for root, dirs, files in os_walk('/tmp/'):
			for f in files:
				if f.endswith('.bin'):
					if '_FST' in f:
						fstfile = os_path.join(root, f)
					elif '_FNT' in f:
						fntfile = os_path.join(root, f)
		if fstfile and fntfile:
			self.scan.startFile(fntfile, fstfile)
			os_unlink(fstfile)
			os_unlink(fntfile)
		else:
			self.scan.start(self.scanTuner)
Example #40
File: import_util.py Project: zepto/musio
def _build_mod_list(mod_path: list) -> list:
    """ _build_mod_list(mod_path, suffix) -> Add all the paths in mod_path to
    sys.path and return a list of all modules in sys.path ending in suffix.

    """

    mod_path = [mod_path] if type(mod_path) is str else mod_path

    # Add the path of this file to the search path.
    mod_path.append(os_abspath(os_dirname(__file__)))

    # Build the list of modules in mod_path(s).
    mod_list = ('{0}.{1}.{2}'.format(os_basename(path), \
                    os_basename(root).replace(os_basename(path), ''), \
                    name.rsplit('.', 1)[0]).replace('..', '.') \
                    for path in mod_path \
                        if os_isdir(path) \
                            for root, dirs, files in os_walk(path) \
                                for name in files \
                                    if name.endswith('.py'))

    return mod_list
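The nested generator expression above is compact but hard to follow; unrolled into ordinary loops (a readability sketch using the same os_* aliases, not the module's API) it does roughly this:

from os.path import isdir as os_isdir, basename as os_basename
from os import walk as os_walk

def _build_mod_list_unrolled(mod_path):
    result = []
    for path in mod_path:
        if not os_isdir(path):
            continue
        for root, dirs, files in os_walk(path):
            for name in files:
                if not name.endswith('.py'):
                    continue
                # package.subpackage.module, with any doubled dots collapsed
                dotted = '{0}.{1}.{2}'.format(
                    os_basename(path),
                    os_basename(root).replace(os_basename(path), ''),
                    name.rsplit('.', 1)[0]).replace('..', '.')
                result.append(dotted)
    return result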
Example #41
    def draw_elements(self):
        self.frame_tree = ttk.Frame(master=self)
        self.tree = ttk.Treeview(master=self.frame_tree, columns='name start finish keywords', height=20)
        self.scroll_tree_y = ttk.Scrollbar(master=self.frame_tree, orient='vertical', command=self.tree.yview)
        self.tree.configure(yscroll=self.scroll_tree_y.set)
        self.tree.column('#0', width=70)
        self.tree.heading('#0', text=get_name('project_num'))
        self.tree.heading('name', text=get_name('name'))
        self.tree.column('start', width=110)
        self.tree.heading('start', text=get_name('start'))
        self.tree.column('finish', width=110)
        self.tree.heading('finish', text=get_name('finish'))
        self.tree.heading('keywords', text=get_name('keywords'))

        self.projects = []
        self.selected_proj = None
        for root, _, files in os_walk(settings["projects_dir"]):
            if project_file in files:
                self.projects.append(os_path.join(root, project_file))

        for ix, proj in enumerate(self.projects, start=1):
            self.tree.insert('', 'end', ix, text=ix)

            with open(proj, encoding='utf-8') as f:
                pd = json_load(f)

            self.tree.set(ix, 'name', pd['name'])
            self.tree.set(ix, 'start', ' '.join((pd['timeslot']['start']['date'], pd['timeslot']['start']['time'])))
            self.tree.set(ix, 'finish', ' '.join((pd['timeslot']['finish']['date'], pd['timeslot']['finish']['time'])))
            self.tree.set(ix, 'keywords', ' '.join(pd['keywords']))

        self.lbl = ttk.Label(master=self,
                             text='{0}: {1}'.format(get_name("total_num_of_proj"), len(self.projects)))

        self.lbl.pack(fill=X)
        self.frame_tree.pack(fill=BOTH, expand=1)
        self.tree.pack(fill=Y, expand=1, side=LEFT)
        self.scroll_tree_y.pack(fill=Y, expand=1, side=RIGHT)
        self.tree.bind('<Double-ButtonRelease-1>', self.choose_proj)
Example #42
def _build_mod_list(mod_path):
    """ _build_mod_list(mod_path, suffix) -> Add all the paths in mod_path to
    sys.path and return a list of all modules in sys.path ending in suffix.

    """

    mod_path = [mod_path] if type(mod_path) is str else mod_path

    # Add the path of this file to the search path.
    mod_path.append(os_abspath(os_dirname(__file__)))

    # Build the list of modules in mod_path(s).
    mod_list = ('{0}.{1}.{2}'.format(os_basename(path), \
                    os_basename(root).replace(os_basename(path), ''), \
                    name.rsplit('.', 1)[0]).replace('..', '.') \
                    for path in mod_path \
                        if os_isdir(path) \
                            for root, dirs, files in os_walk(path) \
                                for name in files \
                                    if name.endswith('.py'))

    return mod_list
Example #43
def get_files(file_list):
    """ Returns a list of all the files in filename.

    """

    from os.path import isdir as os_isdir
    from os.path import isfile as os_isfile
    from os.path import join as os_join
    from os import walk as os_walk
    from pathlib import Path

    out_list = []
    ext = ['.mp3', '.flac', '.ogg', '.s3m', '.mod', '.xm', '.it']

    for name in file_list:
        if os_isdir(name):
            for root, sub, files in os_walk(name):
                join_list = [os_join(root, f) for f in files if Path(f.lower()).suffix in ext]
                out_list.extend(join_list)
        else:
            out_list.append(name)

    return out_list
Example #44
    def Show_Picture(self):
        if self.whatPic is not None:
            self.EXpicload.setPara([
                self['Picture'].instance.size().width(),
                self['Picture'].instance.size().height(), self.EXscale[0],
                self.EXscale[1], 0, 1, '#002C2C39'
            ])
            self.EXpicload.startDecode(self.whatPic)
        if self.whatDir is not None:
            pidx = 0
            for root, dirs, files in os_walk(self.whatDir):
                for name in files:
                    if name.endswith('.jpg') or name.endswith(
                            '.jpeg') or name.endswith('.Jpg') or name.endswith(
                                '.Jpeg') or name.endswith(
                                    '.JPG') or name.endswith('.JPEG'):
                        self.picList.append(name)
                        if name in self.whatPic:
                            self.Pindex = pidx
                        pidx = pidx + 1

            files.sort()
        return
Example #45
def collect(infolder,
            line  = comment_LINE,
            block = comment_BLOCK,
            tags  = WORDS,
            marks = MARKS,
            include=INCLUDE,
            exclude=EXCLUDE,
            overwrite=False):
    # Process block comment marks
    blocks_open, blocks_close = comment_block_comments(block)

    # TODO: Make hidden files OS independent, probably using
    #       https://docs.python.org/3.4/library/tempfile.html ?

    # FIXME: for some reason, if a comment-type ever existed in the TODO
    #        file, but after a while its posts are all gone, the keyword
    #        still remains there, according to the current TODO file,
    #        which still has the "QUESTIONS" keyword and comment

    # TODO: Add explicit-remove/browsing capabilities of the .*_cache files
    #       (for example: if git reverted changes --> remove hash from cache file)
    #       The best solution would be a complete CLI tool, to read and manage
    #       and use the cutils command line tools

    # Compile regular expression patterns
    pattern1 = re_compile(_COMMENT.format(r'|'.join(map(comment_escape, line)),
                                          blocks_open,
                                          r'|'.join(map(comment_escape, tags)),
                                          r'|'.join(map(comment_escape, marks)),
                                          blocks_close),
                         flags=re_IGNORECASE | re_DOTALL | re_MULTILINE | re_VERBOSE)
    pattern2 = re_compile(r'\n')

    # Get previously generated collection of all posts
    COLLECTED = os_path_join(infolder, '.ccom_todo')
    try:
        with open(COLLECTED, 'rb') as file:
            collected = pickle_load(file)
    except (FileNotFoundError, EOFError):
        collected = table_Table(row=OrderedDict)

    # Clear cache -- remove all non-existing files
    for filepath in collected.rows():
        if not os_path_isfile(filepath):
            del collected[filepath]

    # Exception containers
    except_dirs  = []  # relative path to dir from root
    except_files = []  # relative path to file from root
    except_names = []  # filename (with extension) anywhere
    except_exts  = []  # extension anywhere

    # If 'exclude' is dictionary like object
    try:
        _empty = ()
        # Exceptions relative to root
        for key, container in zip(('folders', 'files'),
                                  (except_dirs, except_files)):
            container.extend(os_path_join(infolder, p) for p in exclude.get(key, _empty))
        # Exceptions anywhere
        for key, container in zip(('names', 'extensions'),
                                  (except_names, except_exts)):
            container.extend(exclude.get(key, _empty))
    # If 'exclude' is an iterable object
    except AttributeError:
        except_names = exclude

    # Include containers
    permit_names = []  # filename (with extension) anywhere
    permit_exts  = []  # extension anywhere

    # If 'include' is dictionary like object
    try:
        _empty = ()
        # Includes anywhere
        for key, container in zip(('names', 'extensions'),
                                  (permit_names, permit_exts)):
            container.extend(include.get(key, _empty))
    # If 'include' is an iterable object
    except AttributeError:
        permit_names = include

    # Scan through all files and folders
    with check_Checker(infolder, file='.ccom_cache') as checker:
        for root, dirs, filenames in os_walk(infolder):
            # If skip this folder and all subfolders
            if root in except_dirs:
                dirs.clear()
                continue
            # Check all files in folder
            for filename in filenames:
                filepath = os_path_join(root, filename)[2:]
                # If skip this exact file
                if filepath in except_files:
                    continue
                name, extension = os_path_splitext(filename)
                # If file or extension is not banned and it is on the
                # white-list and it changed since last time checked and
                # this is not an overwrite-call
                if (filename not in except_names and
                    extension not in except_exts and
                    (extension in permit_exts or filename in permit_names) and
                    checker.ischanged(filepath) and
                    not overwrite):
                    with open(filepath, encoding='utf-8') as file:
                        _search(collected, pattern1, pattern2,
                                file.read(), filepath, marks)

    # Save collection of all posts
    with open(COLLECTED, 'wb') as file:
        pickle_dump(collected, file, pickle_HIGHEST_PROTOCOL)

    # Open the todo file and write out the results
    with open('TODO', 'w', encoding='utf-8') as todo:
        # Make it compatible with cver.py
        todo.write('## INFO ##\n'*2)
        # Format TODO file as yaml
        for key in itertools_chain(tags, marks.values()):
            KEY = key.upper()
            try:
                types = collected[KEY].items()
                len_pos = todo.tell()
                # Offset for separator comment and
                # leading and trailing new lines
                todo.write(' '*82)
                todo.write('{}:\n'.format(KEY))
                index = 1
                for filename, posts in types:
                    for i, (linenumber, content) in enumerate(posts, start=index):
                        todo.write(_ITEM.format(msg='\n'.join(content),
                                                index=i,
                                                short=_SHORT,
                                                long=_SHORT*2,
                                                sep='- '*38,
                                                file=filename,
                                                line=linenumber))
                    index = i + 1
                todo.write('\n')
                # Move back to tag separator comment
                todo.seek(len_pos)
                todo.write('\n#{:-^78}#\n'.format(
                    ' {} POSTS IN {} FILES '.format(index - 1, len(types))))
                # Move back to the end
                todo.seek(0, 2)
            except KeyError:
                continue
        print('CCOM: placed {!r}'.format(os_path_join(infolder, 'TODO')))
Example #46
	def removeFiles(self, targetdir):
		for root, dirs, files in os_walk(targetdir):
			for name in files:
				os_remove(os_path.join(root, name))
Example #47
 def deletePicons(self):
     myPath = resolveFilename(SCOPE_SKIN_IMAGE, 'picon/')
     for root, dirs, files in os_walk(myPath):
         for name in files:
             #print name
             os_remove(os_path.join(root, name))
Example #48
    def run(self):
        stop = False

        for folder in ['trash', 'plain', 'hash']:
            if stop:
                break

            # Let's convert the single date to a path
            arg_dir = '/'.join(self.parentArgs.dir.split('-'))
            review_folder = self.settings['data_dir'] + '/organized/' + folder + "/" + arg_dir

            if not os_path.exists(review_folder):
                print 'Folder "' + folder + '" not found, skipping...'
                continue

            files = [os_path.join(w_path, filename)
                     for w_path, dirs, files in os_walk(review_folder)
                     for filename in files
                     if not filename.endswith(".csv")]

            idx = 0

            while idx < len(files):
                rfile = files[idx]

                # Clear the screen before displaying the text
                system('cls' if name == 'nt' else 'clear')

                # Let's get the terminal size, so I can fill it with the file text
                cols, rows = get_terminal_size()

                print colorama.Fore.YELLOW + rfile
                print("")

                with open(rfile) as tfile:
                    i = 0
                    for line in tfile:
                        i += 1
                        if i >= (rows - 6):
                            break

                        if len(line) <= cols:
                            print line.strip('\n\r')
                        else:
                            print line[0:cols].strip('\n\r')

                files_left = len(files) - idx
                print("")
                print colorama.Fore.YELLOW + "Folder: " + folder + " " + colorama.Fore.CYAN + str(files_left) + colorama.Fore.YELLOW + " files left"

                input_descr = colorama.Fore.MAGENTA + "[o]"
                input_descr += colorama.Fore.CYAN + "open "
                input_descr += colorama.Fore.MAGENTA + "[s]"
                input_descr += colorama.Fore.CYAN + "kip folder "
                input_descr += colorama.Fore.MAGENTA + "[n]"
                input_descr += colorama.Fore.CYAN + "ext "
                input_descr += colorama.Fore.MAGENTA + "[p]"
                input_descr += colorama.Fore.CYAN + "revious "
                input_descr += colorama.Fore.MAGENTA + "[q]"
                input_descr += colorama.Fore.CYAN + "uit=> "

                sys_stdout.write(input_descr)
                sys_stdout.flush()

                answer = getch()

                while answer == '':
                    pass

                idx += 1

                # Opening a file with the default application AND being cross platform is a PITA...
                if answer == 'o':
                    current_os = platform_system()
                    if current_os == 'Windows':
                        from os import startfile as os_startfile
                        os_startfile(rfile)
                    elif current_os == 'Linux':
                        subprocess_call(["xdg-open", rfile])
                    elif current_os == 'Darwin':
                        system("open " + rfile)

                    # Let's start the loop again to read the new key
                    answer = getch()

                    while answer == '':
                        pass

                if answer == 'n':
                    print("")
                elif answer == 'p':
                    if idx >= 2:
                        idx -= 2
                    continue
                elif answer == 's':
                    break
                elif answer == 'q':
                    print("")
                    stop = True
                    break
                else:
                    print("")

        print(colorama.Fore.GREEN + "Review completed")
Example #49
 def iter_mbdb():
   for f in next(os_walk(self._root))[1]:
     if f != 'Snapshot':
       mbdb = path_join(self._root, f, 'Manifest.mbdb')
       if isfile(mbdb):
         yield mbdb
Example #50
    def run(self):
        need_normal_clean = True
        exclude_files = []
        remove_files = []
        remove_dirs = []

        # remove also: DIRS: `build, dist, cover, *._pyxbld, *.egg-info`
        # and FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
        if self.all:
            need_normal_clean = True
            for dir_ in {'build', 'dist', 'cover'}:
                dir_path = path_join(ROOT_PACKAGE_PATH, dir_)
                if path_exists(dir_path):
                    remove_dirs.append(dir_path)
            for root, dirs, files in os_walk(ROOT_PACKAGE_PATH):
                for dir_ in dirs:
                    if '_pyxbld' in dir_ or 'egg-info' in dir_:
                        remove_dirs.append(path_join(root, dir_))

            # remove FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
            for root, dirs, files in os_walk(MAIN_PACKAGE_PATH):
                for file_ in files:
                    if file_ not in exclude_files:
                        if path_splitext(file_)[-1] in {'.so', '.c'}:
                            remove_files.append(path_join(root, file_))

                        tmp_name, tmp_ext = path_splitext(file_)
                        if tmp_ext == '.pyx':
                            # Check if we have a html with the same name
                            check_html_path = path_join(
                                root, tmp_name + '.html')
                            if isfile(check_html_path):
                                remove_files.append(check_html_path)

        # do the general clean
        if need_normal_clean:
            for file_ in {'.coverage', 'MANIFEST'}:
                if path_exists(file_):
                    remove_files.append(file_)

            for root, dirs, files in os_walk(ROOT_PACKAGE_PATH):
                for file_ in files:
                    if file_ not in exclude_files:
                        if path_splitext(file_)[-1] in {
                                '.pyc', '.pyo', '.pyd', '.o', '.orig'
                        }:
                            remove_files.append(path_join(root, file_))
                for dir_ in dirs:
                    if '__pycache__' in dir_:
                        remove_dirs.append(path_join(root, dir_))

        # REMOVE ALL SELECTED
        # noinspection PyBroadException
        try:
            for file_ in remove_files:
                if path_exists(file_):
                    os_remove(file_)
            for dir_ in remove_dirs:
                if path_exists(dir_):
                    rmtree(dir_)
        except Exception:
            pass
NewProjectName__lower_case = NEW_PROJECT_NAME.lower()
OrigTemplateName__lower_case = 'template_pyproject'

OrigTemplateName = 'TEMPLATE_PyPROJECT'

OrigTemplateOneLineDescription = 'TEMPLATE__OneLine_PyPROJECT_Description'

# check that the TEMPLATE_PyPROJECT_DIR_PATH dir exists
if not path_isdir(TEMPLATE_PyPROJECT_DIR_PATH):
   # noinspection PyPep8
   raise Exception('\n\n\nATTENTION::::::The Specified TEMPLATE_PyPROJECT_DIR_PATH Dir does not exist:\n<{}>\n\n'.format(TEMPLATE_PyPROJECT_DIR_PATH))


DirList = []
FileList = []
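# topdown=False walks bottom-up, so deeper paths are collected before their parents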
for root, dirs, files in os_walk(TEMPLATE_PyPROJECT_DIR_PATH, topdown=False):
   for dir_ in dirs:
      DirList.append((root, dir_))
   for file_ in files:
      FileList.append((root, file_))


# FIRST: replace text in Files
for root, file_ in FileList:
   file_path = path_join(root, file_)
   # check SkipFileNames
   if path_basename(file_path) in SkipFileNames:
      continue
   with open(file_path, 'r') as file_p:
      file_content = file_p.read()
    def cmd_save_photo(self):
        dir_with_photo = filedialog.askdirectory(parent=self.master, title=get_name("dia_save_photo"), initialdir='/')
        if not dir_with_photo:
            return
        if settings["save_photo"]["save_originals"] == "True":
            file_operation = shutil_copy2
        else:
            file_operation = shutil_move

        # Get list of files to save
        pho_for_saving_without_date = []
        ph_for_saving_with_date = []

        if settings["save_photo"]["check_unsorted"] == "True":
            # Check unsorted files
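            # next(os_walk(dir)) yields (root, dirs, files); [2] is just the file names in the top level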
            files = next(os_walk(os_path.join(settings["projects_dir"], dir_unsorted)))[2]
            for file in files:
                if os_path.splitext(file)[-1].lower() in supported_image_ext:
                    found_matching = dt_in_fn_regex.match(file)
                    if found_matching:
                        try:
                            # Convert collected numeric parts into datetime object
                            ph_for_saving_with_date.append(
                                [os_path.join(settings["projects_dir"], dir_unsorted, file),
                                 datetime.strptime(str(found_matching.group(1)), '%Y-%m-%d_%H-%M-%S'),
                                 None])
                        except ValueError:
                            continue

        for root, _, files in os_walk(dir_with_photo):
            for file in files:
                if os_path.splitext(file)[-1].lower() in supported_image_ext:
                    try:
                        # Try to find date/time in metadata
                        possible_dt = et.get_data_from_image(os_path.join(root, file),
                                                             "-EXIF:DateTimeOriginal")["EXIF"]["DateTimeOriginal"]
                        # Convert collected numeric parts into datetime object
                        ph_for_saving_with_date.append([os_path.join(root,
                                                                     file),
                                                        datetime.strptime(possible_dt,
                                                                          '%Y:%m:%d %H:%M:%S'),
                                                        None])
                    # If date/time were not found in metadata too
                    except ValueError:
                        pho_for_saving_without_date.append(os_path.join(root, file))
                        continue

        # Connect photos and projects based on date/time
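        # next(os_walk(...))[1] lists the top-level project folders in settings["projects_dir"]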
        for project in next(os_walk(settings["projects_dir"]))[1]:
            if not os_path.isfile(os_path.join(settings["projects_dir"], project, project_file)):
                continue

            with open(os_path.join(os_path.join(settings["projects_dir"]),
                                   project,
                                   project_file),
                      encoding='utf-8') as _f:
                pd = json_load(_f)

            # Parse project timeslot
            prj_start = '{0} {1}'.format(pd["timeslot"]["start"]["date"], pd["timeslot"]["start"]["time"])
            prj_start = datetime.strptime(prj_start, "%d.%m.%Y %H:%M")
            prj_finish = '{0} {1}'.format(pd["timeslot"]["finish"]["date"], pd["timeslot"]["finish"]["time"])
            prj_finish = datetime.strptime(prj_finish, "%d.%m.%Y %H:%M")

            for ph in ph_for_saving_with_date:
                if ph[2] is not None:
                    continue

                if prj_start <= ph[1] <= prj_finish:  # If photo date/time in project timeslot
                    ph[2] = os_path.join(settings["projects_dir"], project, dir_source)

        for ph in ph_for_saving_with_date:
            dest_dir = os_path.normpath(ph[2]) if ph[2] is not None else os_path.join(settings["projects_dir"],
                                                                                      dir_unsorted)
            # TODO: file renaming according to template YYYY-MM-DD_HH-MM-SS.ext
            if os_path.split(ph[0])[0] == dest_dir:
                trace.debug("Try to move photo to the same location: {0}".format(ph[0]))
            else:
                trace.debug("Save photo: {0} -> {1}".format(os_path.normpath(ph[0]), dest_dir))
                try:
                    # Copy/move image
                    file_operation(os_path.normpath(ph[0]), dest_dir)
                    # Copy/move XMP file too if it exists
                    if os_path.isfile(os_path.splitext(ph[0])[0] + xmp_ext):
                        file_operation(os_path.splitext(ph[0])[0] + xmp_ext, dest_dir)
                except shutil_Error as e:  # For example, if file already exists in destination directory
                    trace.warning(e)
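The TODO above mentions renaming saved photos to a YYYY-MM-DD_HH-MM-SS.ext template; a minimal sketch of such a helper (the name timestamped_name is illustrative, not part of the original code) might look like this:

from datetime import datetime
from os import path as os_path

def timestamped_name(src_path, dt):
    """Return the name src_path would get under the YYYY-MM-DD_HH-MM-SS.ext template."""
    ext = os_path.splitext(src_path)[1].lower()
    return dt.strftime('%Y-%m-%d_%H-%M-%S') + ext

print(timestamped_name('IMG_0042.JPG', datetime(2020, 7, 1, 12, 30, 5)))  # 2020-07-01_12-30-05.jpg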
Example #53
0
 def removeFiles(self, targetdir):
     for root, dirs, files in os_walk(targetdir):
         for name in files:
             os_remove(os_path.join(root, name))
Example #54
0
File: clic.py Project: matt-hayden/cutils
def header(infolder,
           line=comment_LINE,
           block=comment_BLOCK,
           include=INCLUDE,
           exclude=EXCLUDE,
           overwrite=False):
    # Compile regular expression pattern to match in scanned files
    pattern = re_compile(_COMMENT.format(r'|'.join(map(comment_escape, line)),
                                         *comment_block_comments(block)),
                         flags=re_DOTALL | re_VERBOSE | re_MULTILINE)
    # Define default values
    align = _FORMAT['CENTER']
    width = 80
    # Update values based on INFO file
    values = {}
    with open(os_path_join(infolder, 'INFO'), 'r', encoding='utf-8') as file:
        header = file.read()
        match = re_match(r'\s*#\s*format\s+'
                         r'((?P<align>CENTER|LEFT|RIGHT)\s+)?'
                         r'(?P<width>\d+)?', header)
        if match:
            align, width = match.group('align', 'width')
            align = _FORMAT.get(align, _FORMAT['CENTER'])
            try:
                width = int(width)
            except TypeError:
                pass
        # Add leading and trailing empty line
        header = '\n{}\n'.format(header[match.end():].strip())

    # Get file contents of special files
    for filename in _FILES:
        try:
            with open(os_path_join(infolder, filename), 'r', encoding='utf-8') as file:
                values[filename] = file.read().strip()
        except FileNotFoundError:
            values[filename] = ''

    # Get special values
    values['DATE'] = datetime.now().strftime('%Y.%m.%d')

    # Exclude containers
    except_dirs  = []  # relative path to dir from root
    except_files = []  # relative path to file from root
    except_names = []  # filename (with extension) anywhere
    except_exts  = []  # extension anywhere

    # If 'exclude' is dictionary like object
    try:
        _empty = ()
        # Excludes relative to root
        for key, container in zip(('folders', 'files'),
                                  (except_dirs, except_files)):
            container.extend(os_path_join(infolder, p) for p in exclude.get(key, _empty))
        # Excludes anywhere
        for key, container in zip(('names', 'extensions'),
                                  (except_names, except_exts)):
            container.extend(exclude.get(key, _empty))
    # If 'exclude' is an iterable object
    except AttributeError:
        except_names = exclude

    # Include containers
    permit_names = []  # filename (with extension) anywhere
    permit_exts  = []  # extension anywhere

    # If 'include' is dictionary like object
    try:
        _empty = ()
        # Includes anywhere
        for key, container in zip(('names', 'extensions'),
                                  (permit_names, permit_exts)):
            container.extend(include.get(key, _empty))
    # If 'include' is an iterable object
    except AttributeError:
        permit_names = include

    # Walk through all files and folders in the passed folder
    # FIXME: what if none of the files changed only INFO has been updated?
    # Scan through all files and folders
    with check_Checker(infolder, file='.clic_cache') as checker:
        for root, dirs, filenames in os_walk(infolder):
            # If skip this folder and all subfolders
            if root in except_dirs:
                dirs.clear()
                continue
            # Check all files in folder
            for filename in filenames:
                filepath = os_path_join(root, filename)[2:]
                # If skip this exact file
                if filepath in except_files:
                    continue
                name, extension = os_path_splitext(filename)
                # If file or extension is not banned and it is on the
                # white-list and it changed since last time checked and
                    # this is not an overwrite-call
                if (filename not in except_names and
                    extension not in except_exts and
                    (extension in permit_exts or
                     filename  in permit_names) and
                    checker.ischanged(filepath) and
                    not overwrite):
                    values['SIZE'] = _size(os_path_getsize(filepath))
                    # FIXME: make it more generic than ./ -- what if ../../?
                    values['FILE'] = filepath[2:] if filepath.startswith('./') else filepath
                    values['FILE_NAME'] = filename
                    values['FILE_BASE'] = name
                    if _comment(header.format(**values), filepath, pattern, align, width):
                        # Update checker after the file has been modified
                        checker.update()
                        # Report
                        print('CLIC: processed {!r}'.format(filepath))
    def __init__(self, master=None, path=None, project_keywords=None):
        self.photo_for_analysis = []
        self.project_keywords = project_keywords
        self.master = master

        # Dictionary to combine results of all analysis types together
        self.results = {}

        self.ch_btn_addr_values = {}
        self.ch_btn_kw_values = {}

        # Collect all photos (with allowed extensions) with paths
        for top, _, files in os_walk(path):
            for _f in files:
                if os_path.splitext(_f)[1].lower() in supported_ext_for_analysis:
                    self.photo_for_analysis.append(os_path.join(top, _f))

        if not self.photo_for_analysis:  # If no photos found
            if project_keywords is not None:  # If photo analysis was requested from project
                raise ValueError
            while True:
                # Ask to choose another folder
                # Use main window as parent for dialog, because TopLevel window for this class is not created yet
                if messagebox.askyesno(parent=self.master,
                                       title=get_name('title_dia_1_photo_an'),
                                       message=get_name('text_dia_1_photo_an')):  # Ok
                    # Use main window as parent for dialog, because TopLevel window for this class is not created yet
                    new_path = filedialog.askdirectory(parent=self.master, title=get_name("ask_dir_photo_an"))
                    for top, _, files in os_walk(new_path):
                        for _f in files:
                            if os_path.splitext(_f)[1].lower() in supported_ext_for_analysis:
                                self.photo_for_analysis.append(os_path.join(top, _f))
                    if self.photo_for_analysis:  # Break from the loop if photos are now found
                        break
                else:  # Cancel
                    raise ValueError

        # If photo analyzer is called not from project
        if self.project_keywords is None:
            # Check project file in selected folder
            if os_path.isfile(os_path.join(path, project_file)):
                # Load keywords from project file
                with open(os_path.join(path, project_file), encoding='utf-8') as fj:
                    self.project_keywords = json_load(fj)["keywords"]

        Toplevel.__init__(self, master)
        self.bind('<Escape>', lambda _: self.destroy())
        self.geometry("+200+200")
        self.config(background=main_bg,
                    padx=top_level_padding,
                    pady=top_level_padding)
        self.resizable(FALSE, FALSE)
        self.focus_force()
        self.title(get_name("win_photo_an"))

        self.current_photo_ix = 0

        self.canvas_with_img = CanvasWithImage(master=self,
                                               image=self.photo_for_analysis[self.current_photo_ix],
                                               side=settings['photo_an']['preview_size'])
        self.canvas_with_img.bind('<Button-1>', self.next_photo)

        self.frame_main = ttk.Frame(master=self, padding=10)
        self.frame_controls = ttk.Frame(master=self.frame_main)

        self.frame_analysis_type = ttk.LabelFrame(master=self.frame_controls,
                                                  text=get_name("frame_analysis_type"))
        self.ch_btn_geo_an_value = StringVar()
        self.ch_btn_geo_an_value.set(settings['photo_an']['geo_an'])
        self.ch_btn_geo_an = ttk.Checkbutton(master=self.frame_analysis_type,
                                             text=get_name("ch_btn_geo_an"),
                                             variable=self.ch_btn_geo_an_value,
                                             onvalue='True',
                                             offvalue='False')

        self.ch_btn_obj_detect_an_value = StringVar()
        self.ch_btn_obj_detect_an_value.set(settings['photo_an']['obj_detect_an'])
        self.ch_btn_obj_detect_an = ttk.Checkbutton(master=self.frame_analysis_type,
                                                    text=get_name("ch_btn_obj_detect_an"),
                                                    variable=self.ch_btn_obj_detect_an_value,
                                                    onvalue='True',
                                                    offvalue='False')

        self.ch_btn_project_an_value = StringVar()
        self.ch_btn_project_an_value.set(settings['photo_an']['project_an'])
        self.ch_btn_project_an = ttk.Checkbutton(master=self.frame_analysis_type,
                                                 text=get_name("ch_btn_project_an"),
                                                 variable=self.ch_btn_project_an_value,
                                                 onvalue='True',
                                                 offvalue='False')

        self.btn_analyze = ttk.Button(master=self.frame_controls, text=get_name("btn_analyze"))
        self.btn_analyze.bind('<ButtonRelease-1>', self.analyze)
        self.btn_save = ttk.Button(master=self.frame_controls, text=get_name("btn_save"))
        self.btn_save.bind('<ButtonRelease-1>', self.save)

        self.frame_results = ttk.Frame(master=self.frame_main, padding=5)
        self.lbl_saved_iptc = ttk.Label(master=self.frame_main, text=self.get_formatted_saved_iptc())
        self.canvas_with_img.pack(fill=BOTH, side=LEFT)
        self.frame_main.pack(fill=BOTH, side=LEFT)
        self.frame_controls.grid(row=0, column=0, sticky=W + E, columnspan=2)
        self.frame_analysis_type.pack(fill=X)
        self.ch_btn_geo_an.grid(sticky=W, row=0, column=0)
        self.ch_btn_obj_detect_an.grid(sticky=W, row=0, column=1)
        self.ch_btn_project_an.grid(sticky=W, row=0, column=2)
        self.btn_analyze.pack(side=LEFT, fill=X)
        self.btn_save.pack(side=LEFT, fill=X)
        self.frame_results.grid(row=1, column=0, sticky=N)
        self.lbl_saved_iptc.grid(row=1, column=1, sticky=N)
    def project_selected(self):
        if self.frame_welcome is not None:
            self.frame_welcome.destroy()
            self.frame_welcome = None

        self.menu_project.entryconfig(1, state=ACTIVE)

        with open(self.project_file, encoding='utf-8') as f:
            self.project_dict = json_load(f)

        if self.frame_project is not None:
            self.frame_project.destroy()


        self.frame_project = ttk.Frame(master=self.master)

        self.frame_proj_info = ttk.LabelFrame(master=self.frame_project, text=get_name("frame_proj_info"))

        text = '''{0}:\t"{1}"

{2}:\t{3}\t{4}
{5}:\t{6}\t{7}

{8}:
{9}

{10}:
{11}'''.format(get_name('name'),
               self.project_dict['name'],
               get_name('start'),
               self.project_dict['timeslot']['start']['time'],
               self.project_dict['timeslot']['start']['date'],
               get_name('finish'),
               self.project_dict['timeslot']['finish']['time'],
               self.project_dict['timeslot']['finish']['date'],
               get_name('keywords'),
               self.project_dict['keywords'] if self.project_dict['keywords'] else get_name("empty"),
               get_name('description'),
               self.project_dict['description'] if self.project_dict['description'].strip() else get_name("empty"))

        self.lbl_proj_info = ttk.Label(master=self.frame_proj_info,
                                       justify=LEFT,
                                       wraplength=450,
                                       text=text)

        self.frame_proj_controls = ttk.LabelFrame(master=self.frame_project, text=get_name("frame_proj_controls"))
        self.btn_analyze_photo = ttk.Button(master=self.frame_proj_controls, text=get_name("btn_analyze_photo"))
        self.btn_edit = ttk.Button(master=self.frame_proj_controls, text=get_name("btn_edit"))
        self.btn_close_proj = ttk.Button(master=self.frame_proj_controls, text=get_name("btn_close_proj"))
        self.btn_delete_proj = ttk.Button(master=self.frame_proj_controls, text=get_name("btn_delete_proj"))
        self.btn_refresh = ttk.Button(master=self.frame_proj_controls, text=get_name("btn_refresh"))

        self.btn_analyze_photo.bind('<ButtonRelease-1>', self.analyze_photo_from_project)
        self.btn_edit.bind('<ButtonRelease-1>', self.edit_project)
        self.btn_close_proj.bind('<ButtonRelease-1>', lambda _: self.cmd_close_project())
        self.btn_delete_proj.bind('<ButtonRelease-1>', lambda _: self.delete_proj())
        self.btn_refresh.bind('<ButtonRelease-1>', self.refresh)

        self.frame_proj_stat = ttk.LabelFrame(master=self.frame_project, text=get_name("frame_proj_stat"))

        proj_path = os_path.split(self.project_file)[0]
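        # Immediate sub-folders of the project directory (second element of os_walk's first triple)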
        folders = next(os_walk(proj_path))[1]

        if folders:
            # Prepare table with statistics about folders and files
            # ----------------------------------------------------------------------------------------------------------
            self.tree_folders = ttk.Treeview(master=self.frame_proj_stat,
                                             columns=('files', 'nested_folders'),
                                             height=len(folders),
                                             selectmode=NONE)
            self.tree_folders.column('#0', stretch=False, width=145)
            self.tree_folders.heading('#0', text=get_name('folder'))
            self.tree_folders.column('files', stretch=False, width=145)
            self.tree_folders.heading('files', text=get_name('files'))
            self.tree_folders.column('nested_folders', stretch=False, width=190)
            self.tree_folders.heading('nested_folders', text=get_name('nested_folders'))

            for ix, folder in enumerate(folders, start=1):
                self.tree_folders.insert('', 'end', ix, text=folder)
                self.tree_folders.set(ix, 'files', len(next(os_walk(os_path.join(proj_path, folder)))[2]))
                self.tree_folders.set(ix, 'nested_folders', len(next(os_walk(os_path.join(proj_path, folder)))[1]))
            # ==========================================================================================================

            # Prepare table with statistics about photographs basing on source photographs
            # ----------------------------------------------------------------------------------------------------------
            self.tree_source = ttk.Treeview(master=self.frame_proj_stat,
                                            height=10,
                                            selectmode=NONE,
                                            columns=("xmp", "fullsize", "monitor", "web", "panorama", "layered"))

            self.scroll_tree_y = ttk.Scrollbar(master=self.frame_proj_stat, orient='vertical', command=self.tree_source.yview)
            self.tree_source.configure(yscroll=self.scroll_tree_y.set)

            source_files = []
            xmp_files = []
            xmp_files_num = 0
            fs_files_num = 0
            mon_files_num = 0
            web_files_num = 0
            pan_files_num = 0
            layered_files_num = 0

            for file in next(os_walk((os_path.join(proj_path, dir_source))))[2]:
                if os_path.splitext(file)[-1].lower() in supported_image_ext:
                    source_files.append(file)
                if os_path.splitext(file)[-1].lower() == xmp_ext:
                    xmp_files.append(file)

            for source_file in source_files:
                fn_without_ext = os_path.splitext(source_file)[0]
                self.tree_source.insert('', 'end', fn_without_ext, text=source_file)
                for file in xmp_files:
                    if os_path.splitext(file)[0] == fn_without_ext:
                        xmp_files_num += 1
                        self.tree_source.set(fn_without_ext, 'xmp', '+')
                        break

                if os_path.isdir(os_path.join(proj_path, dir_fullsize)):
                    for file in next(os_walk((os_path.join(proj_path, dir_fullsize))))[2]:
                        found_matching = dt_in_fn_regex.match(file)
                        if found_matching and found_matching.group(1) == fn_without_ext:
                            fs_files_num += 1
                            self.tree_source.set(fn_without_ext, 'fullsize', '+')
                            break

                if os_path.isdir(os_path.join(proj_path, dir_monitor)):
                    for file in next(os_walk((os_path.join(proj_path, dir_monitor))))[2]:
                        found_matching = dt_in_fn_regex.match(file)
                        if found_matching and found_matching.group(1) == fn_without_ext:
                            mon_files_num += 1
                            self.tree_source.set(fn_without_ext, 'monitor', '+')
                            break

                if os_path.isdir(os_path.join(proj_path, dir_web)):
                    for file in next(os_walk((os_path.join(proj_path, dir_web))))[2]:
                        found_matching = dt_in_fn_regex.match(file)
                        if found_matching and found_matching.group(1) == fn_without_ext:
                            web_files_num += 1
                            self.tree_source.set(fn_without_ext, 'web', '+')
                            break

                if os_path.isdir(os_path.join(proj_path, dir_panorama)):
                    for file in next(os_walk((os_path.join(proj_path, dir_panorama))))[2]:
                        found_matching = dt_in_fn_regex.match(file)
                        if found_matching and found_matching.group(1) == fn_without_ext:
                            pan_files_num += 1
                            self.tree_source.set(fn_without_ext, 'panorama', '+')
                            break

                if os_path.isdir(os_path.join(proj_path, dir_layered)):
                    for file in next(os_walk((os_path.join(proj_path, dir_layered))))[2]:
                        found_matching = dt_in_fn_regex.match(file)
                        if found_matching and found_matching.group(1) == fn_without_ext:
                            layered_files_num += 1
                            self.tree_source.set(fn_without_ext, 'layered', '+')
                            break

            text = """{13}
({14} - {0}):
{15}\t\t{16}\t{17}
XMP:\t\t{1}\t\t\t{2}%
Fullsize:\t\t{3}\t\t\t{4}%
Monitor:\t\t{5}\t\t\t{6}%
Web:\t\t{7}\t\t\t{8}%
Panorama:\t{9}\t\t\t{10}%
Layered:\t\t{11}\t\t\t{12}%""".format(len(source_files),
                                      xmp_files_num,
                                      int(xmp_files_num / len(source_files) * 100),
                                      fs_files_num,
                                      int(fs_files_num / len(source_files) * 100),
                                      mon_files_num,
                                      int(mon_files_num / len(source_files) * 100),
                                      web_files_num,
                                      int(web_files_num / len(source_files) * 100),
                                      pan_files_num,
                                      int(pan_files_num / len(source_files) * 100),
                                      layered_files_num,
                                      int(layered_files_num / len(source_files) * 100),
                                      get_name("stat_of_edited"),
                                      get_name("source_files"),
                                      get_name("type"),
                                      get_name("num_of_files"),
                                      get_name("percent_from_source"))

            self.lbl_source_stat = ttk.Label(master=self.frame_proj_stat, text=text)

            self.tree_source.heading('#0', text='Source')
            self.tree_source.heading('xmp', text='XMP')
            self.tree_source.heading('fullsize', text='FS')
            self.tree_source.heading('monitor', text='Mon')
            self.tree_source.heading('web', text='Web')
            self.tree_source.heading('panorama', text='Pan')
            self.tree_source.heading('layered', text='Lrd')

            self.tree_source.column('#0', stretch=False, width=170)
            self.tree_source.column('xmp', stretch=False, width=50)
            self.tree_source.column('fullsize', stretch=False, width=50)
            self.tree_source.column('monitor', stretch=False, width=50)
            self.tree_source.column('web', stretch=False, width=50)
            self.tree_source.column('panorama', stretch=False, width=50)
            self.tree_source.column('layered', stretch=False, width=50)

        else:
            self.lbl_no_st_empty_prj = ttk.Label(master=self.frame_proj_stat, text=get_name("lbl_no_st_empty_prj"))

        self.frame_project.pack(fill=BOTH)
        self.frame_proj_info.grid(row=0, column=0, sticky=W + E + N + S)
        self.lbl_proj_info.pack(fill=X)
        self.frame_proj_controls.grid(row=1, column=0, sticky=W + E + N + S)
        self.btn_analyze_photo.pack(fill=X)
        self.btn_edit.pack(fill=X)
        self.btn_close_proj.pack(fill=X)
        self.btn_delete_proj.pack(fill=X)
        self.btn_refresh.pack(fill=X)
        self.frame_proj_stat.grid(row=0, column=1, rowspan=2, sticky=W + E + N + S)
        if folders:
            self.tree_folders.pack()
            self.lbl_source_stat.pack(fill=X)
            self.tree_source.pack(side=LEFT)
            self.scroll_tree_y.pack(side=RIGHT, fill=Y, expand=1)
        else:
            self.lbl_no_st_empty_prj.pack(fill=X)
Example #57
0
cf = ConfigParser()
cf.read("config.ini")

print(f"{'='*5} Packaging {cf['DATA']['name']} {'='*5}")

print(" - Copying src => build...")
shutil.rmtree("build", ignore_errors=True)
shutil.copytree("src", "build")

v_mj, v_mn = list(map(int, cf['DATA']['version'].split(".")))
cf['DATA']['version'] = f"{v_mj}.{v_mn+1}"
print(f" - Increasing version to {cf['DATA']['version']}")

print(" - Linting files...")
j_counter, m_counter = 0, 0
for root, dirs, files in os_walk("build"):
    for filename in files:
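        # ':=' keeps the full path for error messages; 'r+' opens the file for both reading and writing back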
        with open(file_path := root + os_sep + filename, "r+") as f:
            contents = ""
            if filename.endswith(".json"):
                j_counter += 1
                try:
                    j = json.load(f)
                except json.JSONDecodeError as e:
                    print(f"Failed to parse {file_path}:\n    {e.args[0]}")
                    exit(-1)
                contents = json.dumps(j, separators=(',', ':'))
            elif filename.endswith(".mcfunction") or filename.endswith(
                    ".mcmeta"):
                m_counter += filename.endswith(".mcfunction")
                lines = [
Example #58
0
   def run(self):
      need_normal_clean = True
      exclude_files = [
         'benchmark_it.c',
         'disassemble_it.c',
         'line_memory_profile_it.c',
         'profile_it.c',
         'speed_it.c',
         'utils.c',
         '_version.c',
      ]
      remove_files = []
      remove_dirs = []

      # remove ONLY: `build/sphinx`
      if self.onlydocs:
         need_normal_clean = False
         dir_path = path_join(ROOT_PACKAGE_PATH, 'build', 'sphinx')
         if path_exists(dir_path):
            remove_dirs.append(dir_path)

      # remove also: DIRS: `build, dist, cover, *.egg-info, *._pyxbld`
      # and FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
      if self.all:
         need_normal_clean = True
         for dir_ in {'build', 'dist', 'cover'}:
            dir_path = path_join(ROOT_PACKAGE_PATH, dir_)
            if path_exists(dir_path):
               remove_dirs.append(dir_path)
         for root, dirs_w, files_w in os_walk(ROOT_PACKAGE_PATH):
            for dir_ in dirs_w:
               if '_pyxbld' in dir_ or 'egg-info' in dir_:
                  remove_dirs.append(path_join(root, dir_))

         # remove FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
         for root, dirs_w, files_w in os_walk(MAIN_PACKAGE_PATH):
            for file_ in files_w:
               if file_ not in exclude_files:
                  if path_splitext(file_)[-1] in {'.so', '.c'}:
                     remove_files.append(path_join(root, file_))

                  tmp_name, tmp_ext = path_splitext(file_)
                  if tmp_ext == '.pyx':
                     # Check if we have a html with the same name
                     check_html_path = path_join(root, tmp_name + '.html')
                     if path_isfile(check_html_path):
                        remove_files.append(check_html_path)

      # remove also: all files defined in exclude_files
      if self.excludefiles:
         for root, dirs_w, files_w in os_walk(MAIN_PACKAGE_PATH):
            for file_ in files_w:
               if file_ in exclude_files:
                  remove_files.append(path_join(root, file_))

      # do the general clean
      if need_normal_clean:
         for file_ in {'.coverage', 'MANIFEST'}:
            if path_exists(file_):
               remove_files.append(file_)

         for root, dirs_w, files_w in os_walk(ROOT_PACKAGE_PATH):
            for file_ in files_w:
               if file_ not in exclude_files:
                  if path_splitext(file_)[-1] in {'.pyc', '.pyo', '.pyd', '.o', '.orig'}:
                     remove_files.append(path_join(root, file_))
            for dir_ in dirs_w:
               if '__pycache__' in dir_:
                  remove_dirs.append(path_join(root, dir_))

      # REMOVE ALL SELECTED
      # noinspection PyBroadException
      try:
         for file_ in remove_files:
            if path_exists(file_):
               os_remove(file_)
         for dir_ in remove_dirs:
            if path_exists(dir_):
               shutil_rmtree(dir_)
      except Exception:
         pass
Example #59
0
    def run(self):
        need_normal_clean = True
        exclude_files = [
            'lconf_classes.c',
            'lconf_structure_classes.c',
            'main_code.c',
            'transform.c',
            'utils.c',
            'validator.c',
            '_version.c',
        ]
        remove_files = []
        remove_dirs = []

        # remove ONLY: `build/sphinx`
        if self.onlydocs:
            need_normal_clean = False
            dir_path = path_join(ROOT_PACKAGE_PATH, 'build', 'sphinx')
            if path_exists(dir_path):
                remove_dirs.append(dir_path)

        # remove also: DIRS: `build, dist, cover, *._pyxbld, *.egg-info`
        # and FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
        if self.all:
            need_normal_clean = True
            for dir_ in {'build', 'dist', 'cover'}:
                dir_path = path_join(ROOT_PACKAGE_PATH, dir_)
                if path_exists(dir_path):
                    remove_dirs.append(dir_path)
            for root, dirs_w, files_w in os_walk(ROOT_PACKAGE_PATH):
                for dir_ in dirs_w:
                    if '_pyxbld' in dir_ or 'egg-info' in dir_:
                        remove_dirs.append(path_join(root, dir_))

            # remove FILES in MAIN_PACKAGE_PATH: `*.so, *.c` and cython annotate html
            for root, dirs_w, files_w in os_walk(MAIN_PACKAGE_PATH):
                for file_ in files_w:
                    if file_ not in exclude_files:
                        if path_splitext(file_)[-1] in {'.so', '.c'}:
                            remove_files.append(path_join(root, file_))

                        tmp_name, tmp_ext = path_splitext(file_)
                        if tmp_ext == '.pyx':
                            # Check if we have a html with the same name
                            check_html_path = path_join(
                                root, tmp_name + '.html')
                            if path_isfile(check_html_path):
                                remove_files.append(check_html_path)

        # remove also: all files defined in exclude_files
        if self.excludefiles:
            for root, dirs_w, files_w in os_walk(MAIN_PACKAGE_PATH):
                for file_ in files_w:
                    if file_ in exclude_files:
                        remove_files.append(path_join(root, file_))

        # do the general clean
        if need_normal_clean:
            for file_ in {'.coverage', 'MANIFEST'}:
                if path_exists(file_):
                    remove_files.append(file_)

            for root, dirs_w, files_w in os_walk(ROOT_PACKAGE_PATH):
                for file_ in files_w:
                    if file_ not in exclude_files:
                        if path_splitext(file_)[-1] in {
                                '.pyc', '.pyo', '.pyd', '.o', '.orig'
                        }:
                            remove_files.append(path_join(root, file_))
                for dir_ in dirs_w:
                    if '__pycache__' in dir_:
                        remove_dirs.append(path_join(root, dir_))

        # REMOVE ALL SELECTED
        # noinspection PyBroadException
        try:
            for file_ in remove_files:
                if path_exists(file_):
                    os_remove(file_)
            for dir_ in remove_dirs:
                if path_exists(dir_):
                    shutil_rmtree(dir_)
        except Exception:
            pass