Example #1
 def test_not_overwrite_file(self, mocker):
     mock = mocker.patch.object(nova_api.os.path,
                                "isfile",
                                return_value=True)
     nova_api.create_api_files(EntityForTest, EntityDAO, '')
     assert not is_file('entityfortest_api.yml')
     assert not is_file('entityfortest_api.py')
Example #2
File: utils.py  Project: CuteFwan/hata
def _iter_name_maybe_directory(name):
    """
    Iterates over the extension's names if it is a directory.
    
    This function is a generator.
    
    Parameters
    ----------
    name : `str`
        An extension's name.
    
    Yields
    ------
    name : `str`
        Extension names.
    """
    path_end = join_path(*name.split('.'))
    for base_path in route_paths:
        path = join_path(base_path, path_end)
        if is_directory(path) and (not is_file(join_path(path,
                                                         '__init__.py'))):
            for file_name in list_directory(path):
                file_path = join_path(path, file_name)
                if is_directory(file_path):
                    if is_file(join_path(file_path, '__init__.py')):
                        yield f'{name}.{file_name}'
                    continue

                if is_file(file_path):
                    if file_name.endswith('.py'):
                        yield f'{name}.{file_name[:-3]}'
                    continue
            return

    yield name
Example #3
def move_file(source_file, destination_dir):
	file_name_with_type = os.path.basename(source_file)	
	file_name, file_type = file_name_with_type.rsplit( '.', 1 )

	destination_file = path.join( destination_dir, file_name_with_type )

	if is_file( destination_file ): # file already exists, so rename the copy
		destination_file = path.join( destination_dir, file_name + ' - copy 2.' + file_type )

	if is_file( source_file ):
		shutil.move( source_file, destination_file )
Example #4
def _iter_folder(import_name, folder_path):
    """
    Iterates over a folder's import names.
    
    Parameters
    ----------
    import_name : `None`, `str`
        The name of the extension if we would import it.
    folder_path : `str`
        Path to the folder
    
    Yields
    ------
    import_name : `None`, `str`
        Detected import names for each applicable file in the folder.
    path : `str`
        Path of the file.
    """
    for python_extension_name in PYTHON_EXTENSION_NAMES:
        file_path = join_paths(folder_path, f'__init__{python_extension_name}')
        if exists(file_path) and is_file(file_path):
            yield import_name, file_path
            return

    for file_name in list_directory(folder_path):
        if file_name.startswith('.') or (file_name == '__pycache__'):
            continue

        path = join_paths(folder_path, file_name)

        if is_file(path):
            for python_extension_name in PYTHON_EXTENSION_NAMES:
                if file_name.endswith(python_extension_name):
                    if import_name is None:
                        import_name_value = None
                    else:
                        import_name_value = f'{import_name}.{file_name[:-len(python_extension_name)]}'
                    yield import_name_value, path
                    break

            continue

        if is_folder(path):
            if import_name is None:
                import_name_value = None
            else:
                import_name_value = f'{import_name}.{file_name}'
            yield from _iter_folder(import_name_value, path)
            continue

        # no more cases
        continue
Example #5
def _lookup_path(import_name_or_path):
    """
    Detects the root of the given name.
    
    This function is an iterable generator.
    
    Parameters
    ----------
    import_name_or_path : `str`
        An extension's import name, or its absolute path.
    
    Yields
    ------
    import_name : `None`, `str`
        Import name to an extension file.
    path : `str`
        Path of the file.
    
    Raises
    ------
    ImportError
        If `import_name_or_path` could not be detected as an extension.
    """
    if is_absolute_path_name(import_name_or_path):
        if exists(import_name_or_path):
            if is_folder(import_name_or_path):
                yield from _iter_folder(None, import_name_or_path)
                return

            if is_file(import_name_or_path):
                yield None, import_name_or_path
                return
    else:
        path_end = join_paths(*import_name_or_path.split('.'))
        for base_path in route_paths:
            path = join_paths(base_path, path_end)
            if exists(path) and is_folder(path):
                yield from _iter_folder(import_name_or_path, path)
                return

            for python_extension_name in PYTHON_EXTENSION_NAMES:
                file_path = path + python_extension_name
                if exists(file_path) and is_file(file_path):
                    yield import_name_or_path, file_path
                    return

    raise ImportError(
        f'The given `import_name_or_path` could not be detected as an extension nor an absolute path, '
        f'got {import_name_or_path!r}.')
Example #6
File: lib.py  Project: wonkodv/hanstool
def load_scripts(path):
    if not isinstance(path, pathlib.Path):
        path = str(path)
        path = os.path.expanduser(path)
        path = pathlib.Path(path)
    if path.is_dir():
        l = path.glob('*.py')
        # sort b.50.py before a.80.py
        l = sorted(l, key=lambda p: [p.suffixes[-2][1:] if len(p.suffixes)>1 else "",p])
        for p in l:
            load_scripts(p)
    elif path.is_file():
        with path.open("rt") as f:
            c = f.read()
        c = compile(c, str(path), "exec")
        try:
            env.Env['__FILE__'] = str(path.absolute())
            exec (c, env.Env.dict)
            del env.Env['__FILE__']
        except NotImplementedError:
            # Script wanted to be ignored
            pass
        SCRIPTS.append(path)
    else:
        raise Exception("neither file nor dir in load_Scripts", path)
Example #7
def get_zarr_checksum(path: Path, known: Optional[Dict[str, str]] = None) -> str:
    """
    Compute the Zarr checksum for a file or directory tree.

    If the digests for any files in the Zarr are already known, they can be
    passed in the ``known`` argument, which must be a `dict` mapping
    slash-separated paths relative to the root of the Zarr to hex digests.
    """
    if path.is_file():
        return cast(str, get_digest(path, "md5"))
    if known is None:
        known = {}

    def digest_file(f: Path) -> Tuple[Path, str, int]:
        assert known is not None
        relpath = f.relative_to(path).as_posix()
        try:
            dgst = known[relpath]
        except KeyError:
            dgst = md5file_nocache(f)
        return (f, dgst, os.path.getsize(f))

    zcc = ZCDirectory()
    for p, digest, size in threaded_walk(path, digest_file):
        zcc.add(p.relative_to(path), digest, size)
    return zcc.get_digest_size()[0]
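A quick usage sketch for the function above; the Zarr directory path and the pre-computed digest passed via known are placeholders, and the call assumes the helpers imported by the original dandi module are available.

from pathlib import Path

# Hypothetical invocation: pass a digest we already know for one member file
# via `known` so only the remaining files are re-hashed.
known_digests = {"0/.zarray": "d41d8cd98f00b204e9800998ecf8427e"}  # placeholder digest
print(get_zarr_checksum(Path("data/sample.zarr"), known=known_digests))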
Example #8
def count_lines_and_files(paths_lines=None,
                          paths_files=None,
                          line_glob=None,
                          file_glob=None) -> dict:
    """Counts lines and files in the given paths."""

    result = {}

    for path in arg_to_iter(paths_lines):
        path = Path(path).resolve()
        if path.is_dir():
            files = path.glob(line_glob) if line_glob else path.iterdir()
        elif path.is_file():
            files = (path, )
        else:
            files = ()
        for file in files:
            LOGGER.info("Counting lines in <%s>...", file)
            name = os.path.splitext(file.name)[0]
            result[f"lc_{name}"] = count_lines(file)

    for path in arg_to_iter(paths_files):
        path = Path(path).resolve()
        if not path.is_dir():
            continue
        for subdir in path.glob("**"):
            LOGGER.info("Counting files in <%s>...", subdir)
            if path == subdir:
                name = path.name
            else:
                relative = subdir.relative_to(path)
                name = "_".join(relative.parts)
            result[f"fc_{name}"] = count_files(subdir, glob=file_glob)

    return result
Example #9
File: faves.py  Project: Peeveli/puavo-os
def load_use_counts(all_programs, directory):
    """Unserialize fave IDs and their counts."""

    from os.path import join as path_join, isfile as is_file

    faves_file = path_join(directory, 'faves')

    if not is_file(faves_file):
        return

    for row in open(faves_file, 'r').readlines():
        parts = row.strip().split()

        if len(parts) != 2:
            continue

        if not parts[0] in all_programs:
            logging.warning('Program "%s" listed in faves.yaml, but it does '
                            'not exist in the menu data', parts[0])
            continue

        try:
            all_programs[parts[0]].uses = int(parts[1])
        except Exception as exception:
            # the use count probably wasn't an integer...
            logging.warning('Could not set the use count for program "%s": %s',
                            parts[0], str(exception))
Example #10
File: util.py  Project: f-koehler/mlxtk
def remove_file(path: Union[str, Path]):
    path = make_path(path)
    if path.exists() and path.is_file():
        LOGGER.debug("delete file: %s", str(path))
        path.unlink()
    else:
        LOGGER.debug("file does not exist/path is not a file, do nothing")
Example #11
    def _file_mode(self):
        """Return black.FileMode object, using local pyproject.toml as needed."""
        if self.override_config:
            return self.override_config

        # Unless using override, we look for pyproject.toml
        project_root = black.find_project_root(
            ("." if self.filename in self.STDIN_NAMES else self.filename, ))
        path = project_root / "pyproject.toml"

        if path in black_config:
            # Already loaded
            LOG.debug("flake8-black: %s using pre-loaded %s", self.filename,
                      path)
            return black_config[path]
        elif path.is_file():
            # Use this pyproject.toml for this python file,
            # (unless configured with global override config)
            # This should be thread safe - does not matter even if
            # two workers load and cache this file at the same time
            black_config[path] = load_black_mode(path)
            LOG.debug("flake8-black: %s using newly loaded %s", self.filename,
                      path)
            return black_config[path]
        else:
            # No project specific file, use default
            LOG.debug("flake8-black: %s using defaults", self.filename)
            return black_config[None]
Example #12
def filelist_from_patterns(patterns, ignore=None, base='.', sizesort=False):
    base = Path(base or '.').expanduser()

    filenames = set()
    for pattern in patterns or []:
        path = base / pattern
        if path.is_file():
            filenames.add(path)
            continue

        if path.is_dir():
            path = path / '*'

        parts = path.parts[1:] if path.is_absolute() else path.parts
        pattern = str(Path("").joinpath(*parts))
        filenames.update(
            (p for p in Path(path.root).glob(pattern) if not p.is_dir()))

    filenames = list(filenames)

    def excluded(path):
        if any(path.match(ex) for ex in ignore):
            return True
        for part in path.parts:
            if any(Path(part).match(ex) for ex in ignore):
                return True

    if ignore:
        filenames = [path for path in filenames if not excluded(path)]
    if sizesort:
        filenames.sort(key=lambda f: f.stat().st_size)

    return filenames
Example #13
def VerifyPath(name, path, isFileExpected):
    if not os.path.exists(path):
        raise Exception(f"{name} path does not exist: [{path}]")
    if (not isFileExpected) and not path.is_dir():
        raise Exception(f"{name} path is not a folder: [{path}]")
    if isFileExpected and not path.is_file():
        raise Exception(f"{name} path is not a file: [{path}]")
Example #14
def check_config():
	path = pathlib.Path('scanbot.cfg')
	if path.is_file():
		print("Read config OK...")	
	else:
		print("Couldn't find turret config file. Creating a new one...")
		
		config = configparser.RawConfigParser()
		config['LOGGER'] = {}
		config['LOGGER']['IP'] = '192.168.1.1'
		config['LOGGER']['Username'] = '******'
		config['LOGGER']['Password'] = '******'
		config['LOGGER']['SSH Run Command'] = './Logger'
		
		config['NAVIO'] = {}
		config['NAVIO']['IP'] = '192.168.1.1'
		config['NAVIO']['Username'] = '******'
		config['NAVIO']['Password'] = '******'
		config['NAVIO']['SSH Run Command'] = './Logger'
		
		config['TURRET'] = {}
		config['TURRET']['Tilt Servo Min'] = '0'
		config['TURRET']['Tilt Servo Max'] = '255'
		config['TURRET']['Tilt Servo Mid'] = '127'
		config['TURRET']['Pan Servo Min'] = '0'
		config['TURRET']['Pan Servo Max'] = '255'
		config['TURRET']['Pan Servo Mid'] = '127'
		
		with open('scanbot.cfg', 'w') as configfile:
			config.write(configfile)
		
		sys.exit("Please edit \"scanbot.cfg\" with correct information. The program will now stop.")
Example #15
File: tasks.py  Project: alem0lars/ctfs
def test():
    for file_rel_path in list_dir(TEST_DIR):
        file_path = join_path(TEST_DIR, file_rel_path)
        file_name = base_name(file_path)
        if (is_file(file_path) and file_name.startswith('test_')
                and file_name.endswith('.py')):
            system('python {}'.format(file_path))
Example #16
File: buck.py  Project: E-LLP/buck-1
 def includes_iterator():
     for pattern in includes:
         for path in search_base.glob(pattern):
             # TODO(bhamiltoncx): Handle hidden files on Windows.
             if path.is_file() and (include_dotfiles
                                    or not path.name.startswith('.')):
                 yield path
Example #17
def test_prepareLogging(tmp_path):
    path = tmp_path / "test.log"
    helpers.prepareLogging(filepath=path)
    logger = helpers.getLogger("1")
    logger1 = logger
    assert logger.getEffectiveLevel() == logging.INFO

    logger.info("something")
    assert path.is_file()

    helpers.prepareLogging(filepath=path, logLvl=logging.DEBUG)
    logger = helpers.getLogger("2")
    assert logger.getEffectiveLevel() == logging.DEBUG

    helpers.prepareLogging(
        filepath=path,
        logLvl=logging.INFO,
        lvlMap={
            "1": logging.NOTSET,
            "3": logging.WARNING
        },
    )
    logger = helpers.getLogger("3")
    assert logger.getEffectiveLevel() == logging.WARNING
    assert logger1.getEffectiveLevel() == logging.NOTSET
Example #18
def evaluate_file(file_name):
    """Evaluates the conditionals listed in a file and returns their
    results in a dict."""

    log_info('Loading conditionals from file "{0}"'.format(file_name))

    results = {}

    if not is_file(file_name):
        log_error('File "{0}" does not exist'.format(file_name))
        return results

    try:
        from yaml import safe_load as yaml_safe_load
        data = yaml_safe_load(open(file_name, 'r', encoding='utf-8').read())
    except Exception as e:
        log_error(e)
        return results

    for c in (data or []):
        if 'name' not in c:
            log_error('Ignoring a conditional without name, skipping')
            continue

        c_name = c['name']

        if c_name in results:
            log_error('Duplicate conditional "{0}", skipping'.
                      format(c_name))
            continue

        if 'method' not in c:
            log_error('Conditional "{0}" has no method defined, skipping'.
                      format(c_name))
            continue

        c_method = c['method']

        if c_method not in __METHODS:
            log_error('Conditional "{0}" has an unknown method "{1}", '
                      'skipping'.format(c_name, c_method))
            continue

        if ('params' not in c) or (c['params'] is None):
            log_error('Conditional "{0}" has no "params" block, skipping'.
                      format(c_name))
            continue

        try:
            results[c_name] = __METHODS[c_method](c_name, c['params'][0])
        except Exception as e:
            # Don't let a single conditional failure remove
            # everything in this file
            log_error(e)

    for k, v in results.items():
        log_debug('Conditional: name="{0}", result={1}'.format(k, v))

    return results
Example #19
def test():
    for file_rel_path in list_dir(TEST_DIR):
        file_path = join_path(TEST_DIR, file_rel_path)
        file_name = base_name(file_path)
        if (is_file(file_path)
            and file_name.startswith('test_')
            and file_name.endswith('.py')):
            system('python {}'.format(file_path))
Example #20
 def build(cls, input_directory):
     forest = cls()
     for path in input_directory.iterdir():
         if not path.name.endswith(".permission") or not path.is_file():
             continue
         if path.name not in forest.nodes:
             forest.add_root_node_from_path(path)
     return forest
def writeData(data, file="task_list_OfTasks.csv"):
    path = Path(getDir(file))
    if path.is_file():
        with open(path, "w", newline="") as file:
            writer = csv.writer(file)
            writer.writerows(data)
    else:
        raise Exception("Bad File ya Tried to reach there of:\n\r", path)
Example #22
File: cli.py  Project: litprog/litprog
def _iter_markdown_filepaths(input_paths: InputPaths) -> FilePaths:
    for path_str in input_paths:
        path = pl.Path(path_str)
        if path.is_file():
            yield path
        else:
            for ext in MARKDOWN_FILE_EXTENSIONS:
                for fpath in path.glob(f"**/*.{ext}"):
                    yield fpath
Example #23
 def __GetEnvironmentText(self):
     ext = self.__GetExtension()
     if ext is None: return ''
     path = pathlib.Path(__file__).parent.parent / (
         'res/template/env/' + self.__GetOsName() + '_' + ext + '.md')
     if path.is_file():
         source = None
         with path.open() as f:
             return f.read().strip()
Example #24
	def md5_update_from_dir(directory, hash):
		assert Path(directory).is_dir()
		for path in sorted(Path(directory).iterdir()):
			hash.update(path.name.encode())
			if path.is_file():
				hash = md5_update_from_file(path, hash)
			elif path.is_dir():
				hash = md5_update_from_dir(path, hash)
		return hash
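Example #24 calls an md5_update_from_file helper that is not shown. A minimal sketch of that helper, plus a top-level wrapper (the wrapper name is invented here), assuming only hashlib and the function above:

import hashlib

def md5_update_from_file(path, hash):
    # Feed the file's bytes into the running hash in fixed-size chunks.
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            hash.update(chunk)
    return hash

def md5_dir(directory):
    # Hash a whole directory tree (names and contents) into one hex digest.
    return md5_update_from_dir(directory, hashlib.md5()).hexdigest()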
Example #25
def is_file(path):
    # type: (Path, ) -> Path
    """Checks if a path is an actual file"""

    if not path.is_file():
        msg = f"{path} is not a file"
        raise ArgumentTypeError(msg)

    return path
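The is_file validator above follows the argparse "type callable" convention (it raises ArgumentTypeError on bad input). A short, hypothetical CLI wiring it up; argparse passes strings, so the value is wrapped in a Path first:

from argparse import ArgumentParser
from pathlib import Path

parser = ArgumentParser(description="print the size of an existing file")
# argparse turns the ArgumentTypeError raised by is_file() into a clean usage error.
parser.add_argument("path", type=lambda value: is_file(Path(value)))
args = parser.parse_args()
print(args.path.stat().st_size)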
Example #26
    def parse_turret_config(self):
        path = pathlib.Path('scanbot.cfg')
        if path.is_file():
            print("Reading turret config file...")
            config = configparser.ConfigParser()
            config.read('scanbot.cfg')
            tilt_servo_min = config['TURRET']['Tilt Servo Min']
            tilt_servo_max = config['TURRET']['Tilt Servo Max']
            tilt_servo_mid = config['TURRET']['Tilt Servo Mid']
            pan_servo_min = config['TURRET']['Pan Servo Min']
            pan_servo_max = config['TURRET']['Pan Servo Max']
            pan_servo_mid = config['TURRET']['Pan Servo Mid']

        else:
            print("Couldn't find turret config file. Creating a new one...")

            config = configparser.RawConfigParser()
            config['LOGGER'] = {}
            config['LOGGER']['IP'] = '192.168.1.1'
            config['LOGGER']['Username'] = '******'
            config['LOGGER']['Password'] = '******'
            config['LOGGER']['SSH Run Command'] = './Logger'

            config['NAVIO'] = {}
            config['NAVIO']['IP'] = '192.168.1.1'
            config['NAVIO']['Username'] = '******'
            config['NAVIO']['Password'] = '******'
            config['NAVIO']['SSH Run Command'] = './Logger'

            config['TURRET'] = {}
            config['TURRET']['Tilt Servo Min'] = '0'
            config['TURRET']['Tilt Servo Max'] = '255'
            config['TURRET']['Tilt Servo Mid'] = '127'
            config['TURRET']['Pan Servo Min'] = '0'
            config['TURRET']['Pan Servo Max'] = '255'
            config['TURRET']['Pan Servo Mid'] = '127'

            with open('scanbot.cfg', 'w') as configfile:
                config.write(configfile)

            sys.exit(
                "Please edit \"scanbot.cfg\" with correct information. The program will now stop."
            )

        print("Parsed the following data:")
        print("Tilt Servo Min: " + tilt_servo_min)
        print("Tilt Servo Max: " + tilt_servo_max)
        print("Tilt Servo Mid: " + tilt_servo_mid)
        print("Pan Servo Min: " + pan_servo_min)
        print("Pan Servo Max: " + pan_servo_max)
        print("Pan Servo Mid: " + pan_servo_mid)

        return [
            tilt_servo_min, tilt_servo_max, tilt_servo_mid, pan_servo_min,
            pan_servo_max, pan_servo_mid
        ]
def readData(file="task_list_OfTasks.csv"):
    path = Path(getDir(file))
    if path.is_file():
        data = []
        with open(path, "r", newline="") as file:
            reader = csv.reader(file)
            for row in reader:
                data.append(row)
        return data
    raise Exception("Bad File ya Tried to reach there of:\n\r", path)
Example #28
def is_valid_clean_regex(parser, file_path):
    if path_exists(file_path) and is_file(file_path):
        with open(file_path, 'r') as f:
            patterns = f.read().splitlines()
        return [p.replace('[[:digit:]]', '\\d') for p in patterns]
    else:
        parser.error(
            'The path {} does not exist or is not a file!'.format(file_path))

    return []
Example #29
    def mouseDoubleClickEvent(self, event):
        idx = self.currentIndex()
        fInfo = self.model().fileInfo(idx)

        if fInfo:
            path = Path(fInfo.absoluteFilePath())
            if path.is_dir():
                super(CheckoutTreeView, self).mouseDoubleClickEvent(event)
            elif path.is_file():
                self.doubleClick.emit(idx, fInfo)
Example #30
 def _cmsis_required(self, project_path: Union[str, pathlib.Path]) -> bool:
     """Check if CMSIS dependency is required."""
     project_path = pathlib.Path(project_path)
     for path in (project_path / "codegen" / "host" / "src").iterdir():
         if path.is_file():
             with open(path, "r") as lib_f:
                 lib_content = lib_f.read()
             if "<arm_nnsupportfunctions.h>" in lib_content and "<arm_math.h>" in lib_content:
                 return True
     return False
Example #31
def validate_fileNames(fileList, extension=''):
    incorrect_paths = []
    all_clear = True
    for file in fileList:
        path = Path(file + extension)
        if not path.is_file():
            incorrect_paths.append(str(path))
            all_clear = False

    return all_clear, incorrect_paths
Example #32
def recursively_check( input_path ):
	if is_file( input_path ):
		try_to_move( input_path )

	elif is_dir( input_path ):
		directory_contents = list_dir( input_path )

		for file in directory_contents:
			file_path = path.join( input_path, file )
			recursively_check( file_path )
Example #33
def append_line_if_missing(path, line):
  assert line.endswith('\n')
  if is_file(path):
    for l in open(path):
      if l == line:
        errSL('already setup:', path)
        return
  errSL('modifying:', path)
  with open(path, 'a') as f:
    f.write('\n# automatically added by gloss-install-user.py\n')
    f.write(line)
Example #34
File: __main__.py  Project: gwk/pat
def main_create(args: Namespace) -> None:
  'create command entry point.'
  original = args.original
  modified = args.modified
  patch = args.patch
  if not is_file(original): exit("pat create error: 'original' is not an existing file: " + original)
  if path_exists(modified): exit("pat create error: 'modified' file already exists: " + modified)
  if path_exists(patch):    exit("pat create error: 'patch' file already exists: " + patch)
  with open(patch, 'w') as f:
    f.write('pat v' + pat_version + '\n')
    f.write(original + '\n')
  copyfile(original, modified)
Example #35
File: git-prompt.py  Project: gwk/gloss
  def find_branch():
    'Returns a pair: branch string (needs to be stripped) and mode suffix.'
    if is_file(gd + '/rebase-merge/interactive'):
      return open(gd + '/rebase-merge/head-name').read(), '|REBASE-i'
    if is_dir(gd + '/rebase-merge'):
      return open(gd + '/rebase-merge/head-name').read(), '|REBASE-m'

    # determine suffix first.
    if is_dir(gd + '/rebase-apply'):
      if is_file(gd + '/rebase-apply/rebasing'):
        s = '|REBASE'
      elif is_file(gd + '/rebase-apply/applying'):
        s = '|AM'
      else:
        s = '|AM/REBASE'
    elif is_file(gd + '/MERGE_HEAD'):
      s = '|MERGE'
    elif is_file(gd + '/CHERRY_PICK_HEAD'):
      s = '|CHERRY-PICK'
    elif is_file(gd + '/BISECT_LOG'):
      s = '|BISECT'
    else:
      s = ''

    c, b = runCO('git symbolic-ref HEAD')
    if c == 0:
      return b, s
    # detached.
    c, b = runCO('git describe --contains --all HEAD')
    if c == 0:
      return b, s
    # last option.
    try: head_sha = open(gd + '/HEAD').read()[:8]
    except FileNotFoundError: head_sha = 'unknown'
    return '({})'.format(head_sha), s
Example #36
def main():
  arg_parser = ArgumentParser(description='Count lines of source code.')
  arg_parser.add_argument('paths', nargs='+', help='Directories to explore.')
  args = arg_parser.parse_args()

  files = Counter()
  lines = Counter()
  blank = Counter()
  other_exts = Counter()

  # iterate over all paths.
  for top in args.paths:
    if is_file(top):
      count_path(top, files, lines, blank, other_exts)
      continue
    if ignore_dir_name(path_name(top)):
      continue
    for dirpath, dirnames, filenames in os.walk(top):
      # filter dirnames.
      dirnames[:] = [n for n in dirnames if not ignore_dir_name(n)]
      for name in filenames:
        path = path_join(dirpath, name)
        count_path(path, files, lines, blank, other_exts)

  for group_key in group_keys:
    group = groups[group_key]
    non_zero = False
    total_key = group_key.upper() + ' TOTAL'
    sorted_keys = sorted(group['exts'], key=lambda k: lines[k]) # sort by substantial line count.
    # tricky: count values for total_key as we go, then print values for total_key last.
    for e in sorted_keys + [total_key]:
      f = files[e]
      if f < 1:
        continue
      non_zero = True
      print('{:>12}: {:>5}  '.format(e, f), end='')
      files[total_key] += f
      if e in lines:
        l = lines[e]
        b = blank[e]
        t = l + b
        frac = float(l / t) if t > 0 else 0.0
        print(' {:>12,} lines; {:>12,} ({:.2f}) full'.format(t, l, frac), end='')
        lines[total_key] += l
        blank[total_key] += b
      print()
    if non_zero:
      print()

  if other_exts:
    items = sorted(other_exts.items(), key=lambda i: (-i[1], i[0]))
    print('; '.join('{}: {}'.format(ext, count) for (ext, count) in items))
Example #37
File: views.py  Project: LuRsT/datdash
def serve_widget(widget_name):
    html = "%s.html" % widget_name
    path = join_path(
        getcwd(),
        DATDASH_APP_NAME,
        "widgets",
        widget_name,
        html
    )
    if is_file(path):
        f = open(path)
        contents = f.read()
        f.close()
        return contents
Example #38
    def export_variable_definitions(self) -> None:
        """TODO DOCSTRING
        :return: 
        """

        path = Path(self.variable_definition_path)
        if path.is_file():
            # delete old backup
            os.remove(self.variable_definition_path.join('.old'))

            # mark last saved variable definition as "old"
            os.rename(self.variable_definition_path, self.variable_definition_path('.old'))

        # serialize current active variable definition
        with open(self.variable_definition_path, 'w') as outfile:
            json.dump(self.variables, outfile)
Example #39
def new_project_folder(here, source, data):
    """Create a new bakedbeans project folder."""
    def finalize():
        copy_template_tree(path(here, 'tin'), project_root, data)
        # Ensure top-level package.
        with open(path(src_folder, '__init__.py'), 'a'):
            pass
        # TODO: git stuff

    there = location_of(source)
    if is_file(source):
        name = data['project_name']
        # Fail fast if the project folder couldn't be made.
        project_root = path(there, name)
        # TODO: make this sync up with package_dir in setup.py
        src_folder = path(there, name, 'src')
        make_directory(src_folder)
        move(source, src_folder)
        finalize()
    elif is_directory(source):
        # TODO
        print("not implemented yet.")
    else:
        raise OSError("couldn't find file or directory")
Example #40
 def _is_valid_file(path):
     try:
         path = Path(path.__str__())
         return path.exists() and path.is_file()
     except TypeError:
         return False
Example #41
 def includes_iterator():
     for pattern in includes:
         for path in search_base.glob(pattern):
             # TODO(bhamiltoncx): Handle hidden files on Windows.
             if path.is_file() and (include_dotfiles or not path.name.startswith('.')):
                 yield path.relative_to(search_base)
Example #42
 def __iter__(self):
     for path in self._path.iterdir():
         if not path.match('.*') and path.is_file():
             yield Image.open(str(path.absolute()))
Example #43
    mongodb_url = 'mongodb://localhost:27017/'
else:
    from os import environ as environment
    lock_file_name = environment['OPENSHIFT_REPO_DIR'] + 'crawler.lock'
    log_file_name = environment['OPENSHIFT_REPO_DIR'] + 'crawler.log'
    mongodb_url = environment['OPENSHIFT_MONGODB_DB_URL']

ts_now = datetime.utcnow()

def log(message):
    with open(log_file_name, 'a') as log_file:
        log_file.write(str(message) + line_sep)

log(line_sep + str(ts_now))

if is_file(lock_file_name):
    log('Already running. Probably.')
    exit()

open(lock_file_name, 'a').close()

import re
def nyaa_rss_parser(tree):
    oldest_torrent_ts = ts_now
    for item in tree.findall('.//item'):
        try:
            title, category, torrent_link, details_link, meta_string, date_string = (child.text for child in item.getchildren())
            s, l, d = re.match('^([0-9]+) seeder\(s\), ([0-9]+) leecher\(s\), ([0-9]+) download\(s\)', meta_string).groups()
            id = ObjectID('{:0>24}'.format(hex(int(re.match('.*=([0-9]+)$', torrent_link).group(1)))[2:]))
            #timestamp = datetime.strptime(date_string, '%a, %d %b %Y %H:%M:%S %z')
            torrent_ts = datetime.strptime(date_string, '%a, %d %b %Y %H:%M:%S +0000')
Example #44
File: generate.py  Project: agdsn/hades
 def list_templates(self):
     return set(str(path.relative_to(base))
                for base, path in yield_all_sources(*self.search_paths)
                if not path.is_symlink() and path.is_file()
                and path.suffix == self.template_suffix)
Example #45
def file_exists(p):
    return is_file(p) and path_exists(p)
#!/usr/bin/env python
# coding=utf-8

import os.path
import pathlib

# Check whether the given path is a file; this distinguishes files from directories
print(os.path.isfile('/etc/passwd'))    # True
print(os.path.isfile('/etc'))           # False
print(os.path.isfile('/does/not/exists'))   # False

# Check whether the given path exists; this does not distinguish files from directories
print(os.path.exists('/etc/passwd'))    # True
print(os.path.exists('/etc'))           # True
print(os.path.exists('/does/not/exists'))   # False

# pathlib ships with Python 3; on Python 2 it must be installed separately
path = pathlib.Path("/etc/passwd")
print(path.exists())                    # True
print(path.is_file())                   # True
print(path.is_dir())                    # False