def load_sprite(path):
    """Load a sprite from a metadata file path.

    The sprite metadata file must be a JSON object with 'name' and 'path'
    fields. The 'name' is a string that uniquely identifies the sprite;
    the 'path' is the sprite's image file location relative to the
    metadata file.

    The returned metadata dictionary additionally contains:

        * `image` -- a Pillow image as loaded from `path`.
        * `width` -- the width of the sprite image.
        * `height` -- the height of the sprite image.

    :returns: the sprite metadata dictionary.
    :raises ValueError: if the 'name' or 'path' field is missing or not a
        string.
    """
    log.info("Loading sprite from {}".format(path))
    with path.open() as meta_fp:
        sprite = json.load(meta_fp)
    if not isinstance(sprite.get("name"), str):
        raise ValueError(
            "Sprite missing 'name' field or the name is not a string")
    # Validate 'path' too instead of letting a bare KeyError escape below.
    if not isinstance(sprite.get("path"), str):
        raise ValueError(
            "Sprite missing 'path' field or the path is not a string")
    # Resolve the image path relative to the metadata file's directory.
    sprite["path"] = path.parent / pathlib.Path(sprite["path"])
    log.info("Loading sprite image from {}".format(sprite["path"]))
    sprite["image"] = image.open(str(sprite["path"]))
    # Pillow's Image.size is a (width, height) tuple.
    sprite["width"], sprite["height"] = sprite["image"].size
    return sprite
Exemplo n.º 2
0
def hash_file(path, algorithm='md5'):
    """Return a hashlib digest object over the contents of *path*.

    :param path: file path (str or os.PathLike).
    :param algorithm: any algorithm name accepted by :func:`hashlib.new`.
    :returns: the updated hash object (call ``.hexdigest()`` for the value).
    """
    # pathlib is stdlib since Python 3.4 -- no compat shim needed.
    path = pathlib.Path(path)
    # Renamed from ``hash`` to avoid shadowing the builtin.
    digest = hashlib.new(algorithm)
    with path.open('rb') as f:
        # Read fixed-size chunks so large files never load fully into RAM.
        for chunk in iter(lambda: f.read(65536), b''):
            digest.update(chunk)
    return digest
Exemplo n.º 3
0
def load_scripts(path):
    """Compile and execute python script files into the shared Env.

    If *path* is a directory, its ``*.py`` files are loaded recursively in
    a deterministic order; if it is a file, it is compiled and executed
    with ``__FILE__`` temporarily set in the environment.

    :param path: file or directory (str or pathlib.Path).
    :raises Exception: if *path* is neither a file nor a directory.
    """
    if not isinstance(path, pathlib.Path):
        # Accept plain strings; expand '~' before converting.
        path = pathlib.Path(os.path.expanduser(str(path)))
    if path.is_dir():
        scripts = path.glob('*.py')
        # sort b.50.py before a.80.py (order on the numeric middle suffix)
        scripts = sorted(
            scripts,
            key=lambda p: [p.suffixes[-2][1:] if len(p.suffixes) > 1 else "", p])
        for script in scripts:
            load_scripts(script)
    elif path.is_file():
        with path.open("rt") as f:
            source = f.read()
        code = compile(source, str(path), "exec")
        env.Env['__FILE__'] = str(path.absolute())
        try:
            exec(code, env.Env.dict)
        except NotImplementedError:
            # Script wanted to be ignored
            pass
        finally:
            # BUGFIX: previously __FILE__ leaked into Env whenever the
            # script raised NotImplementedError (the del was skipped).
            del env.Env['__FILE__']
        SCRIPTS.append(path)
    else:
        raise Exception("neither file nor dir in load_Scripts", path)
Exemplo n.º 4
0
def chunk_iter(path, csvnum, chunk_size):
    """Yield ``(csvnum, chunk_no, chunk)`` tuples from a zipped CSV.

    *path* must point to a zip archive containing exactly one CSV member;
    columns 5 and 6 are parsed as dates. Chunk numbering starts at 0.
    """
    with path.open('rb') as raw:
        archive = zipfile.ZipFile(raw)
        # The archive is expected to hold exactly one member.
        member, = archive.namelist()
        with archive.open(member) as csv_stream:
            reader = pandas.read_csv(csv_stream, chunksize=chunk_size,
                                     parse_dates=[5, 6])
            for index, frame in enumerate(reader):
                yield csvnum, index, frame
Exemplo n.º 5
0
 def __init__(self, path):
     """Bind this object to *path*, loading its content when it exists.

     A missing file is created empty via ``touch`` (and ``self.content``
     is left unset in that case).
     """
     self.path = path
     self.title = path.name
     if not path.exists():
         path.touch()
     else:
         with path.open() as handle:
             self.content = handle.read()
     self.updated = self._getmtime()
Exemplo n.º 6
0
def restart(*more_args):
    """ Check if all loaded Scripts can be compiled and then restart the python
        programm using sys.executable, "-m ht3" and args, where args is all
        -f, -s and -r args, NOT -x.
    """
    import os, sys
    # Refuse to restart if any loaded script no longer compiles.
    for path in ht3.lib.SCRIPTS:
        with path.open("rt") as f:
            c = f.read()
        try:
            compile(c, str(path), "exec")
        except Exception as e:
            handle_exception(e)
            return
    args = []
    if Check.os.win:
        # Windows execv needs the interpreter path quoted.
        args.append('"' + sys.executable + '"')
    else:
        args.append(sys.executable)
    args += ['-m', 'ht3']
    if '_RESTARTED' not in Env.dict:
        args += ['-e', '_RESTARTED', '1']

    # Re-forward the original command line, bumping the restart counter.
    it = iter(sys.argv[1:])
    for a in it:
        if a in ['-f', '-s']:
            args += [a, next(it)]
        elif a == '-e':
            k = next(it)
            v = next(it)
            if k == '_RESTARTED':
                # BUGFIX: compare the key, not the value -- the old code
                # tested ``v == '_RESTARTED'`` and then crashed on int(v).
                v = str(int(v) + 1)
            args += [a, k, v]
        elif a == '-r':
            args += [a]
        else:
            assert a == '-x'
            next(it)    # dont execute -x

    it = iter(more_args)
    for a in it:
        if a in ['-f', '-s', '-x']:
            args += [a, next(it)]
        elif a == '-e':
            k = next(it)
            v = next(it)
            if k == '_RESTARTED':
                # BUGFIX: same key/value mix-up as above.
                v = str(int(v) + 1)
            args += [a, k, v]
        elif a == '-r':
            args += [a]
        else:
            raise ValueError("Unsupported Argument", a)


    os.execv(sys.executable, args)
Exemplo n.º 7
0
 def test_tofile_fromfile(self):
     """Round-trip a structured record array through tofile/fromfile."""
     with temppath(suffix='.bin') as tmp:
         tmp = Path(tmp)
         np.random.seed(123)
         original = np.random.rand(10).astype('f8,i4,a5')
         original[5] = (0.5, 10, 'abcde')
         with tmp.open("wb") as handle:
             original.tofile(handle)
         restored = np.core.records.fromfile(
             tmp, formats='f8,i4,a5', shape=10)
         assert_array_equal(restored, original)
Exemplo n.º 8
0
def print_tail_of_log_file(path: Path, out: IO[bytes]) -> None:
    """Write the last ~20 KiB of the log at *path* to *out*.

    Any error while reading is reported on *out* instead of raised.
    """
    tail_bytes = 20 * 1024
    try:
        out.write(b"\nMost recent Eden logs:\n")
        with path.open("rb") as logfile:
            # Seek to at most tail_bytes before EOF, then dump the rest.
            end = logfile.seek(0, io.SEEK_END)
            logfile.seek(max(0, end - tail_bytes), io.SEEK_SET)
            out.write(logfile.read())
    except Exception as exc:
        out.write(b"Error reading the log file: %s\n" % str(exc).encode())
def _get_command(path, command_directory):
  with path.open('r') as f:
    contents = f.read().split('\0')
    if len(contents) != 2:
      # Old/incomplete file or something; silently ignore it.
      return None
    return '''{
        "directory": "%s",
        "command": "%s",
        "file": "%s",
      },''' % (command_directory, contents[0].replace('"', '\\"'), contents[1])
    def testPathlibPatch(self):
        """pathlib.Path.open must route writes through the fake filesystem."""
        import pathlib

        name = 'test.txt'
        with pathlib.Path(name).open('w') as handle:
            handle.write('test')

        self.assertTrue(self.fs.exists(name))
        stored = self.fs.get_object(name)
        self.assertEqual('test', stored.contents)
Exemplo n.º 11
0
def _get_command(path, command_directory):
    with path.open('r') as f:
        contents = f.read().split('\0')
        if len(contents) != 2:
            # Old/incomplete file or something; silently ignore it.
            return None
        return '''{
        "directory": "%s",
        "command": "%s",
        "file": "%s"
      },''' % (command_directory, ' '.join(new_command_spl), contents[1])
Exemplo n.º 12
0
def read_requirements(path: str):
    """Parse a pip requirements file into a list of requirement strings.

    Nested ``-r other.txt`` includes are followed recursively and their
    paths are resolved relative to the including file (matching pip's
    behaviour), not the current working directory.

    :param path: path to the requirements file.
    :returns: list of stripped requirement lines.
    """
    res = []
    path = Path(path)
    with path.open(encoding='UTF-8') as f:
        for line in f.readlines():
            if line.startswith('-r'):
                file = line[len('-r'):].strip()
                # BUGFIX: resolve the include relative to this file's
                # directory instead of the process CWD.
                res += read_requirements(path.parent / file)
            else:
                res.append(line.strip())
    return res
Exemplo n.º 13
0
def SetupUnregisteredRemovableVolume(path, name):
    """Sets up a removable volume to be recognized during scanning for removable volumes.
    Usually this just sets up a 'volume.name' file in the root with the name.

    Warning.  This just works on the given path.  It doesn't do any checking to make sure
    the given path is actually a removable media of any kind.

    """
    marker = pathlib.Path(os.path.join(path, "volume.name"))
    with marker.open('w') as handle:
        # First line is a fixed header comment, second line is the name.
        handle.writelines(['#Volume name for fiepipe local volume\n', name])
Exemplo n.º 14
0
    def testPathlibPathPatch(self):
        """Writes via pathlib.Path must land in the fake filesystem."""
        from pathlib import Path

        name = 'test.txt'
        with Path(name).open('w') as handle:
            handle.write('test')

        self.assertTrue(self.fs.exists(name))
        stored = self.fs.get_object(name)
        self.assertEqual('test', stored.contents)
Exemplo n.º 15
0
 def test_tofile_fromfile(self):
     """Structured array written with tofile must round-trip via fromfile."""
     with temppath(suffix='.bin') as tmp:
         tmp = Path(tmp)
         np.random.seed(123)
         original = np.random.rand(10).astype('f8,i4,a5')
         original[5] = (0.5, 10, 'abcde')
         with tmp.open("wb") as handle:
             original.tofile(handle)
         restored = np.core.records.fromfile(
             tmp, formats='f8,i4,a5', shape=10)
         assert_array_equal(restored, original)
Exemplo n.º 16
0
    def from_yaml_file(cls, path: typing.Optional[pathlib.Path] = None) -> 'Settings':
        """Build a Settings instance from a YAML config file.

        Falls back to the default config location when *path* is None.
        File-access failures are wrapped in utils.UsageError.
        """
        if path is None:
            path = get_config_file_path()
        logger.debug(f"Loading from {path}")

        try:
            with path.open('r') as fh:
                return cls.from_yaml_stream(fh)
        except FileNotFoundError as error:
            raise utils.UsageError(f'Could not find the file {error.filename}')
        except OSError as error:
            raise utils.UsageError(f'Failed to open config file: {error}')
Exemplo n.º 17
0
def write_trends_in_file(crypto_trends, dir_path):
    """
    Write crypto trends dict in a text file

    The file is named after the previous hour (e.g. ``13.txt``) inside
    *dir_path*; each line is ``<crypto> <trend>``.
    """
    scan_time = datetime.now() - timedelta(hours=1)
    path = dir_path / (scan_time.strftime("%H") + ".txt")

    with path.open('w') as file:
        for crypto in crypto_trends:
            file.write(
                str(crypto) + " " + str(crypto_trends.get(crypto)) + '\n')
        # FIX: removed the redundant explicit close() -- the ``with``
        # statement already closes the file on exit.
Exemplo n.º 18
0
def test_save_streamed_file_not_closed(resources, outdir):
    """pikepdf must not close a stream object that the caller opened."""
    with Pdf.open(resources / 'pal.pdf') as pdf:
        target = outdir / "pal.pdf"
        stream = target.open('wb')

        def confirm_opened(progress_percent):
            # At 0% progress the output stream must already be open.
            if progress_percent == 0:
                assert file_descriptor_is_open_for(target)

        pdf.save(stream, progress=confirm_opened)
        assert file_descriptor_is_open_for(
            target), "pikepdf closed a stream it did not open"
Exemplo n.º 19
0
 def _presetup_no_languages(self) -> None:
     """Do not download translation files."""
     # Skipping apt "Translations" files saves download time and space;
     # deleting the dropped file re-enables them.
     content = """\
     # In Docker, we don't often need the "Translations" files, so we're
     # just wasting time and space by downloading them, and this inhibits
     # that.  For users that do need them, it's a simple matter to delete
     # this file and "apt-get update". :)
     Acquire::Languages "none";
     """
     target = self._target.joinpath(
         'etc', 'apt', 'apt.conf.d', 'docker-no-languages')
     with target.open('wt') as handle:
         handle.write(textwrap.dedent(content))
Exemplo n.º 20
0
    def flow_loader(self, n):
        """Load optical-flow sample *n* from ``self.path`` as a torch tensor.

        Reads a binary ``.fo`` flow file: a float32 tag, then int32 width
        and height, then ``width * height * 2`` float32 values (two bands),
        and returns the flow center-cropped to ``self.opt.fineSize``.
        """
        nfile = "/%05d.fo" % n
        path = Path(self.path + nfile)
        # NOTE(review): the file is opened in text mode ('r'); binary mode
        # ('rb') would be safer for np.fromfile -- confirm before changing.
        with path.open(mode='r') as flo:
            # Sequential reads: header fields first, then the packed values.
            tag = np.fromfile(flo, np.float32, count=1)[0]
            width = np.fromfile(flo, np.int32, count=1)[0]
            height = np.fromfile(flo, np.int32, count=1)[0]
            nbands = 2
            tmp = np.fromfile(flo, np.float32, count=nbands * width * height)
            # Reshape the flat buffer to (height, width, 2).
            flow = np.resize(tmp, (int(height), int(width), int(nbands)))

        return torch.from_numpy(center_crop(flow, self.opt.fineSize))
Exemplo n.º 21
0
 def _try_read_source_json(self, path: Path):
     """Return parsed JSON from *path*, or None (reporting errors) on failure."""
     try:
         with path.open(encoding="utf-8") as source:
             return json.load(source)
     except ValueError:
         # Malformed JSON.
         print_e(f"Couldn't parse JSON in {path}.")
         self._error_msg(_("Couldn't parse JSON in %s") % path)
     except OSError:
         # Missing or unreadable file.
         print_e(f"Couldn't read {path}")
         self._error_msg(_("Couldn't read %s") % path)
     return None
Exemplo n.º 22
0
    def create(dependency: CounterTokenDependency):
        """Build a TokenFile for *dependency* and persist it to disk."""
        token_path = dependency._token.path / dependency.name
        remaining = dependency.count
        target_uri = str(dependency.target.basepath)

        # Bypass __init__ -- TokenFile instances are materialized directly.
        self = object.__new__(TokenFile)
        self.count = remaining
        self.uri = target_uri
        self.path = token_path
        logging.debug("Writing token file %s", token_path)
        # File layout: first line the count, second line the URI.
        with token_path.open("wt") as fp:
            fp.write(f"{str(remaining)}\n{target_uri}\n")
        return self
Exemplo n.º 23
0
 def test_tofile_fromfile(self):
     """Round-trip a little-endian record array through tofile/fromfile."""
     with temppath(suffix='.bin') as tmp:
         tmp = Path(tmp)
         original = np.empty(10, dtype='f8,i4,a5')
         original[5] = (0.5, 10, 'abcde')
         # NOTE(review): newbyteorder() returns a new array and the result
         # is discarded here, so the call has no effect -- kept for parity.
         original.newbyteorder('<')
         with tmp.open("wb") as handle:
             original.tofile(handle)
         restored = np.core.records.fromfile(
             tmp, formats='f8,i4,a5', shape=10, byteorder='<')
         assert_array_equal(restored, original)
Exemplo n.º 24
0
def print_log_file(path: Path,
                   out: IO[bytes],
                   whole_file: bool,
                   size: int = 1000000) -> None:
    """Stream the log at *path* to *out*.

    When *whole_file* is False only the trailing *size* bytes are written.
    Read errors are reported on *out* rather than raised.
    """
    try:
        with path.open("rb") as logfile:
            if not whole_file:
                # Position at most `size` bytes before EOF.
                tail = size
                file_end = logfile.seek(0, io.SEEK_END)
                logfile.seek(max(0, file_end - tail), io.SEEK_SET)
            for data in read_chunk(logfile):
                out.write(data)
    except Exception as e:
        out.write(b"Error reading the log file: %s\n" % str(e).encode())
Exemplo n.º 25
0
 def _extract(self, entry, output_path):
     """Extract the specified entry."""
     if self.closed:
         raise ValueError("Archive is already closed.")
     if self.mode != "r":
         raise ValueError(
             "Cannot extract entry from archive which is open for writing.")
     if not isinstance(entry, LMArchiveInfo):
         entry = self.getinfo(entry)
     # Resolve the destination and make sure its directory tree exists.
     destination = Path.joinpath(output_path, entry.path).expanduser().resolve()
     destination.parent.mkdir(parents=True, exist_ok=True)
     with destination.open("wb") as sink:
         sink.write(self.read(entry))
Exemplo n.º 26
0
def generate(path, out=None, **kwargs):
    """Render *path* through the substitution regex into the build directory.

    When *out* is given only its file name is used for the target;
    otherwise the input file's name is kept.
    """
    args = Args(kwargs)
    build.mkdir(exist_ok=True)

    target_name = path.name if out is None else out.name
    target = build / target_name

    with path.open() as stream:
        rendered = prog.sub(args.replace, stream.read())

    with target.open(mode='w') as sink:
        sink.write(rendered)
Exemplo n.º 27
0
 def _read(self, dirs, filename, binary=False):
     '''
     Return text/binary contents of a file, None if file does not exist.

     :param dirs: list of directory components inside the zip archive.
     :param filename: name of the member file.
     :param binary: return bytes when True, text when False.
     '''
     # BUGFIX: the old code appended ``filename`` both here and again on
     # the next line, producing paths like "a/b/name.txtname.txt".
     dirpath = '/'.join(dirs) + '/' if len(dirs) else ''
     path = zipfile.Path(self._zipfile, dirpath + filename)
     if path.exists():
         if binary:
             return path.read_bytes()
         else:
             # BUGFIX: '\n'.join(f.readlines()) doubled every line break,
             # since readlines() keeps the trailing newlines.
             with path.open() as f:
                 return f.read()
     else:
         return None
Exemplo n.º 28
0
def _get_command(path, command_directory):
    with path.open("r") as f:
        contents = f.read().split("\0")
        if len(contents) != 2:
            # Old/incomplete file or something; silently ignore it.
            return None
        return """{
        "directory": "%s",
        "command": "%s",
        "file": "%s"
      }""" % (
            command_directory,
            contents[0].replace('"', '\\"'),
            contents[1],
        )
Exemplo n.º 29
0
 def _presetup_gzip_indexes(self) -> None:
     """Request zipped version of indexes."""
     # Keeping apt list files gzipped on disk shrinks image layers a lot.
     content = """\
     # Since Docker users using
     # "RUN apt-get update && apt-get install -y ..." in their Dockerfiles
     # don't go delete the lists files afterwards, we want them to be as
     # small as possible on-disk, so we explicitly request "gz" versions and
     # tell Apt to keep them gzipped on-disk. For comparison, an
     # "apt-get update" layer without this on a pristine "debian:wheezy"
     # base image was "29.88 MB", where with this it was only "8.273 MB".
     Acquire::GzipIndexes "true";
     Acquire::CompressionTypes::Order:: "gz";
     """
     target = self._target.joinpath(
         'etc', 'apt', 'apt.conf.d', 'docker-gzip-indexes')
     with target.open('wt') as handle:
         handle.write(textwrap.dedent(content))
Exemplo n.º 30
0
    def signing_service_config(self):
        """Load the signing-service YAML config for this application.

        Tries ``<application_id>.yml`` first, then ``__default__.yml``.
        Raises RuntimeError when no config path is set or no file exists.
        """
        if not self.signing_services:
            raise RuntimeError("No signing service config path set")

        candidates = (
            self.signing_services / f'{self.application_id}.yml',
            self.signing_services / '__default__.yml',
        )

        for candidate in candidates:
            if candidate.exists():
                with candidate.open('r') as f:
                    return yaml.safe_load(f.read())

        raise RuntimeError(
            f"No service config found at {self.signing_services}")
Exemplo n.º 31
0
def iter_json_file(path: Path,
                   lookup: str) -> Generator[Union[dict, list], Any, None]:
    """
    Loads given 'path' file, perform lookup and return generator over json list.
    Does not open file until iteration is started.

    :param path: File Path instance
    :param lookup: Dot separated lookup path
    :return:
    """
    with path.open() as handle:
        document = json.load(handle)
    found = dict_lookup(document, lookup)
    assert isinstance(
        found, list
    ), f"Dict lookup return {type(found)} but list is expected, check your lookup path"
    yield from found
def _get_command(path, command_directory):
    """
    Args:
        path: The pathlib.Path to _compile_command file.
        command_directory: The directory commands are run from.
    Returns a string to stick in compile_commands.json.
    """

    with path.open("r") as f:
        contents = f.read().split("\0")
        if len(contents) != 2:
            # Old/incomplete file or something; silently ignore it.
            return None
        return """{
    "directory": "%s",
    "command": "%s",
    "file": "%s"
    }""" % (command_directory, contents[0].replace('"', '\\"'), contents[1])
Exemplo n.º 33
0
def log_to_file(file, line, with_timestamp=True):
    """Append line to file.

    If file is a single dash, write to stdout instead.

    """
    entry = f'{line.rstrip()}\n'
    if with_timestamp:
        stamp = datetime.datetime.now().isoformat()
        entry = f'[{stamp}] {entry}'
    if file == '-':
        sys.stdout.write(entry)
        return
    target = Path(file)
    if not target.exists():
        # Create group-writable so sibling processes can also append.
        target.touch(mode=0o664)
    with target.open('a') as fp:
        fp.write(entry)
Exemplo n.º 34
0
def init_config_file(cli_provided: t.IO = None) -> CollaborationConfig:
    """Initialize the collaboration file from a variety of sources"""
    if cli_provided is not None:
        return CollaborationConfig.load(cli_provided)

    # Prefer a local te.cfg, then the user's home copy.
    for loc in ("te.cfg", "~/te.cfg"):
        candidate = pathlib.Path(loc).expanduser()
        if candidate.exists():
            with candidate.open() as f:
                return CollaborationConfig.load(f)

    print(
        ("Looks like you haven't set up a collaboration config, "
         "so using the sample one against public data"),
        file=sys.stderr,
    )
    return CollaborationConfig.get_example_config()
Exemplo n.º 35
0
def verify(name, path, size, sha256_hash):
    """Validate a downloaded tar archive.

    Checks that *path* is a tar file, that its size matches *size*
    (a decimal string) and, when *sha256_hash* is non-empty, that its
    sha256 digest matches. Prints a diagnostic and returns False on any
    mismatch; True otherwise.
    """
    if not tarfile.is_tarfile(str(path)):
        print('Error: ' + name + ' is not a valid tar archive!')
        return False
    actual_size = os.path.getsize(str(path))
    if actual_size != int(size):
        print('Error: size of ' + name + ' (' + str(actual_size) +
              ') does not match expected value (' + size + ')!')
        return False
    if sha256_hash:
        digest = hashlib.sha256()
        with path.open(mode='rb') as f:
            digest.update(f.read())
        if digest.hexdigest() != sha256_hash:
            print('Error: sha256 hash of ' + name +
                  ' does not match provided value!')
            return False
    return True
Exemplo n.º 36
0
def main():
    """Convert playlists and write each result to disk; returns exit code 0."""
    cli_args = parse_args(sys.argv[1:])
    verbose = cli_args["verbose"]
    converted = playlister(**cli_args)
    num_files = len(converted)

    for i, (path, contents) in enumerate(converted):
        if verbose:
            print("Writing {} of {}: {}...".format(i + 1, num_files,
                                                   str(path)))
        with path.open("w") as f:
            f.write(contents)
            if verbose:
                print("done.")

    return 0
Exemplo n.º 37
0
def write_toml(
    path,
    struct,
    expandvars=False,
    buffering=-1,
    encoding=None,
    errors=None,
    newline=None,
):
    """Serialize *struct* as TOML into *path*.

    The open() keyword arguments are forwarded unchanged; *expandvars*
    controls environment-variable expansion in the path helper.
    """
    target = file(path, expandvars=expandvars)
    open_kwargs = dict(
        mode='w',
        buffering=buffering,
        encoding=encoding,
        errors=errors,
        newline=newline,
    )
    with target.open(**open_kwargs) as fout:
        toml.dump(struct, fout)
 def __GetLicenseText(self):
     """Return the license body text for the configured license.

     Prefers the bundled template file under res/template/license/;
     otherwise falls back to a short notice plus a copyright line built
     from the license author (or the configured username).
     """
     path = pathlib.Path(__file__).parent.parent / (
         'res/template/license/' + self.__args.license)
     if path.is_file():
         # Removed the dead ``source = None`` local.
         with path.open() as f:
             return f.read()
     else:
         try:
             author = self.__LoadLicenseAuthor()
         except Exception:
             # BUGFIX: narrowed from a bare ``except:`` so SystemExit and
             # KeyboardInterrupt are no longer swallowed.
             author = None
         if author is None: author = self.__args.username
         copyright = ''
         if author is not None:
             copyright = '\n\n' + 'Copyright (c) {} {}'.format(
                 '{0:%Y}'.format(datetime.datetime.now()), author)
         return 'このソフトウェアは[{}](LICENSE.txt)ライセンスである。'.format(
             self.__args.license) + copyright
Exemplo n.º 39
0
 def _presetup_dpkg_path_exclude(self) -> None:
     """Reducing the size of the image by excluding unnecessary files, like
     man pages, docs, translations and so on.
     """
     # dpkg filters: drop docs/locale/man pages at package install time.
     content = """\
     path-exclude */__pycache__/*
     path-exclude /usr/share/doc/*
     # we need to keep copyright files for legal reasons
     path-include /usr/share/doc/*/copyright
     path-exclude /usr/share/groff/*
     path-exclude /usr/share/info/*
     path-exclude /usr/share/lintian/*
     path-exclude /usr/share/linda/*
     path-exclude /usr/share/locale/*
     path-exclude /usr/share/man/*
     """
     target = self._target.joinpath(
         'etc', 'dpkg', 'dpkg.cfg.d', 'path_exclude')
     with target.open('wt') as handle:
         handle.write(textwrap.dedent(content))
Exemplo n.º 40
0
def _get_command(path, command_directory):
    '''Read a _compile_command file and return the parsed JSON string.
    Args:
      path: The pathlib.Path to _compile_command file.
      command_directory: The directory commands are run from.
    Returns:
      a string to stick in compile_commands.json.
    '''
    with path.open('r') as f:
        contents = f.read().split('\0')
        if len(contents) != 2:
            # Old/incomplete file or something; silently ignore it.
            return None

        command, file = contents
        return textwrap.dedent('''\
        {
          "directory": "%s",
          "command": "%s",
          "file": "%s"
        }''' % (command_directory, command.replace('"', '\\"'), file))
Exemplo n.º 41
0
def download_file(opts, url, path):
    """Download *url* to *path* in 10 kB chunks.

    Shows a progress bar when ``opts['--progress']`` is set. The expected
    total size comes from the Content-Length response header.
    """
    # Removed the dead ``filename`` local from the original.
    response = urllib.request.urlopen(url)
    content_length = int(response.getheader('Content-Length'))

    print_('File size: ' + str(content_length // 1000) + 'kb')

    downloaded = 0
    chunk_length = int(10E3)

    with path.open('wb') as file_:
        while True:
            bytes_ = response.read(chunk_length)
            file_.write(bytes_)

            if opts['--progress']:
                downloaded += len(bytes_)
                print_progress_bar(60, downloaded, content_length)

            # A short read means we reached EOF.
            if len(bytes_) < chunk_length:
                if opts['--progress']:
                    print('')
                break
Exemplo n.º 42
0
    def store(self, fh, ext):
        """Store a file from a filehandle."""

        # Content-addressed name: sha256 of the payload plus the extension.
        digest = hashlib.sha256()
        fh.seek(0)
        while True:
            block = fh.read(8192)
            if not block:
                break
            digest.update(block)

        filename = digest.hexdigest() + ext

        fh.seek(0)
        parent_dir = self._dir_for(filename)
        if not parent_dir.is_dir():
            parent_dir.mkdir(parents=True)

        with (parent_dir / filename).open('wb') as dest:
            shutil.copyfileobj(fh, dest)

        return filename
Exemplo n.º 43
0
def read_regex(path):
    """Compile an alternation regex from newline-separated literal entries.

    Each non-blank line of the file becomes an escaped, anchored
    alternative: ``^entry1|^entry2|...``.

    :param path: str or pathlib.Path to the entries file.
    :returns: compiled regex object.
    """
    # BUGFIX: ``basestring`` is Python 2 only and raised NameError here on
    # Python 3; simply coerce anything that is not already a Path.
    if not isinstance(path, pathlib.Path):
        path = pathlib.Path(path)
    with path.open() as file_:
        entries = file_.read().split('\n')
    expression = '|'.join(
        '^' + re.escape(piece) for piece in entries if piece.strip())
    return re.compile(expression)