Code example #1
        def wait(
            self,
            stderr=b''
        ):  # TODO: Bad choice to mimic `proc.wait()` but with different args.
            """Wait for the process and return its status code.

            :param stderr: Previously read value of stderr, in case stderr is already closed.
            :warn: may deadlock if output or error pipes are used and not handled separately.
            :raise GitCommandError: if the return status is not 0"""
            if stderr is None:
                stderr = b''
            stderr = force_bytes(stderr)

            status = self.proc.wait()

            def read_all_from_possibly_closed_stream(stream):
                try:
                    return stderr + force_bytes(stream.read())
                except ValueError:
                    return stderr or b''

            if status != 0:
                errstr = read_all_from_possibly_closed_stream(self.proc.stderr)
                log.debug('AutoInterrupt wait stderr: %r' % (errstr, ))
                raise GitCommandError(self.args, status, errstr)
            # END status handling
            return status
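For context, a hedged usage sketch (the repository path and the deliberately failing command are illustrative, not from the listing): wait() lives on the AutoInterrupt wrapper that GitPython returns for as_process=True invocations, and a non-zero exit status surfaces as the GitCommandError raised above.

# Hedged usage sketch: the repository path and the failing command are made up.
from git import Repo, GitCommandError

repo = Repo('/path/to/repo')  # hypothetical repository location
proc = repo.git.rev_parse('--verify', 'no-such-ref', as_process=True)
try:
    proc.wait()  # non-zero exit status -> GitCommandError
except GitCommandError as err:
    print(err.status)  # exit status of the git process
    print(err.stderr)  # stderr as captured and formatted into the exception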
Code example #2
        def wait(self, stderr: Union[None, bytes] = b'') -> int:
            """Wait for the process and return its status code.

            :param stderr: Previously read value of stderr, in case stderr is already closed.
            :warn: may deadlock if output or error pipes are used and not handled separately.
            :raise GitCommandError: if the return status is not 0"""
            if stderr is None:
                stderr = b''
            stderr = force_bytes(data=stderr, encoding='utf-8')

            if self.proc is not None:
                status = self.proc.wait()

                def read_all_from_possibly_closed_stream(stream):
                    try:
                        return stderr + force_bytes(stream.read())
                    except ValueError:
                        return stderr or b''

                if status != 0:
                    errstr = read_all_from_possibly_closed_stream(self.proc.stderr)
                    log.debug('AutoInterrupt wait stderr: %r' % (errstr,))
                    raise GitCommandError(remove_password_if_present(self.args), status, errstr)
            else:
                # No live process handle anymore: assume it was terminated earlier
                # and fall back to the exit status recorded on this wrapper.
                status = self.status
            # END status handling
            return status
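The behavioural difference in this variant is remove_password_if_present(), which scrubs credentials from the reported command line before it is embedded in the exception. A small hedged illustration (the clone URL is made up):

# Hedged illustration: the clone URL is made up; remove_password_if_present()
# redacts the credential portion before the args reach GitCommandError.
from git.util import remove_password_if_present

args = ['git', 'clone', 'https://user:s3cret@example.com/repo.git']
print(remove_password_if_present(args))  # password replaced with a placeholder in the URL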
Code example #3
        def wait(self, stderr=b''):  # TODO: Bad choice to mimic `proc.wait()` but with different args.
            """Wait for the process and return its status code.

            :param stderr: Previously read value of stderr, in case stderr is already closed.
            :warn: may deadlock if output or error pipes are used and not handled separately.
            :raise GitCommandError: if the return status is not 0"""
            if stderr is None:
                stderr = b''
            stderr = force_bytes(stderr)

            status = self.proc.wait()

            def read_all_from_possibly_closed_stream(stream):
                try:
                    return stderr + force_bytes(stream.read())
                except ValueError:
                    return stderr or b''

            # Debug aid in this variant: append a timestamped status line to a local log file.
            import os
            import time
            os.makedirs('logs', exist_ok=True)  # make sure the log directory exists
            with open('logs/log_git.log', 'a+', encoding='utf-8') as f1:
                local_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
                f1.write('time:{}  cmd.py 414 status:{}\n'.format(local_time, status))

            if status != 0:
                errstr = read_all_from_possibly_closed_stream(self.proc.stderr)
                log.debug('AutoInterrupt wait stderr: %r' % (errstr,))
                raise GitCommandError(self.args, status, errstr)
            # END status handling
            return status
Code example #4
File: fun.py Project: jrhauser/jrhauser
def write_cache(
        entries: Sequence[Union[BaseIndexEntry, 'IndexEntry']],
        stream: IO[bytes],
        extension_data: Union[None, bytes] = None,
        ShaStreamCls: Type[IndexFileSHA1Writer] = IndexFileSHA1Writer) -> None:
    """Write the cache represented by entries to a stream

    :param entries: **sorted** list of entries
    :param stream: stream to wrap into the ShaStreamCls - it is used for
        final output.

    :param ShaStreamCls: Type to use when writing to the stream. It produces a sha
        while writing to it, before the data is passed on to the wrapped stream

    :param extension_data: any kind of data to write as a trailer, it must begin
        with a 4 byte identifier, followed by its size (4 bytes)"""
    # wrap the stream into a compatible writer
    stream_sha = ShaStreamCls(stream)

    tell = stream_sha.tell
    write = stream_sha.write

    # header
    version = 2
    write(b"DIRC")
    write(pack(">LL", version, len(entries)))

    # body
    for entry in entries:
        beginoffset = tell()
        write(entry.ctime_bytes)  # ctime
        write(entry.mtime_bytes)  # mtime
        path_str = str(entry.path)
        path: bytes = force_bytes(path_str, encoding=defenc)
        plen = len(path) & CE_NAMEMASK  # path length
        assert plen == len(
            path), "Path %s too long to fit into index" % entry.path
        flags = plen | (entry.flags & CE_NAMEMASK_INV
                        )  # clear possible previous values
        write(
            pack(">LLLLLL20sH", entry.dev, entry.inode, entry.mode, entry.uid,
                 entry.gid, entry.size, entry.binsha, flags))
        write(path)
        real_size = ((tell() - beginoffset + 8) & ~7)
        write(b"\0" * ((beginoffset + real_size) - tell()))
    # END for each entry

    # write previously cached extensions data
    if extension_data is not None:
        stream_sha.write(extension_data)

    # write the sha over the content
    stream_sha.write_sha()
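A hedged usage sketch for the typed variant above (the repository path is illustrative): the entries of an existing IndexFile can be serialized by sorting them by path and handing them to write_cache() along with any writable byte stream.

# Hedged usage sketch: serialize an existing index image into memory.
from io import BytesIO
from git import Repo
from git.index.fun import write_cache

repo = Repo('/path/to/repo')  # hypothetical repository location
entries = sorted(repo.index.entries.values(), key=lambda e: (e.path, e.stage))
out = BytesIO()
write_cache(entries, out)     # version-2 header, all entries, then the trailing SHA-1
raw_index = out.getvalue()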
Code example #5
File: fun.py Project: 571451370/devstack_mitaka
def write_cache(entries,
                stream,
                extension_data=None,
                ShaStreamCls=IndexFileSHA1Writer):
    """Write the cache represented by entries to a stream

    :param entries: **sorted** list of entries
    :param stream: stream to wrap into the ShaStreamCls - it is used for
        final output.

    :param ShaStreamCls: Type to use when writing to the stream. It produces a sha
        while writing to it, before the data is passed on to the wrapped stream

    :param extension_data: any kind of data to write as a trailer, it must begin
        with a 4 byte identifier, followed by its size (4 bytes)"""
    # wrap the stream into a compatible writer
    stream = ShaStreamCls(stream)

    tell = stream.tell
    write = stream.write

    # header
    version = 2
    write(b"DIRC")
    write(pack(">LL", version, len(entries)))

    # body
    for entry in entries:
        beginoffset = tell()
        write(entry[4])  # ctime
        write(entry[5])  # mtime
        path = entry[3]
        path = force_bytes(path, encoding=defenc)
        plen = len(path) & CE_NAMEMASK  # path length
        assert plen == len(
            path), "Path %s too long to fit into index" % entry[3]
        flags = plen | (entry[2] & CE_NAMEMASK_INV
                        )  # clear possible previous values
        write(
            pack(">LLLLLL20sH", entry[6], entry[7], entry[0], entry[8],
                 entry[9], entry[10], entry[1], flags))
        write(path)
        real_size = ((tell() - beginoffset + 8) & ~7)
        write(b"\0" * ((beginoffset + real_size) - tell()))
    # END for each entry

    # write previously cached extensions data
    if extension_data is not None:
        stream.write(extension_data)

    # write the sha over the content
    stream.write_sha()
Code example #6
File: base.py Project: Xender/GitPython
 def _store_path(self, filepath, fprogress):
     """Store file at filepath in the database and return the base index entry
     Needs the git_working_dir decorator active ! This must be assured in the calling code"""
     st = os.lstat(filepath)  # handles non-symlinks as well
     stream = None
     if S_ISLNK(st.st_mode):
         # in PY3, readlink is string, but we need bytes. In PY2, it's just OS encoded bytes, we assume UTF-8
         stream = BytesIO(force_bytes(os.readlink(filepath), encoding=defenc))
     else:
         stream = open(filepath, "rb")
     # END handle stream
     fprogress(filepath, False, filepath)
     istream = self.repo.odb.store(IStream(Blob.type, st.st_size, stream))
     fprogress(filepath, True, filepath)
     stream.close()
     return BaseIndexEntry((stat_mode_to_index_mode(st.st_mode), istream.binsha, 0, to_native_path_linux(filepath)))
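_store_path() is internal plumbing; a hedged sketch of the public route that reaches it (paths are illustrative): IndexFile.add() with plain file paths stores each blob through this helper and records the resulting BaseIndexEntry.

# Hedged usage sketch: adding plain paths through the public API ends up in
# _store_path(), which hashes the file (or symlink target) into the object
# database and yields a BaseIndexEntry for the index.
from git import Repo

repo = Repo('/path/to/repo')      # hypothetical repository location
repo.index.add(['README.md'])     # illustrative path inside the working tree
repo.index.commit('add README')   # optional: turn the staged entry into a commit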
Code example #7
File: fun.py Project: MattDMo/CloseObsoleteGitViews
def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1Writer):
    """Write the cache represented by entries to a stream

    :param entries: **sorted** list of entries
    :param stream: stream to wrap into the ShaStreamCls - it is used for
        final output.

    :param ShaStreamCls: Type to use when writing to the stream. It produces a sha
        while writing to it, before the data is passed on to the wrapped stream

    :param extension_data: any kind of data to write as a trailer, it must begin
        with a 4 byte identifier, followed by its size (4 bytes)"""
    # wrap the stream into a compatible writer
    stream = ShaStreamCls(stream)

    tell = stream.tell
    write = stream.write

    # header
    version = 2
    write(b"DIRC")
    write(pack(">LL", version, len(entries)))

    # body
    for entry in entries:
        beginoffset = tell()
        write(entry[4])         # ctime
        write(entry[5])         # mtime
        path = entry[3]
        path = force_bytes(path, encoding=defenc)
        plen = len(path) & CE_NAMEMASK      # path length
        assert plen == len(path), "Path %s too long to fit into index" % entry[3]
        flags = plen | (entry[2] & CE_NAMEMASK_INV)     # clear possible previous values
        write(pack(">LLLLLL20sH", entry[6], entry[7], entry[0],
                   entry[8], entry[9], entry[10], entry[1], flags))
        write(path)
        real_size = ((tell() - beginoffset + 8) & ~7)
        write(b"\0" * ((beginoffset + real_size) - tell()))
    # END for each entry

    # write previously cached extensions data
    if extension_data is not None:
        stream.write(extension_data)

    # write the sha over the content
    stream.write_sha()
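The extension_data trailer format mentioned in the docstring can be built by hand; a hedged sketch (the b'TREE' identifier mirrors git's cache-tree extension, and the empty payload is purely illustrative):

# Hedged sketch: an index extension trailer is a 4-byte identifier followed by a
# 4-byte big-endian payload size and then the payload itself.
from struct import pack

payload = b''                                          # illustrative, empty payload
extension_data = b'TREE' + pack('>L', len(payload)) + payload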
Code example #8
File: base.py Project: missionfocus/GitPython
 def _store_path(self, filepath, fprogress):
     """Store file at filepath in the database and return the base index entry
     Needs the git_working_dir decorator active ! This must be assured in the calling code"""
     st = os.lstat(filepath)     # handles non-symlinks as well
     stream = None
     if S_ISLNK(st.st_mode):
         # in PY3, readlink is string, but we need bytes. In PY2, it's just OS encoded bytes, we assume UTF-8
         stream = BytesIO(force_bytes(os.readlink(filepath), encoding='utf-8'))
     else:
         stream = open(filepath, 'rb')
     # END handle stream
     fprogress(filepath, False, filepath)
     istream = self.repo.odb.store(IStream(Blob.type, st.st_size, stream))
     fprogress(filepath, True, filepath)
     stream.close()
     return BaseIndexEntry((stat_mode_to_index_mode(st.st_mode),
                            istream.binsha, 0, to_native_path_linux(filepath)))
Code example #9
File: cmd.py Project: gitprime/GitPython
        def wait(self, stderr=b''):  # TODO: Bad choice to mimic `proc.wait()` but with different args.
            """Wait for the process and return its status code.

            :param stderr: Previously read value of stderr, in case stderr is already closed.
            :warn: may deadlock if output or error pipes are used and not handled separately.
            :raise GitCommandError: if the return status is not 0"""
            if stderr is None:
                stderr = b''
            stderr = force_bytes(stderr)

            status = self.proc.wait()

            def read_all_from_possibly_closed_stream(stream):
                try:
                    return stderr + force_bytes(stream.read())
                except ValueError:
                    return stderr or b''

            if status != 0:
                errstr = read_all_from_possibly_closed_stream(self.proc.stderr)
                log.debug('AutoInterrupt wait stderr: %r' % (errstr,))
                raise GitCommandError(self.args, status, errstr)
            # END status handling
            return status
Code example #10
File: cmd.py Project: gitprime/GitPython
 def read_all_from_possibly_closed_stream(stream):
     try:
         return stderr + force_bytes(stream.read())
     except ValueError:
         return stderr or b''
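Why the bare ValueError handler: reading from a file object that has already been closed raises ValueError, so the helper falls back to whatever stderr bytes were captured earlier. A minimal illustration:

# Minimal illustration of the ValueError branch: read() on a closed stream raises
# ValueError, which the helper turns into "return the stderr we already have".
import io

stream = io.BytesIO(b'fatal: something went wrong\n')
stream.close()
try:
    stream.read()
except ValueError as exc:
    print(exc)  # I/O operation on closed file.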