Example #1
def write_to_file(file: io.FileIO, data: bytes, start: int = 0):
    length_to_write = len(data)
    file.seek(start)
    written = 0
    while written < length_to_write:
        # FileIO.write() may write fewer bytes than requested, so loop
        # until the whole buffer has been written.
        written += file.write(data[written:])
    os.fsync(file.fileno())  # force the data out to disk
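A minimal usage sketch for the helper above, assuming `durable.bin` is a hypothetical path:

import io
import os  # used by write_to_file for fsync

with io.FileIO("durable.bin", "w+") as f:  # hypothetical path
    write_to_file(f, b"hello world")       # loops over partial writes, then fsyncs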
Example #2
 def write_config(self):
     """Write current config to the config file
     """
     config_file = FileIO(self.container + REPO_FILE, "wb")
     # Mode "wb" already truncates the file, so the explicit seek/truncate
     # pair below is redundant but harmless.
     config_file.seek(0)
     config_file.truncate(0)
     config_file.write(json.dumps(self.data).encode())
     config_file.close()  # release the descriptor; FileIO is unbuffered
Example #3
class FileRestoreResponse(RestoreResponse):

    BODY_TAG_SUFFIX = "-body"
    EXTENSION = "xml"

    def __init__(self, username=None, items=False):
        super(FileRestoreResponse, self).__init__(username, items)
        self.filename = os.path.join(settings.SHARED_DRIVE_CONF.restore_dir, uuid4().hex)

        self.response_body = FileIO(self.get_filename(self.BODY_TAG_SUFFIX), "w+")

    def get_filename(self, suffix=None):
        return "{filename}{suffix}.{ext}".format(filename=self.filename, suffix=suffix or "", ext=self.EXTENSION)

    def __add__(self, other):
        if not isinstance(other, FileRestoreResponse):
            return NotImplemented

        response = FileRestoreResponse(self.username, self.items)
        response.num_items = self.num_items + other.num_items

        self.response_body.seek(0)
        other.response_body.seek(0)

        shutil.copyfileobj(self.response_body, response.response_body)
        shutil.copyfileobj(other.response_body, response.response_body)

        return response

    def finalize(self):
        """
        Creates the final file with start and end tags
        """
        with open(self.get_filename(), "w") as response:
            # Add 1 to num_items to account for message element
            items = self.items_template.format(self.num_items + 1) if self.items else ""
            response.write(
                self.start_tag_template.format(
                    items=items, username=self.username, nature=ResponseNature.OTA_RESTORE_SUCCESS
                )
            )

            self.response_body.seek(0)
            shutil.copyfileobj(self.response_body, response)

            response.write(self.closing_tag)

        self.finalized = True
        self.close()

    def get_cache_payload(self, full=False):
        return {"data": self.get_filename() if not full else open(self.get_filename(), "r")}

    def as_string(self):
        with open(self.get_filename(), "r") as f:
            return f.read()

    def get_http_response(self):
        headers = {"Content-Length": os.path.getsize(self.get_filename())}
        return stream_response(open(self.get_filename(), "r"), headers)
Example #4
 def flipByteAt(inputfile, position):
     """Flips the bits for the byte at the specified position in the input file."""
     f = FileIO(inputfile, "r+")
     f.seek(position)
     byte = ord(f.read(1))
     f.seek(-1, 1)  # go back 1 byte from current position
     f.write(struct.pack("B", byte ^ 0xFF))  # write back the byte with every bit flipped
     f.close()
Example #5
 def flipByteAt(inputfile, position):
     """Flips the bits for the byte at the specified position in the input file."""
     f = FileIO(inputfile, "r+")
     f.seek(position)
     byte = ord(f.read(1))
     f.seek(-1, 1)  # go back 1 byte from current position
     f.write(struct.pack("B", byte ^ 0xFF))  # write back the byte with every bit flipped
     f.close()
Example #6
 def seek(self, position, whence=0):
     if whence == 1:
         position += self.__currentpos__
     elif whence == 2:
         position += self.__size__
         
     self.__currentpos__ = position
     FileIO.seek(self, self.__startpos__ + position)
     return position
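The whence values 0, 1, and 2 handled above correspond to io.SEEK_SET, io.SEEK_CUR, and io.SEEK_END. A quick demonstration against a real FileIO (hypothetical path):

import io

with io.FileIO("whence.bin", "w+") as f:  # hypothetical path
    f.write(b"0123456789")
    f.seek(3, io.SEEK_SET)   # absolute: position 3
    f.seek(2, io.SEEK_CUR)   # relative: position 5
    f.seek(-1, io.SEEK_END)  # from the end: position 9
    assert f.tell() == 9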
Example #7
    def _seek_bytes(self, bytes_, whence=0):
        '''
        :param bytes_: byte offset
        :type bytes_: int
        
        :param whence: seek anchor, as in io.FileIO.seek (0=start, 1=current, 2=end)
        
        Seeks a file by bytes instead of datagrams.
        '''

        FileIO.seek(self, bytes_, whence)
Example #8
def read_from_file(file: io.FileIO, start: int, stop: int) -> bytes:
    assert stop > start
    file.seek(start)
    data = bytes()
    while file.tell() < stop:
        read_data = file.read(stop - file.tell())
        if read_data == b'':
            raise ReachEndOfFile('Read until the end of file')
        data += read_data
    assert len(data) == stop - start
    return data
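A usage sketch for read_from_file; ReachEndOfFile is project-specific, so a stand-in is defined here, and `example.bin` is a hypothetical fixture:

import io

class ReachEndOfFile(Exception):
    """Stand-in for the project-specific exception raised above."""

with io.FileIO("example.bin", "w") as f:
    f.write(b"0123456789")

with io.FileIO("example.bin", "r") as f:
    assert read_from_file(f, 2, 6) == b"2345"  # bytes at offsets 2..5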
Example #9
def read_from_file(file_fd: io.FileIO, start: int, stop: int) -> bytes:
    length = stop - start
    assert length >= 0
    file_fd.seek(start)
    data = bytes()
    while file_fd.tell() < stop:
        read_data = file_fd.read(stop - file_fd.tell())
        if read_data == b'':
            raise ReachedEndOfFile('Read until the end of file')
        data += read_data
    assert len(data) == length
    return data
Example #10
    def flipBitAt(inputfile, position):
        """Flips the bit at the specified position in the input file."""
        if not 0 <= position < (8 * os.path.getsize(inputfile)):
            raise IndexError("Position " + str(position) + " is out of range")

        f = FileIO(inputfile, "r+")
        f.seek(position // 8)  # byte containing the target bit
        byte = ord(f.read(1))
        f.seek(-1, 1)  # go back 1 byte from the current position
        bitnum = position % 8
        f.write(struct.pack("B", byte ^ (1 << (7 - bitnum))))
        f.close()
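A round-trip sketch for flipBitAt, treated here as a module-level function (`sample.bin` is a hypothetical path):

with open("sample.bin", "wb") as f:
    f.write(b"\x00")

flipBitAt("sample.bin", 0)  # position 0 is the most significant bit of byte 0
with open("sample.bin", "rb") as f:
    assert f.read(1) == b"\x80"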
Example #11
 def flipBitAt(inputfile, position):
     """Flips the bit at the specified position in the input file."""
     if not 0 <= position < (8 * os.path.getsize(inputfile)):
         raise IndexError("Position " + str(position) + " is out of range")

     f = FileIO(inputfile, "r+")
     f.seek(position // 8)
     byte = ord(f.read(1))
     f.seek(-1, 1)  # go back 1 byte from the current position
     bitnum = position % 8
     f.write(struct.pack("B", byte ^ (1 << (7 - bitnum))))
     f.close()
Example #12
def read_from_file(file_fd: io.FileIO, start: int, stop: int) -> bytes:
    length = stop - start
    assert length >= 0
    file_fd.seek(start)
    data = bytes()
    while file_fd.tell() < stop:
        read_data = file_fd.read(stop - file_fd.tell())
        if read_data == b'':
            raise EndOfFileError('Read until the end of file_fd')
        data += read_data
    assert len(data) == length
    return data
Example #13
    def read_id3(file_handle: FileIO, skip_v1: bool = False) -> ID3Base:
        id3 = ID3v2(file_handle)
        if id3.is_valid_id3 or skip_v1:
            return id3

        # Check for an id3v1 tag
        current_file_position = file_handle.tell()
        file_handle.seek(-128, SEEK_END)
        block = file_handle.read(128)
        id3 = ID3v1(block)

        file_handle.seek(current_file_position, SEEK_SET)
        return id3
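The negative seek from SEEK_END above is the standard way to reach a trailing ID3v1 tag, which occupies the final 128 bytes of a file and begins with b"TAG". A minimal standalone check along the same lines (a hypothetical helper; files shorter than 128 bytes would raise OSError on the seek):

from io import FileIO, SEEK_END

def has_id3v1(path: str) -> bool:
    """Report whether the file ends with a 128-byte ID3v1 tag."""
    with FileIO(path, "r") as f:
        f.seek(-128, SEEK_END)      # the tag sits in the final 128 bytes
        return f.read(3) == b"TAG"  # ID3v1 tags begin with 'TAG'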
Example #14
def read_from_file(file_fd: io.FileIO, start: int, end: int) -> bytes:
    length = end - start
    assert length >= 0
    file_fd.seek(start)
    data = bytes()
    while file_fd.tell() < end:
        # The read() (when called with a positive argument), readinto() and write() methods on this class will only make one system call.
        read_data = file_fd.read(end - file_fd.tell())
        if read_data == b'':
            raise EndOfFileError('read until the end of file_fd')
        data += read_data
    assert len(data) == length
    return data
Example #15
    def get_file_contents(self,
                          folder_id=None,
                          folder_path=None,
                          folder=None,
                          file_name=None,
                          file_id=None,
                          local_folder=None):
        """
        One of the folder parameters must be supplied if file_id is not set
        :param folder_id: Google Drive folder id
        :param folder_path: Path separated by /
        :param folder: Folder dictionary with 'id' key as folder id

        :param file_name:
        :param file_id:

        :param local_folder: If set it will download the file to local drive and return the filename

        :return: the file name if local_folder is set, otherwise a BytesIO stream
        """

        if not file_id:
            folder_id = self.get_folder_id(folder_id, folder_path, folder)
            files = self.file_list(
                q=self.build_q(name=file_name, folder=folder_id))
            if not files:
                raise FileNotFound
            file_id = files[0]['id']
            file_size = int(files[0]['size'])
        else:
            file_size = int(self.get_file(file_id=file_id)['size'])

        if local_folder:
            if not file_name:
                file_name = self.get_file(file_id=file_id)['name']
            fh = FileIO(os.path.join(local_folder, file_name), 'wb')
        else:
            fh = BytesIO()
        if file_size > 0:
            request = self.service.files().get_media(fileId=file_id)
            downloader = MediaIoBaseDownload(fh,
                                             request,
                                             chunksize=1024 * 1024 * 10)
            done = False
            while done is False:
                status, done = downloader.next_chunk()
        if local_folder:
            return file_name
        else:
            fh.seek(0)
            return fh
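The download loop above follows the documented googleapiclient pattern. A pared-down sketch, assuming `service` is an authenticated Drive v3 client and FILE_ID is a hypothetical file id:

from io import FileIO
from googleapiclient.http import MediaIoBaseDownload

request = service.files().get_media(fileId=FILE_ID)  # hypothetical id
with FileIO("download.bin", "wb") as fh:             # hypothetical path
    downloader = MediaIoBaseDownload(fh, request)
    done = False
    while not done:
        status, done = downloader.next_chunk()  # status reports progress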
Example #16
    def __init__(self, initer):
        if isinstance(initer, str):
            # test whether initer is a path to a file ...
            if os.path.exists(initer):
                iobj = FileIO(initer, 'rb')
            else:
                # it should be a binary buffer ..
                iobj = StringIO()
                iobj.write(initer)
                iobj.seek(0, os.SEEK_SET)
        else:
            # it should be an io object (StringIO, FileIO)
            # (a common interface would be nice in Python)
            iobj = initer
        ElfStream.__init__(self, iobj)
        self.iobj = iobj

        # HEADER
        self.header = Header(self)

        # LOAD PROGRAM and SECTION HEADER TABLES
        self.prog_headers = self.load_entries(self.header.ph_offset,
                                              self.header.ph_count,
                                              ProgramHeader)
        self.sect_headers = self.load_entries(self.header.sh_offset,
                                              self.header.sh_count,
                                              SectionHeader)

        # LOAD SECTION HEADERS STRING TABLE
        strtab = self.sect_headers[self.header.shstrndx]
        self.shstrtab = StringTable(self.io, strtab.offset, strtab.size)

        # Create a section dictionary
        self.sect_dict = {}
        for sec in self.sect_headers:
            self.sect_dict[sec.name] = sec

        # LOAD STRING TABLE
        if '.strtab' in self.sect_dict:
            strtab = self.sect_dict['.strtab']
            self.strtab = StringTable(self.io, strtab.offset, strtab.size)

        # LOAD SYMBOL TABLE
        if '.symtab' in self.sect_dict:
            symtab = self.sect_dict['.symtab']
            count = symtab.size // Symbol.LENGTH
            self.symbols = self.load_entries(symtab.offset, count, Symbol)
Example #17
    def open(self, name, name2, mode='rb'):

        file_data = self.check_file_exists(name)

        if file_data is None:
            return "Nie ma!"  # Polish: "It's not there!"
        if file_data['mimeType'] == self.GOOGLE_DRIVE_FOLDER_MIMETYPE:
            return "To folder!"  # Polish: "That's a folder!"
        request = self.drive_service.files().get_media(fileId=file_data['id'])
        # fh = BytesIO()
        fh2 = FileIO(name2, 'wb')
        downloader = MediaIoBaseDownload(fh2, request)
        done = False
        while done is False:
            _, done = downloader.next_chunk()
        fh2.seek(0)
        return File(fh2, name)
Example #18
    def __init__(self, initer):
        if isinstance(initer, str):
            # test whether initer is a path to a file ...
            if os.path.exists(initer):
                iobj = FileIO(initer, 'rb')
            else:
                # it should be a binary buffer ..
                iobj = StringIO()
                iobj.write(initer)
                iobj.seek(0, os.SEEK_SET)
        else:
            # it should be an io object (StringIO, FileIO)
            # (a common interface would be nice in Python)
            iobj = initer
        ElfStream.__init__(self, iobj)
        self.iobj = iobj
        
        # HEADER
        self.header = Header(self)
        
        # LOAD PROGRAM and SECTION HEADER TABLES
        self.prog_headers = self.load_entries(self.header.ph_offset, self.header.ph_count, ProgramHeader)
        self.sect_headers = self.load_entries(self.header.sh_offset, self.header.sh_count, SectionHeader)
        
        # LOAD SECTION HEADERS STRING TABLE
        strtab = self.sect_headers[self.header.shstrndx]
        self.shstrtab = StringTable(self.io, strtab.offset, strtab.size)
        
        # Create a section dictionary
        self.sect_dict = {}
        for sec in self.sect_headers:
            self.sect_dict[sec.name] = sec
        
        # LOAD STRING TABLE
        if '.strtab' in self.sect_dict:
            strtab = self.sect_dict['.strtab']
            self.strtab = StringTable(self.io, strtab.offset, strtab.size)

        # LOAD SYMBOL TABLE
        if '.symtab' in self.sect_dict:
            symtab = self.sect_dict['.symtab']
            count = symtab.size // Symbol.LENGTH
            self.symbols = self.load_entries(symtab.offset, count, Symbol)
Example #19
class filestream_range_iterator(Iterable):
    """
    A class that mimics FileIO and implements an iterator that yields
    fixed-size chunks of bytes, beginning at `start` and ending at `end`.

    BBB: due to a possible bug in Zope >4, <=4.1.3, this couldn't be a
         subclass of FileIO like Iterators.filestream_iterator
    """

    def __init__(self, name, mode='rb', bufsize=-1, streamsize=1 << 16, start=0, end=None):
        self._io = FileIO(name, mode=mode)
        self.streamsize = streamsize
        self.start = start
        self.end = end
        self._io.seek(start, 0)

    def __iter__(self):
        if self._io.closed:
            raise ValueError("I/O operation on closed file.")
        return self

    def __next__(self):
        if self.end is None:
            count = self.streamsize
        else:
            count = max(min(self.end - self._io.tell(), self.streamsize), 0)
        data = self._io.read(count)
        if not data:
            raise StopIteration
        return data

    next = __next__

    def close(self):
        self._io.close()

    # BBB: is it necessary to implement __len__ ?
    # def __len__(self)

    def read(self, size=-1):
        return self._io.read(size)
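A usage sketch for the iterator above, streaming bytes 100 through 599 of a hypothetical file in the default 64 KiB chunks:

it = filestream_range_iterator("payload.bin", start=100, end=600)  # hypothetical path
try:
    for chunk in it:
        process(chunk)  # hypothetical consumer
finally:
    it.close()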
Example #20
    def read_from(cls, sector_file: io.FileIO) -> "SectorObjects":

        # Save current position in file
        tell = sector_file.tell()

        # Go to end of file minus size of length.
        sector_file.seek(-cls.length_parser.size, 2)

        length, = cls.length_parser.unpack_from_file(sector_file)

        print(length)

        objects = []

        if length:
            # Go back to saved position
            sector_file.seek(tell)

            for _ in range(length):
                objects.append(Object.read_from(sector_file))

        return SectorObjects(objects=objects)
Example #21
    def read_from(cls, sector_file: io.FileIO) -> "SectorObjects":

        # Save current position in file
        tell = sector_file.tell()

        # Go to end of file minus size of length.
        sector_file.seek(-cls.length_parser.size, 2)

        length, = cls.length_parser.unpack_from_file(sector_file)

        print(length)

        objects = []

        if length:
            # Go back to saved position
            sector_file.seek(tell)

            for _ in range(length):
                objects.append(Object.read_from(sector_file))

        return SectorObjects(objects=objects)
Example #22
    def load(io: FileIO):
        instance = MPK(io)
        magic = IOHelper.read_ascii_string(io, 4)
        if magic == MPK_MAGIC:
            version, count = IOHelper.read_struct(io, '<2i')
            io.seek(52, SEEK_CUR)
            instance.set_version(version)
            for i in range(count):
                is_zip, index, offset, data_size, zip_size = IOHelper.read_struct(
                    io, '<2i3q')
                name_data = io.read(224)
                name = name_data[:name_data.find(b'\x00')].decode(
                    encoding='ascii')
                instance.insert_file({
                    'is_zip': is_zip != 0,
                    'index': index,
                    'offset': offset,
                    'data_size': data_size,
                    'zip_size': zip_size,
                    'name': name,
                    'data': None,
                })

        return instance
Example #23
    def read_file_data(self, index: int, handle: FileIO, start: int, size: int,
                       lock: threading.RLock) -> bytes:
        """
        获取文件指定位置数据

        @param {int} index - 处理读取的线程索引
        @param {object} handle - 打开的文件句柄
        @param {int} start - 要获取的数据开始位置
        @param {int} size - 要获取的数据大小
        @param {threading.RLock} lock - 读取数据的锁对象

        @returns {bytes} - 获取到的数据字典
            注:如果开始位置超过文件大小,将返回b''; 如果要获取的数据大小超过文件,则返回真实的数据大小
                对于无法预知文件大小的情况,如果返回b''也代表着文件结束
        """
        lock.acquire()
        try:
            # Move to the specified position and read the data
            handle.seek(start)
            _bytes = handle.read(size)

            return _bytes
        finally:
            lock.release()
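A usage sketch for the locked read above; `reader` is assumed to be an instance of the surrounding class, and `data.bin` a hypothetical path:

import threading
from io import FileIO

lock = threading.RLock()
handle = FileIO("data.bin", "r")
chunk = reader.read_file_data(0, handle, start=1024, size=4096, lock=lock)
handle.close()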
Example #24
    def parse_file(self):
        """Parses the video file, obtaining metadata that can be accessed thru
        this class' properties.

        :raises ValueError: File is not an MP4 format video.
        """
        the_file = FileIO(self.path, 'rb')

        # the mimetype could be incorrect
        # we'll let the file decide
        if self.video_format not in self.mimetype:
            the_file.seek(0x00, SEEK_SET)
            first_12 = the_file.read(12)
            # split the dword and the ftyp
            size_dword = struct.unpack('>I', first_12[0:4])[0]
            ftyp_val = first_12[4:]
            # validate if mp4
            if size_dword > 0:
                if ftyp_val not in self.supported_ftypes:
                    the_file.close()
                    raise ValueError("{} is not an MP4 video.".format(
                        self.name))
            else:
                the_file.close()
                raise ValueError("{} is not an MP4 video.".format(self.name))

        # determine the size of the `compatible_brand` field
        # this is the very first DWORD of the file
        the_file.seek(0x00, SEEK_SET)
        compat_brand_end = the_file.read(4)
        compat_brand_end = struct.unpack('>I', compat_brand_end)[0]
        compat_brand_size = compat_brand_end - 0x10
        # get the `compatible_brand` field
        the_file.seek(0x10, SEEK_SET)
        compat_brand = the_file.read(compat_brand_size)

        # PARSE THE FILE!!!
        try:
            if compat_brand in self.supported_brands:
                self._read_mp4_container(the_file, compat_brand_end)
        except NoMoovAtomException:
            #TODO: ADD LOGGING
            #FIXME: MAKE THIS INTO A LOGGER
            print("WARNING: {} has no moov atom!".format(self.name))
        except NoReadVideoHeaderException:
            print("WARNING: Couldn't get information from {}!".format(
                self.name))

        the_file.close()
        self._parsed_header = True
Example #25
def resolve_http(path: str, tmpfile: io.FileIO):
    resp = requests.get(path)
    resp.raise_for_status()
    tmpfile.write(resp.content)
    tmpfile.seek(0)
    return tmpfile.name
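A usage sketch for resolve_http, with a NamedTemporaryFile standing in for the FileIO argument (any seekable binary file object works; the URL is hypothetical and `requests` is assumed importable):

import tempfile

with tempfile.NamedTemporaryFile() as tmp:
    local_path = resolve_http("https://example.com/data.bin", tmp)
    # local_path now names a temp file holding the downloaded bytes,
    # with the read position rewound to the start.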
Example #26
def parse(f: FileIO, dbfile: str, use_prefix: bool = False) -> bool:
    prefix = "".join(
        c for c in Path(str(f.name)).name.replace(".cfg.bin", "").upper()
        if "A" <= c <= "Z" or "0" <= c <= "9" or c == "_")
    prefix += "_"

    magic = f.read(4)
    if magic != b"RDBN":
        logger.error("magic not found")
        return False

    header_size = int.from_bytes(f.read(2), "little")
    if header_size != 0x50:
        logger.error("header must be 50 byte long")
        return False
    f.seek(0)
    header = parse_header(f.read(header_size))

    logger.debug(header)

    f.seek(header.header_size + header.body_size)
    strings = f.read()
    strings_table: Dict[int, str] = {}
    for i in strings.rstrip(b"\0").split(b"\0"):
        strings_table[binascii.crc32(i)] = i.decode()

    f.seek(header.header_size)

    tmp_tables: List[Table] = []
    nondata_strings: List[str] = []
    for i in range(header.table_count):
        name_crc, unk1, col_offset, col_count, zero1, zero2 = struct.unpack(
            "<2I 2H II", f.read(header.item_size))
        f.read(header.item_data_size - header.item_size)
        table = Table(
            id=name_crc,
            name=strings_table[name_crc],
            unk1=unk1,
            col_offset=col_offset,
            col_count=col_count,
            zero1=zero1,
            zero2=zero2,
            columns=[],
        )
        logger.debug(table)
        nondata_strings.append(table.name)
        tmp_tables.append(table)

    tmp_columns: List[Column] = []
    for i in range(header.column_count):
        name_crc, subid, id, size, offset, count = struct.unpack(
            "<I 2H 2I I", f.read(header.item_size))
        f.read(header.item_data_size - header.item_size)
        col = Column(
            id=name_crc,
            name=strings_table[name_crc],
            typeid=id,
            sub_typeid=subid,
            size=size,
            offset=offset,
            count=count,
        )
        logger.debug(col)
        nondata_strings.append(col.name)
        tmp_columns.append(col)
    columns = {x.name: x for x in tmp_columns}

    # table-column relationships
    for t in tmp_tables:
        for i in range(t.col_offset, t.col_offset + t.col_count):
            t.columns.append(tmp_columns[i])
        logger.debug("table {} consists of columns {}".format(
            t.name, [c.name for c in t.columns]))

    tables = {x.name: x for x in tmp_tables}

    tmp_lists: List[DataList] = []
    lists: Dict[str, DataList] = {}
    for i in range(header.list_count):
        idx, unk, offset, size, count, listname_crc = struct.unpack(
            "<2HIIII", f.read(header.item_size))
        f.read(header.item_data_size - header.item_size)
        nondata_strings.append(strings_table[listname_crc])
        list_ = DataList(
            id=listname_crc,
            index=idx,
            name=strings_table[listname_crc],
            unk=unk,
            offset=offset,
            size=size,
            count=count,
        )
        tmp_lists.append(list_)
        logger.debug(list_)
    lists = {x.name: x for x in tmp_lists}

    con = sqlite3.connect(dbfile)

    # list-table relationship
    list_table: Dict[str, str] = {}  # list_name -> table_name
    for l in lists.values():
        table_cand = next(
            (t.name for idx, t in enumerate(tmp_tables) if idx == l.index),
            None)
        if table_cand is None:
            logger.warning("table for list {} not found".format(l))
            return False
        list_table[l.name] = table_cand
        logger.debug("list {} is a list for tabel {}".format(
            l.name, table_cand))

    # list, table, and column ids <-> string table offset relations
    # --
    # All item ids I have discovered so far equal the crc32 of the item names,
    # so this relation is not strictly required.
    ids = [
        int.from_bytes(f.read(4), "little")
        for _ in range(header.id_name_table_size // 8)
    ]
    name_offsets = [
        int.from_bytes(f.read(4), "little")
        for _ in range(header.id_name_table_size // 8)
    ]
    all_items: Dict[int, str] = {i.id: i.name for i in lists.values()}
    all_items.update({i.id: i.name for i in tables.values()})
    all_items.update({i.id: i.name for i in columns.values()})
    for id, name_offset in zip(ids, name_offsets):
        name = all_items.get(id, None)
        if name is None:
            logger.warning(
                "id (crc32 of name) {} is not recorded, but found in id-name table"
                .format(id))
            continue
        name_ = strings[name_offset:].split(b"\0")[0].decode()
        if name != name_:
            logger.warning("name for id {} should be {} but {}".format(
                id, name_, name))

    list_sorter: Callable[[DataList], int] = lambda l: l.offset
    for l in sorted(lists.values(), key=list_sorter):
        f.seek(header.header_size + header.item_data_size *
               (header.table_count + header.column_count + header.list_count) +
               header.id_name_table_size + l.offset)
        table_name = list_table[l.name]
        table_name_sql = table_name if not use_prefix else (prefix +
                                                            table_name)

        # fetch type name
        table_type = tables[table_name]

        logger.debug("list {} starts at 0x{:08x}".format(l.name, f.tell()))

        # type convertors
        convertors = [
            DBType(c.typeid, c.sub_typeid, c.name, c.size, c.count)
            for c in table_type.columns
        ]

        # get table information
        columns = ", ".join("{} {}".format(c.name, c.sqlite_type)
                            for c in convertors)

        con.execute("CREATE TABLE IF NOT EXISTS {} ({});".format(
            table_name_sql, columns))

        # insert information
        for i in range(l.count):
            row_data = f.read(l.size)
            row_out: List[Optional[Union[str, int, bytes]]] = []
            pos = 0
            last_offset: Optional[int] = None
            for col, conv in zip(table_type.columns, convertors):
                if last_offset:
                    pad = col.offset - last_offset
                    if pad > 0:
                        pos += pad
                        if i == 0:
                            logger.debug(
                                "{}-byte padding inserted at {}".format(
                                    pad, pos))
                    elif pad < 0:
                        logger.error("padding could not be negative")
                data = row_data[pos:pos + col.size * col.count]
                pos += col.size * col.count
                last_offset = col.offset + col.size

                if conv.id == 3 and conv.subid in (0x14, 0x15):
                    addr = conv.convert(data)
                    assert isinstance(addr, int)
                    if addr != 0 and addr != 0xFFFFFFFF and addr < len(
                            strings):
                        # TODO: more accurate string offset detection
                        s = strings[addr:].split(b"\0")[0].decode()
                        if strings[addr - 1] != 0:
                            data = "[{:08x}]".format(addr)
                        elif s in nondata_strings:
                            data = "[{:08x}]".format(addr)
                        else:  # OK
                            data = s
                    else:
                        data = "[{:08x}]".format(addr)
                else:
                    data = conv.convert(data)
                row_out.append(data)
            if i == 0 and pos != l.size:
                logger.debug(
                    "data reading ends at {}, leaving {} byte unread".format(
                        pos, l.size - pos))
                logger.debug(
                    "unread data (only the first row will be shown): {}".
                    format(row_data[pos:]))
            placeholder = ", ".join("?" * len(row_out))
            con.execute(
                "INSERT INTO {} VALUES ({});".format(table_name_sql,
                                                     placeholder), row_out)
        logger.debug("list {} ends at 0x{:08x}".format(l.name, f.tell()))

    con.commit()
    con.close()
    logger.debug("proccessing finished at 0x{:08x}".format(f.tell()))
    if f.tell() != header.header_size + header.body_size:
        logger.warning(
            "data parsing finished at 0x{:08x}, but the data seems ends at {:08x}"
            .format(
                f.tell(),
                header.header_size + header.body_size,
            ))
    return True
Example #27
def rewind(file: FileIO):
    file.seek(0)
Example #28
class FileRestoreResponse(RestoreResponse):

    BODY_TAG_SUFFIX = '-body'
    EXTENSION = 'xml'

    def __init__(self, username=None, items=False):
        super(FileRestoreResponse, self).__init__(username, items)
        self.filename = os.path.join(settings.SHARED_DRIVE_CONF.restore_dir,
                                     uuid4().hex)

        self.response_body = FileIO(self.get_filename(self.BODY_TAG_SUFFIX),
                                    'w+')

    def get_filename(self, suffix=None):
        return "{filename}{suffix}.{ext}".format(filename=self.filename,
                                                 suffix=suffix or '',
                                                 ext=self.EXTENSION)

    def __add__(self, other):
        if not isinstance(other, FileRestoreResponse):
            return NotImplemented

        response = FileRestoreResponse(self.username, self.items)
        response.num_items = self.num_items + other.num_items

        self.response_body.seek(0)
        other.response_body.seek(0)

        shutil.copyfileobj(self.response_body, response.response_body)
        shutil.copyfileobj(other.response_body, response.response_body)

        return response

    def finalize(self):
        """
        Creates the final file with start and end tags
        """
        with open(self.get_filename(), 'w') as response:
            # Add 1 to num_items to account for message element
            items = self.items_template.format(self.num_items +
                                               1) if self.items else ''
            response.write(
                self.start_tag_template.format(
                    items=items,
                    username=self.username,
                    nature=ResponseNature.OTA_RESTORE_SUCCESS))

            self.response_body.seek(0)
            shutil.copyfileobj(self.response_body, response)

            response.write(self.closing_tag)

        self.finalized = True
        self.close()

    def get_cache_payload(self, full=False):
        return {
            'data':
            self.get_filename() if not full else open(self.get_filename(), 'r')
        }

    def as_string(self):
        with open(self.get_filename(), 'r') as f:
            return f.read()

    def get_http_response(self):
        headers = {'Content-Length': os.path.getsize(self.get_filename())}
        return stream_response(open(self.get_filename(), 'r'), headers)
Example #29
class CloudStorageIO(StorageIOSeekable):
    def __init__(self, uri):
        """
        """
        StorageIOSeekable.__init__(self, uri)

        # Path of the temp local file
        self.temp_path = None

        # Stores the temp local FileIO object
        self.__file_io = None

        # Cache the size information
        # TODO: use cached property
        self.__size = None

    @property
    def size(self):
        if not self.__size:
            if self.__file_io:
                return os.fstat(self.__file_io.fileno()).st_size
            self.__size = self.get_size()
        return self.__size

    def seek(self, pos, whence=0):
        if self.__file_io:
            self._offset = self.__file_io.seek(pos, whence)
            return self._offset
        return self._seek(pos, whence)

    def tell(self):
        if self.__file_io:
            self._offset = self.__file_io.tell()
        return self._offset

    def local(self):
        """Creates a local copy of the file.
        """
        if not self.__file_io:
            file_obj = self.create_temp_file()
            # Download file if appending or updating
            if self.exists() and ('a' in self.mode or '+' in self.mode):
                self.download(file_obj)
            # Close the temp file and open it with FileIO
            file_obj.close()
            mode = "".join([c for c in self.mode if c in "rw+ax"])
            self.__file_io = FileIO(file_obj.name, mode)
            self.temp_path = file_obj.name
        return self

    def read(self, size=None):
        """Reads the file from the Google Cloud bucket to memory

        Returns: Bytes containing the contents of the file.
        """
        start = self.tell()
        if self.__file_io:
            self.__file_io.seek(start)
            b = self.__file_io.read(size)
        else:
            if not self.exists():
                raise FileNotFoundError("File %s does not exists." % self.uri)
            file_size = self.size
            # TODO: size unknown?
            if not file_size:
                return b""
            if start >= file_size:
                return b""
            end = file_size - 1
            if size:
                end = start + size - 1
            if end > file_size - 1:
                end = file_size - 1
            # logger.debug("Reading from %s to %s" % (start, end))
            b = self.read_bytes(start, end)
        self._offset += len(b)
        return b

    def write(self, b):
        """Writes data into the file.

        Args:
            b: Bytes data

        Returns: The number of bytes written into the file.

        """
        if self.closed:
            raise ValueError("write to closed file %s" % self.uri)
        # Create a temp local file
        self.local()
        # Write data from buffer to file
        self.__file_io.seek(self.tell())
        size = self.__file_io.write(b)
        self._offset += size
        return size

    def __rm_temp(self):
        if self.temp_path and os.path.exists(self.temp_path):
            os.unlink(self.temp_path)
        logger.debug("Deleted temp file %s of %s" % (self.temp_path, self.uri))
        self.temp_path = None
        return

    def open(self, mode='r', *args, **kwargs):
        """Opens the file for writing
        """
        if not self._closed:
            self.close()
        super().open(mode)
        self._closed = False
        # Reset offset position when open
        self.seek(0)
        if 'a' in self.mode:
            # Move to the end of the file if open in appending mode.
            self.seek(0, 2)
        elif 'w' in self.mode:
            # Create empty local file
            self.local()
        return self

    def close(self):
        """Flush and close the file.
        This method has no effect if the file is already closed.
        """

        if self._closed:
            return

        if self.__file_io:
            if not self.__file_io.closed:
                self.__file_io.close()
            self.__file_io = None

        if self.temp_path:
            logger.debug("Uploading file to %s" % self.uri)
            with open(self.temp_path, 'rb') as f:
                self.upload(f)
            # Remove __temp_file if it exists.
            self.__rm_temp()
            # Set _closed attribute
            self._closed = True

    @property
    def updated_time(self):
        raise NotImplementedError()

    def exists(self):
        raise NotImplementedError()

    def get_size(self):
        raise NotImplementedError()

    def delete(self):
        raise NotImplementedError()

    def upload(self, from_file_obj):
        raise NotImplementedError()

    def download(self, to_file_obj):
        """Downloads the data to a file object
        Caution: This method does not call flush()
        """
        raise NotImplementedError()

    def read_bytes(self, start, end):
        """Reads bytes from position start to position end, inclusive
        """
        raise NotImplementedError()
Example #30
def my_close(file: io.FileIO, byar: bytearray):
    file.seek(-len(byar), io.SEEK_CUR)  # step back over the bytes just written
    file.write(byar)
    file.close()
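A round-trip sketch for my_close: it steps back over the bytes just written, overwrites them, and closes the file (`patch.bin` is a hypothetical path):

import io

f = io.FileIO("patch.bin", "w+")
f.write(b"hello")
my_close(f, bytearray(b"HELLO"))  # seek back 5 bytes, rewrite, close
with open("patch.bin", "rb") as check:
    assert check.read() == b"HELLO"

Example #31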
def _save_results(student_test_suite: ag_models.StudentTestSuite,
                  submission: ag_models.Submission,
                  setup_run_result: CompletedCommand,
                  student_tests: List[str],
                  discarded_tests: List[str],
                  invalid_tests: List[str],
                  timed_out_tests: List[str],
                  bugs_exposed: List[str],
                  get_test_names_run_result: CompletedCommand = None,
                  validity_check_stdout: FileIO = None,
                  validity_check_stderr: FileIO = None,
                  buggy_impls_stdout: FileIO = None,
                  buggy_impls_stderr: FileIO = None):
    result_kwargs = {
        'student_tests': student_tests,
        'discarded_tests': discarded_tests,
        'invalid_tests': invalid_tests,
        'timed_out_tests': timed_out_tests,
        'bugs_exposed': bugs_exposed
    }
    result = ag_models.StudentTestSuiteResult.objects.update_or_create(
        defaults=result_kwargs,
        student_test_suite=student_test_suite,
        submission=submission)[0]  # type: ag_models.StudentTestSuiteResult

    if setup_run_result is not None:
        setup_result = ag_models.AGCommandResult.objects.validate_and_create(
            return_code=setup_run_result.return_code,
            timed_out=setup_run_result.timed_out,
            stdout_truncated=setup_run_result.stdout_truncated,
            stderr_truncated=setup_run_result.stderr_truncated
        )  # type: ag_models.AGCommandResult

        with open(setup_result.stdout_filename, 'wb') as f:
            shutil.copyfileobj(setup_run_result.stdout, f)

        with open(setup_result.stderr_filename, 'wb') as f:
            shutil.copyfileobj(setup_run_result.stderr, f)

        result.setup_result = setup_result
        result.save()

    if get_test_names_run_result is not None:
        result.get_test_names_result.return_code = get_test_names_run_result.return_code
        result.get_test_names_result.timed_out = get_test_names_run_result.timed_out
        result.get_test_names_result.save()
        with open(result.get_test_names_result.stdout_filename, 'wb') as f:
            get_test_names_run_result.stdout.seek(0)
            shutil.copyfileobj(get_test_names_run_result.stdout, f)
        with open(result.get_test_names_result.stderr_filename, 'wb') as f:
            get_test_names_run_result.stderr.seek(0)
            shutil.copyfileobj(get_test_names_run_result.stderr, f)

    if validity_check_stdout is not None:
        validity_check_stdout.seek(0)
        with open(result.validity_check_stdout_filename, 'wb') as f:
            shutil.copyfileobj(validity_check_stdout, f)
    if validity_check_stderr is not None:
        validity_check_stderr.seek(0)
        with open(result.validity_check_stderr_filename, 'wb') as f:
            shutil.copyfileobj(validity_check_stderr, f)
    if buggy_impls_stdout is not None:
        buggy_impls_stdout.seek(0)
        with open(result.grade_buggy_impls_stdout_filename, 'wb') as f:
            shutil.copyfileobj(buggy_impls_stdout, f)
    if buggy_impls_stderr is not None:
        buggy_impls_stderr.seek(0)
        with open(result.grade_buggy_impls_stderr_filename, 'wb') as f:
            shutil.copyfileobj(buggy_impls_stderr, f)
Example #32
class FileRestoreResponse(RestoreResponse):

    BODY_TAG_SUFFIX = '-body'
    EXTENSION = 'xml'

    def __init__(self, username=None, items=False):
        super(FileRestoreResponse, self).__init__(username, items)
        self.filename = path.join(settings.RESTORE_PAYLOAD_DIR or tempfile.gettempdir(), uuid4().hex)

        self.response_body = FileIO(self.get_filename(self.BODY_TAG_SUFFIX), 'w+')

    def get_filename(self, suffix=None):
        return "{filename}{suffix}.{ext}".format(
            filename=self.filename,
            suffix=suffix or '',
            ext=self.EXTENSION
        )

    def __add__(self, other):
        if not isinstance(other, FileRestoreResponse):
            return NotImplemented

        response = FileRestoreResponse(self.username, self.items)
        response.num_items = self.num_items + other.num_items

        self.response_body.seek(0)
        other.response_body.seek(0)

        shutil.copyfileobj(self.response_body, response.response_body)
        shutil.copyfileobj(other.response_body, response.response_body)

        return response

    def finalize(self):
        """
        Creates the final file with start and end tags
        """
        with open(self.get_filename(), 'w') as response:
            # Add 1 to num_items to account for message element
            items = self.items_template.format(self.num_items + 1) if self.items else ''
            response.write(self.start_tag_template.format(
                items=items,
                username=self.username,
                nature=ResponseNature.OTA_RESTORE_SUCCESS
            ))

            self.response_body.seek(0)
            shutil.copyfileobj(self.response_body, response)

            response.write(self.closing_tag)
        
        self.finalized = True
        self.close()

    def get_cache_payload(self, full=False):
        return {
            'is_file': True,
            'data': self.get_filename() if not full else open(self.get_filename(), 'r')
        }

    def as_string(self):
        with open(self.get_filename(), 'r') as f:
            return f.read()

    def get_http_response(self):
        return stream_response(self.get_filename())
Example #33
class File(RawIOBase):
    'Create a file object wrapping an e[x]ploded zip file'

    HEADER = 0
    DATA = 1
    DESCRIPTOR = 2
    DIRECTORY = 3

    def __init__(self, path, flags, info, fh=None, base='.', depth=0):
        super(File, self).__init__()

        self.path = path
        self.flags = flags
        self.fh = fh

        self.info = info
        self.depth = depth
        self.cursor = 0
        self.offset = 0
        self.state = File.HEADER

        # stream item info
        self.stream_offset = 0
        self.zip_header = b''
        self.descriptor = b''

        # data file info
        self.data = None
        self.data_name = ''
        self.data_len = 0

        # streams
        prefix = os.path.join(base, 'meta', os.path.basename(path))
        self.stream = FileIO(prefix + '.stream', 'rb')
        self.dir = FileIO(prefix + '.dir', 'rb')
        self.data_dir = os.path.join(base, 'data')

        # init
        self._load_stream_item()
        self.lock = threading.Lock()

    def _load_stream_item(self):
        'Sets the next stream item as current.'

        if self.data:
            self.data.close()
            self.data = None

        # open the header so we can know the data file to open, and the
        # length of the var fields
        raw_header = self.stream.read(STREAM_ITEM.size)
        header = StreamItem._make(STREAM_ITEM.unpack(raw_header))

        var_fields = header.filename_len + header.extra_field_len
        # I would think that b2a_hex should decode the raw bytes...
        sha1 = b2a_hex(header.sha).decode('ascii')

        # only save the zip part of the header
        self.zip_header = (raw_header[:HEADER_DIFF] +
                           self.stream.read(var_fields))

        self.descriptor = self.stream.read(header.descriptor_len)

        self.data_name = path.join(*([self.data_dir] +
                                     list(sha1[:self.depth]) + [sha1]))

    def _open_data_file(self):
        self.data = FileIO(self.data_name, 'rb')
        self.data_len = self.data.seek(0, 2)
        self.data.seek(0)

    def close(self):
        self.stream.close()
        self.dir.close()
        if self.data: self.data.close()

    def fileno(self):
        return self.fh

    def isatty(self):
        return False

    def read(self, count=-1):
        if count < 0: return self.readall()
        elif count == 0: return b''

        state = self.state
        if state == File.HEADER:
            previous_offset = self.offset
            self.offset += count

            result = self.zip_header[previous_offset:self.offset]
            self.cursor += len(result)

            if self.offset >= len(self.zip_header):
                self.state = File.DATA
                if not self.data: self._open_data_file()

            return result

        elif state == File.DATA:
            result = self.data.read(count)
            self.cursor += len(result)

            if self.data.tell() >= self.data_len:
                self.state = File.DESCRIPTOR
                self.offset = 0

            # empty data file (state will now be DESCRIPTOR)
            if not result: return self.read(count)

            return result

        elif state == File.DESCRIPTOR:
            previous_offset = self.offset
            self.offset += count

            result = self.descriptor[previous_offset:self.offset]
            self.cursor += len(result)

            if self.offset >= len(self.descriptor):
                if self.cursor >= self.info.directory_offset:
                    self.state = File.DIRECTORY
                    self.dir.seek(0)
                    self.stream_offset = None

                    if self.data:
                        self.data.close()
                        self.data = None

                else:
                    self.state = File.HEADER
                    self.offset = 0
                    self.stream_offset = self.stream.tell()
                    self._load_stream_item()

            # descriptor is optional (state will now be HEADER or DIRECTORY)
            if not result: return self.read(count)

            return result
        elif state == File.DIRECTORY:
            result = self.dir.read(count)
            self.cursor += len(result)

            return result
        else:
            raise RuntimeError('Invalid state: %r' % self.state)

    def readable(self):
        return True

    def readinto(self, b):
        count = len(b)
        if count == 0: return 0

        state = self.state
        if state == File.HEADER:
            header_len = len(self.zip_header)
            previous_offset = self.offset

            current_offset = self.offset = \
                    min(previous_offset + count, header_len)

            read = current_offset - previous_offset
            b[:read] = self.zip_header[previous_offset:current_offset]
            self.cursor += read

            if current_offset == header_len:
                self.state = File.DATA
                if not self.data: self._open_data_file()

            return read

        elif state == File.DATA:
            read = self.data.readinto(b)
            self.cursor += read

            if self.data.tell() >= self.data_len:
                self.state = File.DESCRIPTOR
                self.offset = 0

            # empty data file (state will now be DESCRIPTOR)
            if not read: return self.readinto(b)

            return read

        elif state == File.DESCRIPTOR:
            descriptor_len = len(self.descriptor)
            previous_offset = self.offset

            current_offset = self.offset = \
                    min(previous_offset + count, descriptor_len)

            read = current_offset - previous_offset
            b[:read] = self.descriptor[previous_offset:current_offset]
            self.cursor += read

            if current_offset == descriptor_len:
                if self.cursor >= self.info.directory_offset:
                    self.state = File.DIRECTORY
                    self.dir.seek(0)
                    self.stream_offset = None

                    if self.data:
                        self.data.close()
                        self.data = None

                else:
                    self.state = File.HEADER
                    self.offset = 0
                    self.stream_offset = self.stream.tell()
                    self._load_stream_item()

            # descriptor is optional (state will now be HEADER or DIRECTORY)
            if not read: return self.readinto(b)

            return read
        elif state == File.DIRECTORY:
            read = self.dir.readinto(b)
            self.cursor += read

            return read
        else:
            raise RuntimeError('Invalid state: %r' % self.state)

    def seek(self, pos, whence=0):
        if whence == 1:
            pos += self.cursor
        elif whence == 2:
            pos += self.info.filesize

        if pos == self.cursor: return pos
        self.cursor = pos

        # skip directly to the central directory
        if pos >= self.info.directory_offset:
            if self.data:
                self.data.close()
                self.data = None

            self.state = File.DIRECTORY
            self.stream_offset = None
            self.dir.seek(pos - self.info.directory_offset)
            return pos

        # calculate the offset into the stream file
        z_offset, s_offset = self.info.jump_tree.find(pos).location
        additional = pos - z_offset

        # we're looking at a different data file
        # (load local header into memory)
        if s_offset != self.stream_offset:
            self.stream_offset = s_offset
            self.stream.seek(s_offset)
            self._load_stream_item()

        header_len = len(self.zip_header)
        if additional < header_len:
            self.state = File.HEADER
            self.offset = additional
            return pos

        # assume currently in the data file
        additional -= header_len
        self.state = File.DATA

        # if the file hasn't been opened yet, open it and find its size
        if not self.data: self._open_data_file()

        if additional < self.data_len:
            self.data.seek(additional)
        else:
            self.state = File.DESCRIPTOR
            self.offset = additional - self.data_len

        return pos

    def seekable(self):
        return True

    def tell(self):
        return self.cursor

    def writable(self):
        return False