示例#1
0
    def worker(self, timeInterval, keepWorking):
        """Poll the queue and flush it to daily data/log files until stopped.

        Args:
            timeInterval: seconds to sleep between polling passes.
            keepWorking: threading.Event set by the main thread; clearing it
                allows this worker loop to stop.
        """
        while keepWorking.is_set():
            # File names roll over daily via the date suffix.
            today = datetime.datetime.now().strftime('%m-%d-%y')
            self.datafile = path + 'DavisVP2_' + today + '.xlsx'
            self.logfile = path + 'status_' + today + '.log'
            try:
                # Append to an existing file, create it otherwise.
                log_mode = 'ab' if os.path.exists(self.logfile) else 'wb'
                self.fd = BufferedWriter(FileIO(self.logfile, log_mode))
            except IOError:
                self.log.error("Cannot create log file %s" % self.logfile)
            try:
                # BUG FIX: this block previously re-opened self.logfile
                # (copy/paste error), so the data file was never created.
                data_mode = 'ab' if os.path.exists(self.datafile) else 'wb'
                # NOTE(review): both opens assign the same self.fd attribute,
                # so the log writer is overwritten here -- confirm whether a
                # separate attribute was intended for the data file.
                self.fd = BufferedWriter(FileIO(self.datafile, data_mode))
            except IOError:
                self.log.error("Cannot create data file %s" % self.datafile)

            self.check_conditions(self.datafile)
            time.sleep(timeInterval)
示例#2
0
文件: fabioutils.py 项目: aglie/fabio
    def __init__(self, name, mode="rb", buffering=0):
        """Open *name* as a raw file object.

        Thin wrapper keeping the classic ``file()`` constructor signature on
        top of ``io.FileIO``.  ``mode`` follows the usual 'r'/'w'/'a'
        conventions ('b' for binary, '+' for simultaneous read/write, 'U'
        for universal-newline input; 'U' cannot be combined with 'w' or
        '+').  ``buffering`` (0 = unbuffered, 1 = line buffered, larger
        values = buffer size) is only honoured on Python 2; Python 3's
        FileIO constructor does not accept it.
        """
        # Python 3's FileIO has no buffering parameter, so drop it there.
        init_args = (name, mode, buffering) if six.PY2 else (name, mode)
        FileIO.__init__(self, *init_args)
        # Serialises concurrent access to this file object.
        self.lock = _Semaphore()
        # Lazily computed size cache.
        self.__size = None
示例#3
0
    def __init__(self, username=None, items=False):
        """Create a restore response whose body is spooled to a shared-drive file."""
        super(FileRestoreResponse, self).__init__(username, items)
        # A random hex token gives each response a collision-free file name.
        restore_dir = settings.SHARED_DRIVE_CONF.restore_dir
        self.filename = os.path.join(restore_dir, uuid4().hex)
        # 'w+' so the body can be written now and re-read later.
        body_path = self.get_filename(self.BODY_TAG_SUFFIX)
        self.response_body = FileIO(body_path, 'w+')
示例#4
0
def _parse_config_file_or_exit(config_file: io.FileIO) -> Dict:
    """Parse a YAML experiment config file, exiting the process if invalid.

    Args:
        config_file: open file object containing YAML; it is closed before
            this function returns.

    Returns:
        The parsed experiment configuration as a dict.
    """
    experiment_config = yaml.safe_load(config_file.read())
    config_file.close()
    if not experiment_config or not isinstance(experiment_config, dict):
        # BUG FIX: diagnostics belong on stderr, not stdout.
        print("Error: invalid experiment config file {}".format(config_file.name),
              file=sys.stderr)
        sys.exit(1)
    return experiment_config
示例#5
0
 def get_from_file_memory_duplicate(path):
     """Return an ELF parsed from an in-memory copy of the file at *path*."""
     # Copy the file into memory so the OS file handle can be released early.
     with FileIO(path, 'rb') as source:
         payload = source.read()
     memory_file = StringIO()
     memory_file.write(payload)
     memory_file.seek(0, os.SEEK_SET)
     return ELF(memory_file)
示例#6
0
    def __init__(self, name: str, mode: str = 'r', root: str = None, mkdir: bool = True, *args, **kwargs):
        """Open a local file below *root*.

        Args:
            name: Name of file, relative to root.
            mode: Open mode.
            root: Root to prefix name with for absolute path in filesystem.
            mkdir: Whether or not to create non-existing paths automatically.

        Raises:
            ValueError: If no root is given, the name escapes the root, or a
                missing directory cannot be created with mkdir disabled.
        """
        # A root directory is mandatory despite the parameter's default.
        if root is None:
            raise ValueError('No root directory given.')

        # Reject absolute names and parent-directory escapes.
        if name.startswith('/') or '..' in name:
            raise ValueError('Only files within root directory are allowed.')

        self.filename = name
        full_path = os.path.join(root, name)

        # Create the parent directory on demand, when allowed to.
        parent = os.path.dirname(full_path)
        if not os.path.exists(parent):
            if not mkdir:
                raise ValueError('Cannot write into sub-directory with disabled mkdir option.')
            os.makedirs(parent)

        # Hand the final absolute path to the raw FileIO constructor.
        FileIO.__init__(self, full_path, mode)
示例#7
0
    def __init__(self, filename, nstates, natoms, vendor='PyMOL', box=None):
        """Open *filename* for binary writing and emit a DCD-style header.

        Writes the 'CORD' header record, a two-line title record, and the
        atom-count record through self.writeFortran (defined elsewhere).
        NOTE(review): *box* is accepted but not used in this constructor --
        presumably consumed by later frame writes; confirm.
        """
        file.__init__(self, filename, 'wb')  # py2 built-in file, binary mode
        self.natoms = natoms
        self.fmt = '%df' % (natoms)  # struct format for one frame of floats
        # nstates > 0 selects the CHARMm header variant below.
        charmm = int(nstates > 0)

        # Header
        fmt='4s 9i d 9i'
        header = [b'CORD', # 4s
                nstates, 1, 1, 0, 0, 0, 0, 0, 0, # 9i
                1.0, # d
                0, 0, 0, 0, 0, 0, 0, 0, 0, # 9i
                ]
        if charmm:
            # DELTA is stored as a double with X-PLOR but as a float with CHARMm
            fmt = '4s 9i f 10i'
            header.append(24) # dummy charmm version number
        self.writeFortran(header,fmt)

        # Title
        fmt = 'i80s80s'
        title = [2, # 1i
                b'* TITLE'.ljust(80), # 80s
                (b'* Created by ' + vendor.encode()).ljust(80), # 80s
                ]
        self.writeFortran(title,fmt,length=160+4)

        # NATOM
        self.writeFortran([natoms],'i')
示例#8
0
    def __init__(self,
                 sampling_rate: int,
                 mffdir: str,
                 data_type: str = 'EEG'):
        """
        **Parameters**

        * **`sampling_rate`**: sampling rate of all channels.  Sampling rate
        has to fit in a 3-byte integer.  See docs in `mffpy.header_block`.

        * **`data_type`**: name of the type of signal.

        * **`mffdir`**: directory of the mff recording to stream data to.

        **Notes**

        Because we are streaming the recording to disk, the folder into which
        it is to be saved must have been created prior to the initialization of
        this class.
        """

        super().__init__(sampling_rate, data_type)
        # Use the first default file name; compatibility is checked before
        # any bytes are streamed.
        filename = self.default_filename_fmt % 1
        self.check_compatibility(filename)
        # io.FileIO is always binary, so 'w' truncates/creates a binary stream.
        self.stream = FileIO(join(mffdir, filename), mode='w')
示例#9
0
 async def gain_xp(self, message):
     """Award XP to the message author, handling cooldown and level-ups.

     NOTE(review): `FileIO(..., "save", ...)` here is not io.FileIO -- it is
     presumably a project helper (Red-bot style fileIO) that persists the
     leader board to JSON; confirm the import.
     """
     user = message.author
     id = user.id  # shadows the builtin `id`; kept for compatibility
     if self.check_joined(id):
         if id in self.gettingxp:
             # Seconds elapsed since the user last earned XP.
             seconds = abs(self.gettingxp[id] - int(time.perf_counter()))
             if seconds >= self.cooldown:
                 self.add_xp(id)
                 self.gettingxp[id] = int(time.perf_counter())
                 FileIO("data/levels/leader_board.json", "save",
                        self.leader_board)
             # Level up once enough XP has accumulated for the current rank.
             if self.leader_board[user.id]["XP"] >= self.get_level_xp(
                     self.leader_board[user.id]["rank"]):
                 self.leader_board[user.id]["rank"] += 1
                 self.leader_board[user.id]["XP"] = 0
                 msg = '{} **has leveled up and is now level {}!!!\n HURRAY!!**'
                 msg = msg.format(message.author.display_name,
                                  self.leader_board[user.id]["rank"])
                 await self.bot.send_message(message.channel, msg)
                 FileIO("data/levels/leader_board.json", "save",
                        self.leader_board)
         else:
             # First XP event for this user: no cooldown applies.
             self.add_xp(id)
             self.gettingxp[id] = int(time.perf_counter())
             FileIO("data/levels/leader_board.json", "save",
                    self.leader_board)
 def wrapper(self, file_, *args, **kwargs):
     """Normalise *file_* (path string or file-like) before calling the
     wrapped method `fn` (captured from the enclosing decorator scope).

     Note: `basestring` implies Python 2.
     """
     if isinstance(file_, basestring):
         # Using FileIO here instead of open()
         # to be able to override the filename
         # which is later used when uploading the file.
         #
         # Explanation:
         #
         # 1) Restkit reads the filename
         # from "name" attribute of a file-like object,
         # there is no other way to specify a filename;
         #
         # 2) The attribute may contain the full path to file,
         # which does not work well as a filename;
         #
         # 3) The attribute is readonly when using open(),
         # unlike FileIO object.
         file_ = FileIO(file_, 'rb')
         file_.name = path.basename(file_.name)
     if hasattr(file_, 'read'):
         # A file-like object must have 'read' method
         return fn(self, file_, *args, **kwargs)
     else:
         raise TypeError('Expected either a string '
                         'containing a path to file or a '
                         'file-like object, got {}'.format(type(file_)))
示例#11
0
def write_to_file(file: io.FileIO, data: bytes, start: int = 0):
    """Write all of *data* into *file* at offset *start*, then fsync.

    Loops until every byte is written, since a raw write may be partial.
    """
    total = len(data)
    file.seek(start)
    done = 0
    while done < total:
        # Advance by however many bytes this raw write actually accepted.
        done += file.write(data[done:])
    # Push the bytes through the OS cache onto stable storage.
    os.fsync(file.fileno())
示例#12
0
def get_header(raw_file: io.FileIO) -> Header:
    """Get the raw file header from an open ATOP file.

    Args:
        raw_file: An open ATOP file capable of reading as bytes.

    Returns:
        raw_header: The header at the beginning of an ATOP file.

    Raises:
        ValueError: If there are not enough bytes to read the header, or the bytes were invalid.
    """
    # Read the header directly into the struct, there is no padding to consume or add.
    # Use default Header as the baseline in order to check the version. It can be transferred without re-reading.
    header = _HEADER_BY_VERSION[_DEFAULT_VERSION]()
    # readinto fills the ctypes structure in place from the file bytes.
    raw_file.readinto(header)

    if header.magic != MAGIC:
        msg = f'File does not contain raw atop output (wrong magic number): {hex(header.magic)}'
        raise ValueError(msg)

    header_version = header.get_version()
    if header_version != _DEFAULT_VERSION and header_version in _HEADER_BY_VERSION:
        # Header byte length is consistent across versions. Transfer the initial read into the versioned header.
        header = _HEADER_BY_VERSION[header_version].from_buffer(header)

    # Ensure all struct lengths match the lengths specific in the header. If not, we cannot read the file further.
    header.check_compatibility()

    return header
示例#13
0
 def assert_file(our_file: io.FileIO, passed_file: io.FileIO):
     """Assert that *passed_file* is the other end of the pipe behind *our_file*.

     Checks mode, FIFO-ness, and that bytes written into the writer end come
     out of the reader end.  F_GETPIPE_SZ and SIZEOF_INT are module-level
     constants defined elsewhere.
     """
     our_readable = our_file.readable()
     got_mode = passed_file.mode
     our_stat = os.fstat(our_file.fileno())
     passed_stat = os.fstat(passed_file.fileno())
     is_fifo = stat.S_ISFIFO(passed_stat.st_mode)
     # If our end reads, the passed end must be the write end, and vice versa.
     expected_mode = "wb" if our_readable else "rb"
     reader = our_file if our_readable else passed_file
     writer = passed_file if our_readable else our_file
     # Verify that we have a pipe with its two ends
     if is_fifo and our_stat == passed_stat:
         pipe_size = fcntl.fcntl(writer.fileno(), F_GETPIPE_SZ)
         # Check for pending bytes in the pipe
         pending_bytes = bytearray(SIZEOF_INT)
         fcntl.ioctl(reader.fileno(), termios.FIONREAD, pending_bytes)
         pending_bytes = struct.unpack_from("=i", pending_bytes)[0]
         # Never write more than the pipe can hold without blocking.
         test_size = min(mmap.PAGESIZE, pipe_size - pending_bytes)
         expected_bytes = random.randbytes(test_size)
         writer.write(expected_bytes)
         writer.flush()
         got_bytes = reader.read(pipe_size)
     else:
         expected_bytes = None
         got_bytes = None
     assert (got_mode, is_fifo, passed_stat,
             got_bytes) == (expected_mode, True, our_stat, expected_bytes)
示例#14
0
 def wrapper(self, file_, *args, **kwargs):
     """Normalise *file_* (path string or file-like) before calling the
     wrapped method `fn` (captured from the enclosing decorator scope).

     Note: `basestring` implies Python 2.
     """
     if isinstance(file_, basestring):
         # Using FileIO here instead of open()
         # to be able to override the filename
         # which is later used when uploading the file.
         #
         # Explanation:
         #
         # 1) Restkit reads the filename
         # from "name" attribute of a file-like object,
         # there is no other way to specify a filename;
         #
         # 2) The attribute may contain the full path to file,
         # which does not work well as a filename;
         #
         # 3) The attribute is readonly when using open(),
         # unlike FileIO object.
         file_ = FileIO(file_, 'rb')
         file_.name = path.basename(file_.name)
     if hasattr(file_, 'read'):
         # A file-like object must have 'read' method
         return fn(self, file_, *args, **kwargs)
     else:
         raise TypeError('Expected either a string '
                         'containing a path to file or a '
                         'file-like object, got {}'.format(type(file_)))
示例#15
0
 def New(self, request, context):
     """Create the filesystem path described by *request* (gRPC handler).

     Supported request.type values here: PATH_FILE (touch a file) and
     PATH_DIR (mkdir); symlinks and packages are explicitly rejected.
     Errors are reported in the returned status, never raised.
     """
     result = fms.ReturnStatus()
     try:
         if request.type == PATH_FILE:
             # 'w+' creates (or truncates) the file; we only need the side effect.
             new_file = FileIO(request.path, "w+")
             new_file.close()
         elif request.type == PATH_DIR:
             os.mkdir(request.path)
         elif request.type == PATH_SYMLINK:
             raise Exception("creation of symlinks not supported")
         elif request.type == PATH_PACKAGE:
             raise Exception("creation of packages not supported")
         result.code = OK
     except OSError as ose:
         result.code = OS_ERROR
         if ose.errno:
             result.error_code = ose.errno
         result.error_msg = utf8(ose.strerror)
         result.error_file = utf8(request.path)
     except IOError as ioe:
         result.code = IO_ERROR
         if ioe.errno:
             result.error_code = ioe.errno
         result.error_msg = utf8(ioe.strerror)
         result.error_file = utf8(ioe.filename)
     except Exception as err:
         # Catch-all so the RPC always returns a status instead of raising.
         result.code = ERROR
         result.error_msg = utf8(err)
         result.error_file = utf8(request.path)
     return result
示例#16
0
    def __init__(self,
                 fname,
                 mode='rb',
                 endian='<',
                 header_prec='i',
                 *args,
                 **kwargs):
        """Open a Fortran unformatted file.
        
        Parameters
        ----------
        mode : str, optional
            Open mode; the default 'rb' opens for binary reading.
        endian : character, optional
            Specify the endian-ness of the file.  Possible values are
            '>', '<', '@' and '='.  See the documentation of Python's
            struct module for their meanings.  The default is '<'
            (little-endian).
        header_prec : character, optional
            Specify the precision used for the record headers.  Possible
            values are 'h', 'i', 'l' and 'q' with their meanings from
            Python's struct module.  The default is 'i' (the system's
            default integer).

        """
        m = mode
        file.__init__(self, fname, mode=m, *args, **kwargs)  # py2 built-in file
        self.ENDIAN = endian
        self.HEADER_PREC = header_prec
示例#17
0
    def __init__(self, name, mode="rb", buffering=0, temporary=False):
        """file(name[, mode[, buffering]]) -> file object

        Open a file.  The mode can be 'r', 'w' or 'a' for reading (default),
        writing or appending.  The file will be created if it doesn't exist
        when opened for writing or appending; it will be truncated when
        opened for writing.  Add a 'b' to the mode for binary files.
        Add a '+' to the mode to allow simultaneous reading and writing.
        If the buffering argument is given, 0 means unbuffered, 1 means line
        buffered, and larger numbers specify the buffer size.  The preferred way
        to open a file is with the builtin open() function.
        Add a 'U' to mode to open the file for input with universal newline
        support.  Any line ending in the input file will be seen as a '\n'
        in Python.  Also, a file so opened gains the attribute 'newlines';
        the value for this attribute is one of None (no newline read yet),
        '\r', '\n', '\r\n' or a tuple containing all the newline types seen.

        'U' cannot be combined with 'w' or '+' mode.

        Note: buffering is only honoured on Python 2; Python 3's FileIO
        does not accept it.

        :param temporary: if True, destroy file at close.
        """
        if six.PY2:
            FileIO.__init__(self, name, mode, buffering)
        else:  # for python3 we drop buffering
            FileIO.__init__(self, name, mode)
        # Serialises concurrent access to this file object.
        self.lock = _Semaphore()
        # Lazily computed size cache.
        self.__size = None
        # Checked by close() to decide whether to unlink the file.
        self.__temporary = temporary
示例#18
0
    def __init__(self, inheritable=False, nonblocking=False):
        """File-like object wrapping ``inotify_init1()``. Raises ``OSError`` on failure.
        :func:`~inotify_simple.INotify.close` should be called when no longer needed.
        Can be used as a context manager to ensure it is closed, and can be used
        directly by functions expecting a file-like object, such as ``select``, or with
        functions expecting a file descriptor via
        :func:`~inotify_simple.INotify.fileno`.

        Args:
            inheritable (bool): whether the inotify file descriptor will be inherited by
                child processes. The default,``False``, corresponds to passing the
                ``IN_CLOEXEC`` flag to ``inotify_init1()``. Setting this flag when
                opening filedescriptors is the default behaviour of Python standard
                library functions since PEP 446.

            nonblocking (bool): whether to open the inotify file descriptor in
                nonblocking mode, corresponding to passing the ``IN_NONBLOCK`` flag to
                ``inotify_init1()``. This does not affect the normal behaviour of
                :func:`~inotify_simple.INotify.read`, which uses ``poll()`` to control
                blocking behaviour according to the given timeout, but will cause other
                reads of the file descriptor (for example if the application reads data
                manually with ``os.read(fd)``) to raise ``BlockingIOError`` if no data
                is available."""
        # Load libc lazily, once per process.
        global _libc
        _libc = _libc or cdll.LoadLibrary('libc.so.6')
        # CLOEXEC unless inheritable was requested; NONBLOCK when asked for.
        flags = (not inheritable) * CLOEXEC | bool(nonblocking) * NONBLOCK
        # Wrap the raw inotify descriptor as a readable binary file object.
        FileIO.__init__(self,
                        _libc_call(_libc.inotify_init1, flags),
                        mode='rb')
        self._poller = poll()
        self._poller.register(self.fileno())
示例#19
0
文件: fs.py 项目: terencehonles/xzip
    def __init__(self, path, flags, info, fh=None, base='.', depth=0):
        """Open the streams backing a virtual zip member at *path*.

        Args:
            path: name of the archive entry being served.
            flags: open flags (stored as-is; not interpreted here).
            info: archive info object -- semantics defined elsewhere; see fs.py.
            fh: optional file handle passed through from the FUSE layer.
            base: base directory holding the 'meta' and 'data' subdirectories.
            depth: NOTE(review): presumably directory nesting depth -- confirm.
        """
        super(File, self).__init__()

        self.path = path
        self.flags = flags
        self.fh = fh

        self.info = info
        self.depth = depth
        # Read position and offset within the synthesised stream.
        self.cursor = 0
        self.offset = 0
        # State machine starts at the zip entry header.
        self.state = File.HEADER

        # stream item info
        self.stream_offset = 0
        self.zip_header = b''
        self.descriptor = b''

        # data file info
        self.data = None
        self.data_name = ''
        self.data_len = 0

        # streams
        prefix = os.path.join(base, 'meta', os.path.basename(path))
        self.stream = FileIO(prefix + '.stream', 'rb')
        self.dir = FileIO(prefix + '.dir', 'rb')
        self.data_dir = os.path.join(base, 'data')

        # init
        self._load_stream_item()
        self.lock = threading.Lock()
示例#20
0
 def get_from_file_memory_duplicate(path):
     """Return an ELF parsed from an in-memory copy of the file at *path*."""
     # Copy the file into memory so the OS file handle can be released early.
     with FileIO(path, 'rb') as source:
         payload = source.read()
     memory_file = StringIO()
     memory_file.write(payload)
     memory_file.seek(0, os.SEEK_SET)
     return ELF(memory_file)
示例#21
0
class StreamingBinWriter(BinWriter):
    """
    Subclass of BinWriter to support streaming bin file to disk.
    """
    def __init__(self,
                 sampling_rate: int,
                 mffdir: str,
                 data_type: str = 'EEG'):
        """
        **Parameters**

        * **`sampling_rate`**: sampling rate of all channels.  Sampling rate
        has to fit in a 3-byte integer.  See docs in `mffpy.header_block`.

        * **`data_type`**: name of the type of signal.

        * **`mffdir`**: directory of the mff recording to stream data to.

        **Notes**

        Because we are streaming the recording to disk, the folder into which
        it is to be saved must have been created prior to the initialization of
        this class.
        """

        super().__init__(sampling_rate, data_type)
        filename = self.default_filename_fmt % 1
        self.check_compatibility(filename)
        # io.FileIO is always binary; 'w' truncates/creates the stream file.
        self.stream = FileIO(join(mffdir, filename), mode='w')

    def write(self, filename: str, *args, **kwargs):
        """Finalize the streamed recording; *filename* and extra args are unused."""
        # Because the recording has been streamed to a file, all that is
        # required here is closing the stream
        self.stream.close()
示例#22
0
    def __init__(self,
                 name: str = None,
                 mode: str = 'r',
                 prefix: str = None,
                 suffix: str = None,
                 root: str = '/tmp/pyobs/',
                 mkdir: bool = True,
                 *args,
                 **kwargs):
        """Open/create a temp file.

        Args:
            name: Name of file.
            mode: Open mode.
            prefix: Prefix for automatic filename creation in write mode.
            suffix: Suffix for automatic filename creation in write mode.
            root: Temp directory.
            mkdir: Whether to automatically create directories.

        Raises:
            ValueError: If no root is given, no name is given in read mode,
                the name escapes the root, or a missing directory cannot be
                created with mkdir disabled.
        """
        # no root given?
        if root is None:
            raise ValueError('No root directory given.')

        # create root?
        if not os.path.exists(root):
            os.makedirs(root)

        # no filename?
        if name is None:
            # cannot read from non-existing filename
            if 'r' in mode:
                raise ValueError('No filename given to read from.')

            # create new temp file name
            # NOTE(review): the temp file is deleted when this block exits;
            # only its (unique at that moment) basename is kept.
            with NamedTemporaryFile(mode=mode,
                                    prefix=prefix,
                                    suffix=suffix,
                                    dir=root) as tmp:
                name = os.path.basename(tmp.name)

        # filename is not allowed to start with a / or contain ..
        if name.startswith('/') or '..' in name:
            raise ValueError('Only files within root directory are allowed.')

        # build filename
        self.filename = name
        full_name = os.path.join(root, name)

        # need to create directory?
        path = os.path.dirname(full_name)
        if not os.path.exists(path):
            if mkdir:
                os.makedirs(path)
            else:
                raise ValueError(
                    'Cannot write into sub-directory with disabled mkdir option.'
                )

        # init FileIO
        FileIO.__init__(self, full_name, mode)
示例#23
0
    def __init__(self, filename, nstates=-1, natoms=-1, vendor="PyMOL", box=None):
        """Open *filename* for text writing and emit a trajectory title line.

        NOTE(review): nstates is accepted but unused here; box is only stored.
        """
        file.__init__(self, filename, "w")  # py2 built-in file, text mode
        self.natoms = natoms
        self.box = box

        # Write Trajectory Header Information
        # print-to-self works because this object is itself a writable file.
        print("TITLE : Created by %s with %d atoms" % (vendor, natoms), file=self)
示例#24
0
    def __init__(self, stream: io.FileIO):
        """Keep only the path behind *stream*: pysbs works on file paths."""
        assert isinstance(stream, (io.FileIO, io.TextIOWrapper))

        # NOTE pysbs requires a filepath, so remember the name and release
        # the handle; the sbs document is loaded lazily elsewhere.
        self.path = stream.name
        stream.close()
        self.sbs = None
示例#25
0
class FileRestoreResponse(RestoreResponse):
    """Restore response whose payload is spooled to a file on the shared drive."""

    BODY_TAG_SUFFIX = "-body"
    EXTENSION = "xml"

    def __init__(self, username=None, items=False):
        super(FileRestoreResponse, self).__init__(username, items)
        # Random hex gives each response a collision-free base filename.
        self.filename = os.path.join(settings.SHARED_DRIVE_CONF.restore_dir, uuid4().hex)

        # 'w+' so the body can be written now and re-read during finalize().
        self.response_body = FileIO(self.get_filename(self.BODY_TAG_SUFFIX), "w+")

    def get_filename(self, suffix=None):
        """Return the full path for this response, with an optional suffix."""
        # BUG FIX: the template had lost its {filename} placeholder (the
        # filename= kwarg was silently ignored by str.format), so every
        # response collided on one literal path.
        return "{filename}{suffix}.{ext}".format(filename=self.filename, suffix=suffix or "", ext=self.EXTENSION)

    def __add__(self, other):
        # BUG FIX: `raise NotImplemented()` called the NotImplemented
        # singleton (a TypeError about a non-callable).  Returning
        # NotImplemented lets Python raise the proper TypeError for
        # unsupported operands.
        if not isinstance(other, FileRestoreResponse):
            return NotImplemented

        response = FileRestoreResponse(self.username, self.items)
        response.num_items = self.num_items + other.num_items

        # Rewind both bodies so the copies start from the beginning.
        self.response_body.seek(0)
        other.response_body.seek(0)

        shutil.copyfileobj(self.response_body, response.response_body)
        shutil.copyfileobj(other.response_body, response.response_body)

        return response

    def finalize(self):
        """
        Creates the final file with start and ending tag
        """
        with open(self.get_filename(), "w") as response:
            # Add 1 to num_items to account for message element
            items = self.items_template.format(self.num_items + 1) if self.items else ""
            response.write(
                self.start_tag_template.format(
                    items=items, username=self.username, nature=ResponseNature.OTA_RESTORE_SUCCESS
                )
            )

            self.response_body.seek(0)
            shutil.copyfileobj(self.response_body, response)

            response.write(self.closing_tag)

        self.finalized = True
        self.close()

    def get_cache_payload(self, full=False):
        """Return the filename, or the open file itself when *full* is set."""
        return {"data": self.get_filename() if not full else open(self.get_filename(), "r")}

    def as_string(self):
        """Return the finalized response document as a string."""
        with open(self.get_filename(), "r") as f:
            return f.read()

    def get_http_response(self):
        """Stream the finalized file as an HTTP response with Content-Length."""
        headers = {"Content-Length": os.path.getsize(self.get_filename())}
        return stream_response(open(self.get_filename(), "r"), headers)
示例#26
0
 def isatty(self):
     """Return True when the wrapped descriptor refers to a terminal."""
     # TODO: Couldn't we just subclass FileIO?
     # closefd=False: we only borrow the descriptor for this probe, and the
     # with-block closes the FileIO wrapper without closing the fd itself.
     with FileIO(self._fileno, 'r', False) as probe:
         return probe.isatty()
示例#27
0
async def func(filepath, size):
    """Yield file-like objects for *filepath*, each at most *size* bytes.

    Small files are yielded whole; videos are re-split via the `video`
    helper; anything else is served through chunked views (IOHandler.ChunkIO,
    defined elsewhere).  Each yielded object carries .name (and .path where
    available) for upload naming.
    """
    if os_path.getsize(filepath) <= size:
        # Fits in a single part: hand out the file itself.
        with FileIO(filepath, 'rb') as f:
            f.name = os_path.basename(filepath)
            f.path = filepath
            yield f
    else:
        file_ext = os_path.splitext(filepath)[1]
        video_units = ['.mp4','.mkv','.avi','.webm','.wmv','.mov','.m3u8']
        if file_ext in video_units:
            # Split videos with the project helper; clean up each part after use.
            async for splitted_video in video(filepath, size):
                with FileIO(splitted_video, 'rb') as f:
                    f.name = os_path.basename(splitted_video)
                    f.path = splitted_video
                    yield f
                os_remove(splitted_video)
        else:
            # Generic binary: serve fixed-size windows with .001, .002 ... suffixes.
            total_size = os_path.getsize(filepath)
            pos = 0
            index = 0
            while pos < total_size:
                index += 1
                with IOHandler.ChunkIO(filepath, pos, size) as f:
                    pos += size
                    f.name = os_path.basename(filepath) + f'.{index:03d}'
                    yield f
示例#28
0
    def assertFlippedBit(self, file_orig, file_modded, position):
        """Assert the two files differ in exactly the bit at *position*.

        *position* is a bit index over the whole file; byte position / 8
        and bit position % 8 locate it.  Note: xrange and the plain `/`
        division indicate this is Python 2 code.
        """
        len_orig = os.path.getsize(file_orig)
        len_modded = os.path.getsize(file_modded)
        self.assertEqual(len_orig, len_modded, "Files of different sizes")

        f_o = FileIO(file_orig, "r+b")
        f_m = FileIO(file_modded, "r+b")

        for i in xrange(len_orig):
            # read in a byte from each file and compare
            b_o = ord(f_o.read(1))
            b_m = ord(f_m.read(1))
            if i == (position / 8):
                # The byte containing the target bit: compare bit by bit.
                for m in xrange(8):
                    bit_m = BitwiseAnalyser.BitManipulator.getBitFromByteAt(
                        b_m, m)
                    bit_o = BitwiseAnalyser.BitManipulator.getBitFromByteAt(
                        b_o, m)
                    if m == (position % 8):
                        self.assertNotEqual(
                            bit_m, bit_o,
                            "Bits are equal when the should be different at position: "
                            + str(position))
                    else:
                        self.assertEqual(
                            bit_m, bit_o,
                            "Bits are incorrectly different at position " +
                            str(i))
            else:
                # Every other byte must be untouched.
                self.assertEqual(
                    b_o, b_m,
                    "Bytes differ (when the shouldn't) at position " + str(i))
        f_o.close()
        f_m.close()
示例#29
0
def write_to_file(file_fd: io.FileIO, dir_fileno: Optional[int],
                  data: bytes, fsync: bool=True):
    """Write all of *data* to *file_fd*, optionally fsyncing file and directory.

    Args:
        file_fd: open writable raw file.
        dir_fileno: descriptor of the containing directory for fsync, or None.
        data: bytes to write in full.
        fsync: when True, flush file (and directory entry) to stable storage.
    """
    length_to_write = len(data)
    written = 0
    while written < length_to_write:
        # BUG FIX: accumulate with += -- a raw write may be partial, and the
        # previous `written = file_fd.write(...)` lost the running offset,
        # rewriting the same slice and mis-counting progress.
        written += file_fd.write(data[written:])
    if fsync:
        fsync_file_and_dir(file_fd.fileno(), dir_fileno)
示例#30
0
 def get_status(self, ep0: io.FileIO) -> None:
     """Write the current auth status report to endpoint file *ep0*.

     Builds an AuthStatusReport (ctypes struct defined elsewhere) from the
     stored sequence number and status, stamping a CRC32 computed over the
     struct bytes excluding the trailing crc32 field itself.
     """
     buf = AuthStatusReport(type=ReportType.get_auth_status)
     buf.seq = self._seq
     buf.status = self._status
     # CRC covers everything before the final c_uint32 crc32 member.
     buf.crc32 = zlib.crc32(
         bytes(buf)[:ctypes.sizeof(AuthStatusReport) -
                    ctypes.sizeof(c_uint32)])
     ep0.write(buf)  #type: ignore[arg-type]
示例#31
0
 def flipByteAt(inputfile, position):
     """Invert every bit of the byte at *position* in the file *inputfile*."""
     with FileIO(inputfile, "r+") as patched:
         patched.seek(position)
         original = ord(patched.read(1))
         # Step back over the byte just consumed, then overwrite it inverted.
         patched.seek(-1, 1)
         patched.write(struct.pack("B", original ^ 0xFF))
示例#32
0
class HidrawDS4Device(DS4Device):
    """DS4 controller backed by a hidraw device node plus an evdev input device."""

    def __init__(self, name, addr, type, hidraw_device, event_device):
        try:
            # Non-blocking raw HID reports; closefd=False because we manage
            # the descriptor via os.open/os.close ourselves.
            self.report_fd = os.open(hidraw_device, os.O_RDWR | os.O_NONBLOCK)
            self.fd = FileIO(self.report_fd, "rb+", closefd=False)
            # Grab the event device so no other process receives its input.
            self.input_device = InputDevice(event_device)
            self.input_device.grab()
        except (OSError, IOError) as err:
            raise DeviceError(err)

        # Reusable buffer sized by the subclass-provided report_size.
        self.buf = bytearray(self.report_size)

        super(HidrawDS4Device, self).__init__(name, addr, type)

    def read_report(self):
        """Read one input report; returns parsed report, False to skip, or None."""
        try:
            ret = self.fd.readinto(self.buf)
        except IOError:
            return

        # Disconnection
        if ret == 0:
            return

        # Invalid report size or id, just ignore it
        if ret < self.report_size or self.buf[0] != self.valid_report_id:
            return False

        if self.type == "bluetooth":
            # Cut off bluetooth data
            buf = zero_copy_slice(self.buf, 2)
        else:
            buf = self.buf

        return self.parse_report(buf)

    def read_feature_report(self, report_id, size):
        """Fetch a HID feature report via ioctl; first byte is the report id."""
        op = HIDIOCGFEATURE(size + 1)
        buf = bytearray(size + 1)
        buf[0] = report_id

        return fcntl.ioctl(self.fd, op, bytes(buf))

    def write_report(self, report_id, data):
        """Send an output report (report id prepended); no-op over bluetooth."""
        if self.type == "bluetooth":
            # TODO: Add a check for a kernel that supports writing
            # output reports when such a kernel has been released.
            return

        hid = bytearray((report_id,))
        self.fd.write(hid + data)

    def close(self):
        """Close the report stream and release the grabbed input device."""
        try:
            self.fd.close()
            self.input_device.ungrab()
        except IOError:
            pass
    def __init__(self):
        """Initialise LQR-simulation controller state, ROS wiring and loggers."""

        # Flight phase flags consumed elsewhere in the controller.
        self.start_flying = True
        self.stop_flying = False
        self.return_to_home = False
        self.is_takeoff = False
        # self.PID = SimplePID()

        # Drone state estimates (position / velocity / acceleration, per axis).
        self.drone_position_x = 0
        self.drone_position_y = 0
        self.drone_position_z = 0

        self.drone_velocity_x = 0
        self.drone_velocity_y = 0
        self.drone_velocity_z = 0

        self.drone_acceleration_x = 0
        self.drone_acceleration_y = 0
        self.drone_acceleration_z = 0

        # Target (setpoint) state, per axis.
        self.target_position_x = 0
        self.target_position_y = 0
        self.target_position_z = 0

        self.target_velocity_x = 0
        self.target_velocity_y = 0
        self.target_velocity_z = 0

        self.drone_yaw = 0
        self.drone_yaw_radians = 0

        # Commanded velocities/accelerations computed by the control loop.
        self.vx = 0
        self.vy = 0
        self.vx1 = 0
        self.vy1 = 0

        self.ax = 0
        self.ay = 0

        self.controller = BasicDroneController()
        # Navdata feed updates the drone state fields above.
        self.subNavdata = rospy.Subscriber('/ardrone/navdata',Navdata,self.ReceiveNavdata) 
        
        # CSV-style telemetry log, one file per run (timestamped name).
        self.logger = logging.getLogger('LQR_simulation')
        self.fileHandler_message = logging.StreamHandler(BufferedWriter(FileIO("LQR_simulation_data" + time.strftime("%Y%m%d-%H%M%S") + ".log", "w")))
        self.logger.addHandler(self.fileHandler_message)
        self.formatter_message = logging.Formatter('%(message)s')
        self.fileHandler_message.setFormatter(self.formatter_message)
        self.logger.setLevel(LoggerWarningLevel)
        self.logger.info('Time;target_position_x,target_position_y,target_position_z;target_velocity_x,target_velocity_y,target_velocity_z;drone_position_x,drone_position_y,drone_position_z;drone_velocity_x,drone_velocity_y,drone_velocity_z,vx1,vy1,ax,ay')

        # Separate log for the landing phase.
        # NOTE(review): this reuses self.fileHandler_message/formatter_message,
        # clobbering the handles of the first logger -- confirm intentional.
        self.logger_land = logging.getLogger('LQR_simulation_land')
        self.fileHandler_message = logging.StreamHandler(BufferedWriter(FileIO("LQR_simulation_PD_land_data" + time.strftime("%Y%m%d-%H%M%S") + ".log", "w")))
        self.logger_land.addHandler(self.fileHandler_message)
        self.formatter_message = logging.Formatter('%(message)s')
        self.fileHandler_message.setFormatter(self.formatter_message)
        self.logger_land.setLevel(LoggerWarningLevel)
        self.logger_land.info('Time;target_position_x,target_position_y,target_position_z;target_velocity_x,target_velocity_y,target_velocity_z;drone_position_x,drone_position_y,drone_position_z;drone_velocity_x,drone_velocity_y,drone_velocity_z,vx1,vy1,ax,ay')
示例#34
0
def save_q(Q, file_location):
    """Save the current q learning values to the specified location.

    Parent directories are created on demand; an already-existing
    directory is not treated as an error.

    :param Q: the Q-value table (any picklable object)
    :param file_location: path of the file to write
    """
    try:
        makedirs(dirname(file_location))
    except OSError:
        # Directory already exists (or dirname is '') -- both are fine.
        pass
    # Context manager guarantees the descriptor is closed even if
    # pickling raises (the original leaked it on error).
    with FileIO(file_location, 'w') as file:
        pickle.dump(Q, file)
示例#35
0
 def close(self):
     """Close the file and, if it was opened as temporary, unlink it.

     Any error during removal is logged and then re-raised.
     """
     # Capture the path before closing; it is needed for the unlink below.
     name = self.name
     FileIO.close(self)
     if self.__temporary:
         try:
             os.unlink(name)
         except Exception as err:
             logger.error("Unable to remove %s: %s" % (name, err))
             raise (err)
示例#36
0
 def close(self):
     """Close the underlying FileIO, then delete the backing file when
     this instance was created as a temporary file.

     :raises Exception: re-raised after logging if the unlink fails.
     """
     # Keep a reference to the path: needed after the file is closed.
     name = self.name
     FileIO.close(self)
     if self.__temporary:
         try:
             os.unlink(name)
         except Exception as err:
             logger.error("Unable to remove %s: %s" % (name, err))
             raise(err)
 def __init__(self, data=None):
     """Create a Blob, optionally writing *data* into it immediately.

     :param data: initial payload to store, or None for an empty blob
     :raises TypeError: if called on a subclass of Blob -- subclassing
         is explicitly unsupported (ZODB bug #127182).
     """
     # Raise exception if Blobs are getting subclassed
     # refer to ZODB-Bug No.127182 by Jim Fulton on 2007-07-20
     if (self.__class__ is not Blob):
         raise TypeError('Blobs do not support subclassing.')
     self.__setstate__()
     if data is not None:
         with self.open('w') as file:
             file.write(data)
示例#38
0
    def close(self):
        """Close the file, then delete it if it was opened for writing."""

        # close the underlying raw file first
        FileIO.close(self)

        # a write mode marks this as a scratch file -- remove it from disk
        # NOTE(review): 'w' in self.mode also matches modes like 'w+';
        # presumably intentional here, but confirm against callers.
        if 'w' in self.mode:
            os.remove(self.name)
 def __execfile(name):
     """Read the file *name* and exec() its contents as Python source.

     :raises RuntimeError: chained to the underlying error when the file
         cannot be read or the executed code raises.
     """
     # NOTE(review): exec() on file contents -- only use on trusted files.
     try:
         # 'with' closes the file on every path; the original's
         # 'finally: f.close()' raised NameError when FileIO(name)
         # itself failed, because f was never bound.
         with FileIO(name) as f:
             codestr = f.read()
         exec(codestr)
     except Exception as err:
         # Narrowed from a bare 'except:' (which also swallowed
         # KeyboardInterrupt/SystemExit) and chained so the real
         # cause is preserved.
         raise RuntimeError('Failed to execute file %s' % name) from err
 def __init__(self, data=None):
     """Initialise the Blob; if *data* is given, store it right away.

     Subclassing Blob is rejected outright (see ZODB bug #127182).

     :param data: optional initial contents written via self.open('w')
     """
     # Raise exception if Blobs are getting subclassed
     # refer to ZODB-Bug No.127182 by Jim Fulton on 2007-07-20
     if (self.__class__ is not Blob):
         raise TypeError('Blobs do not support subclassing.')
     self.__setstate__()
     if data is not None:
         with self.open('w') as file:
             file.write(data)
示例#41
0
 def seek(self, position, whence=0):
     """Seek within this embedded sub-file.

     Positions are logical offsets relative to the sub-file's start
     (``__startpos__``) rather than the physical file, so SEEK_CUR (1)
     and SEEK_END (2) are first translated to absolute sub-file
     offsets using the tracked position and sub-file size.

     :param position: offset, interpreted according to *whence*
     :param whence: 0 = from start, 1 = from current, 2 = from end
     :return: the new logical position within the sub-file
     """
     if whence == 1:
         # relative to the current logical position
         position += self.__currentpos__
     elif whence == 2:
         # relative to the logical end of the sub-file
         position += self.__size__

     self.__currentpos__ = position
     # Translate to the physical offset before delegating to FileIO.
     FileIO.seek(self, self.__startpos__ + position)
     return position
示例#42
0
def bench_file_write55():
    """Fill the benchmark file with 0x55 bytes, one block at a time.

    Relies on module globals ``tmpf`` (temp-file object with a .name),
    ``blksize`` and ``filesize``.
    """
    f = FileIO(tmpf.name, "r+")
    zblk = b"\x55" * blksize
    # range: 'xrange' is Python-2 only and a NameError on Python 3.
    for i in range(filesize // blksize):
        pos = 0
        # Raw FileIO.write may be partial; keep writing the remainder
        # of the block until it is fully flushed.
        while pos < blksize:
            n = f.write(memoryview(zblk)[pos:])
            assert n != 0
            pos += n
    f.close()
示例#43
0
def read_from_file(file_fd: io.FileIO, start: int, stop: int) -> bytes:
    """Return the bytes in the half-open range [start, stop) of *file_fd*.

    Seeks to *start* and keeps reading until *stop* bytes worth of data
    have been collected.

    :raises ReachedEndOfFile: if the file ends before *stop*.
    """
    length = stop - start
    assert length >= 0
    file_fd.seek(start)
    chunks = []
    remaining = length
    while remaining > 0:
        chunk = file_fd.read(remaining)
        if chunk == b'':
            raise ReachedEndOfFile('Read until the end of file')
        chunks.append(chunk)
        remaining -= len(chunk)
    data = b''.join(chunks)
    assert len(data) == length
    return data
示例#44
0
File: google.py  Project: ohmu/pghoard
 def get_contents_to_file(self, key, filepath_to_store_to, *, progress_callback=None):
     """Download object *key* into the file at *filepath_to_store_to*.

     On any failure the partially-written file is removed so callers
     never observe a truncated download.

     :return: the object's metadata dict
     """
     fileobj = FileIO(filepath_to_store_to, mode="wb")
     metadata = {}
     succeeded = False
     try:
         metadata = self.get_contents_to_fileobj(key, fileobj, progress_callback=progress_callback)
         succeeded = True
     finally:
         fileobj.close()
         if not succeeded:
             # clean up the partial file on error
             os.unlink(filepath_to_store_to)
     return metadata
示例#45
0
 def flipBitAt(inputfile, position):
     """Flip the bit at *position* (0-based, MSB-first) in *inputfile*.

     :param inputfile: path of the file to modify in place
     :param position: absolute bit index within the file
     :raises IndexError: if position lies outside the file's bit range
     """
     if not 0 <= position < (8 * os.path.getsize(inputfile)):
         raise IndexError("Position "+str(position)+" is out of range")

     # 'with' closes the descriptor even when a read/write fails;
     # the original leaked it on error.
     with FileIO(inputfile, "r+") as f:
         # // : 'position/8' is a float on Python 3 and FileIO.seek
         # rejects float offsets with a TypeError.
         f.seek(position // 8)
         byte = ord(f.read(1))
         f.seek(-1, 1)   # go back 1 byte from the current position
         bitnum = position % 8  # bit index within the byte, MSB == bit 0
         f.write(struct.pack("B", byte ^ (1 << (7 - bitnum))))
示例#46
0
def _bench_file_read(hasher, expect):
    """Hash the benchmark file block-by-block and verify the digest.

    Reads via readinto() into one reusable buffer so no per-block
    allocation happens; relies on module globals tmpf, blksize and
    xbuffer.
    """
    buf = bytearray(blksize)
    h = hasher()
    f = FileIO(tmpf.name, "r")
    while True:
        count = f.readinto(buf)
        if not count:
            break
        # xbuffer yields a zero-copy window; buf[:count] would copy.
        h.update(xbuffer(buf, 0, count))
    f.close()
    assert h.digest() == expect
示例#47
0
 def get_contents_to_file(self, key, filepath_to_store_to):
     """Fetch *key* from the backend into *filepath_to_store_to*.

     The partially-written file is deleted if the transfer fails, so
     a half-downloaded file is never left behind.

     :return: the object's metadata dictionary
     """
     key = self.format_key_for_backend(key)
     self.log.debug("Starting to fetch the contents of: %r to: %r", key, filepath_to_store_to)
     fileobj = FileIO(filepath_to_store_to, mode="wb")
     metadata = {}
     succeeded = False
     try:
         metadata = self.get_contents_to_fileobj(key, fileobj)
         succeeded = True
     finally:
         fileobj.close()
         if not succeeded:
             # remove the incomplete download
             os.unlink(filepath_to_store_to)
     return metadata
示例#48
0
File: google.py  Project: Ormod/pghoard
 def get_contents_to_file(self, obj_key, filepath_to_store_to):
     """Download the GCS object *obj_key* into *filepath_to_store_to*.

     Streams the object in CHUNK_SIZE pieces via MediaIoBaseDownload;
     if the download does not finish, the partial file is removed.
     """
     self.log.debug("Starting to fetch the contents of: %r to: %r", obj_key, filepath_to_store_to)
     fileobj = FileIO(filepath_to_store_to, mode="wb")
     try:
         done = False
         request = self.gs_objects.get_media(bucket=self.bucket_name, object=obj_key)
         download = MediaIoBaseDownload(fileobj, request, chunksize=CHUNK_SIZE)
         while not done:
             # next_chunk() returns (progress status, finished flag)
             status, done = download.next_chunk()
             if status:
                 self.log.debug("Download of %r to %r: %d%%", obj_key, filepath_to_store_to, status.progress() * 100)
     finally:
         fileobj.close()
         if not done:
             # incomplete transfer: don't leave a truncated file behind
             os.unlink(filepath_to_store_to)
示例#49
0
    def load(self, file: FileIO):
        """Deserialise this B-tree node from *file* at its current offset.

        Reads the header (leaf flag + keys) via the module-level ``load``
        helper, then the packed 64-bit little/native-endian pointers, and
        records where the node started and how many bytes it occupied.
        """
        self.ptr = file.tell()
        # header: note this calls the module-level load(), not self.load
        self.is_leaf, self.keys = load(file)

        ptr_num = len(self.keys)
        if not self.is_leaf:
            # internal node: n value pointers plus (n + 1) child pointers
            ptr_num += (ptr_num + 1)
        # each pointer is an unsigned 64-bit integer ('Q' == 8 bytes)
        ptrs = unpack('Q' * ptr_num, file.read(8 * ptr_num))

        if self.is_leaf:
            self.ptrs_value = list(ptrs)
        else:
            # first n pointers address values, the rest address children
            self.ptrs_value = list(ptrs[:len(self.keys)])
            self.ptrs_child = list(ptrs[len(self.keys):])
        # total bytes this node occupies on disk
        self.size = file.tell() - self.ptr
示例#50
0
def createFile(size):
 """Write 'sample.txt' containing *size* rows of Id/Name/Balance lines.

 Each cell is emitted as 'Header:value' on its own line, with a blank
 line between rows.

 NOTE(review): the bare-except fallback to io.FileIO then writes str
 objects to a raw binary file -- that path only worked on Python 2;
 verify before reuse.
 """
 headers=['Id', 'Name', 'Balance']
 try: fp=open('sample.txt', 'w')
 except:fp=FileIO('sample.txt','w')
 fp.truncate()
 table=getTable(size)  # getTable is defined elsewhere in this module
 for row in table:
  i=0
  for item in row:
   # pair each cell with its column header: 'Header:value' per line
   readyItem=headers[i]+':'+item+'\n'
   i+=1
   fp.write(readyItem)
  fp.write('\n')
 fp.close()
示例#51
0
class FileDataReader(AbstractDataReader):
    """ A reader that can read data from a file
    """

    def __init__(self, filename):
        """Open *filename* for binary reading.

        :param filename: The file to read
        :type filename: str
        :raise spinnman.exceptions.SpinnmanIOException: If the file\
                    cannot found or opened for reading
        """
        try:
            self._fileio = FileIO(filename, "r")
        except IOError as exc:
            raise SpinnmanIOException(str(exc))

    def read(self, n_bytes):
        """ See :py:meth:`spinnman.data.abstract_data_reader.AbstractDataReader.read`
        """
        raw = self._fileio.read(n_bytes)
        return bytearray(raw)

    def readinto(self, data):
        """ See :py:meth:`spinnman.data.abstract_data_reader.AbstractDataReader.readinto`
        """
        count = self._fileio.readinto(data)
        return count

    def readall(self):
        """ See :py:meth:`spinnman.data.abstract_data_reader.AbstractDataReader.readall`
        """
        everything = self._fileio.readall()
        return everything

    def close(self):
        """ Closes the file

        :return: Nothing is returned:
        :rtype: None
        :raise spinnman.exceptions.SpinnmanIOException: If the file\
                    cannot be closed
        """
        try:
            self._fileio.close()
        except IOError as exc:
            raise SpinnmanIOException(str(exc))
示例#52
0
 def __init__(self, filename):
     """
     :param filename: The file to read
     :type filename: str
     :raise spinnman.exceptions.SpinnmanIOException: If the file\
                 cannot found or opened for reading
     """
     try:
         # raw unbuffered binary handle; wrapped IOError is re-raised
         # as the project's own exception type
         self._fileio = FileIO(filename, "r")
     except IOError as e:
         raise SpinnmanIOException(str(e))
示例#53
0
    def __init__(self, fname, endian='@', header_prec='i', *args, **kwargs):
        """Open a Fortran unformatted file for writing.
        
        Parameters
        ----------
        endian : character, optional
            Specify the endian-ness of the file.  Possible values are
            '>', '<', '@' and '='.  See the documentation of Python's
            struct module for their meanings.  The default is '@' (native
            byte order)
        header_prec : character, optional
            Specify the precision used for the record headers.  Possible
            values are 'h', 'i', 'l' and 'q' with their meanings from
            Python's struct module.  The default is 'i' (the system's
            default integer).

        """
        # NOTE(review): 'file' is the Python-2 builtin file type; as
        # written this class only works on Python 2.
        file.__init__(self, fname, *args, **kwargs)
        self.ENDIAN = endian            # struct byte-order character
        self.HEADER_PREC = header_prec  # struct code for record-length fields
示例#54
0
def _bench_file_readbig(hasher, expect):
    """Hash the whole benchmark file through one large preallocated buffer.

    readinto() fills successive memoryview windows so no per-read copies
    are made; relies on module globals tmpf, filesize and xbuffer.
    """
    f = FileIO(tmpf.name, "r")
    buf = bytearray(filesize)
    view = memoryview(buf)

    h = hasher()
    offset = 0
    while True:
        count = f.readinto(view[offset:])
        if not count:
            break
        # xbuffer gives a zero-copy window; buf[offset:count] would copy
        h.update(xbuffer(buf, offset, count))
        offset += count

    # release the view before the buffer it exports
    del view
    del buf
    f.close()
    assert h.digest() == expect
示例#55
0
File: hidraw.py  Project: jb55/ds4drv
    def __init__(self, name, addr, type, hidraw_device, event_device):
        """Open the hidraw report node and grab the matching input device.

        :raises DeviceError: wrapping any OSError/IOError raised while
            opening or grabbing the devices.
        """
        try:
            # non-blocking fd for HID reports; FileIO wraps it without
            # taking ownership of the descriptor (closefd=False)
            self.report_fd = os.open(hidraw_device, os.O_RDWR | os.O_NONBLOCK)
            self.fd = FileIO(self.report_fd, "rb+", closefd=False)
            self.input_device = InputDevice(event_device)
            # exclusive grab so events don't also reach other readers
            self.input_device.grab()
        except (OSError, IOError) as err:
            raise DeviceError(err)

        # reusable report buffer; report_size is defined by the subclass
        self.buf = bytearray(self.report_size)

        super(HidrawDS4Device, self).__init__(name, addr, type)
示例#56
0
    def __init__(self, filename, nstates, natoms, vendor="PyMOL", box=None):
        """Open *filename* and write a DCD trajectory header.

        :param filename: output path, opened in binary write mode
        :param nstates: number of coordinate frames to be written
        :param natoms: number of atoms per frame
        :param vendor: creator string embedded in the title record
        :param box: unused here -- presumably periodic-box data; confirm

        NOTE(review): 'file' is the Python-2 builtin file type; this
        class only works on Python 2 as written.
        """
        file.__init__(self, filename, "wb")
        self.natoms = natoms
        # per-frame coordinate record format: natoms 32-bit floats
        self.fmt = "%df" % (natoms)
        charmm = int(nstates > 0)

        # Header
        fmt = "4s 9i d 9i"
        header = [b"CORD", nstates, 1, 1, 0, 0, 0, 0, 0, 0, 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0]  # 4s  # 9i  # d  # 9i
        if charmm:
            # DELTA is stored as a double with X-PLOR but as a float with CHARMm
            fmt = "4s 9i f 10i"
            header.append(24)  # dummy charmm version number
        self.writeFortran(header, fmt)

        # Title
        fmt = "i80s80s"
        title = [2, b"* TITLE".ljust(80), (b"* Created by " + vendor.encode()).ljust(80)]  # 1i  # 80s  # 80s
        self.writeFortran(title, fmt, length=160 + 4)

        # NATOM
        self.writeFortran([natoms], "i")
示例#57
0
File: lib.py  Project: virajs/edgedb
def redirect_stream(stream_name: str, target_stream: io.FileIO):
    """Redirect a system stream to the specified file.

    If ``target_stream`` is None - redirect to devnull.
    """
    # Pick the destination descriptor: the given file, or devnull.
    if target_stream is None:
        target_fd = os.open(os.devnull, os.O_RDWR)
    else:
        target_fd = target_stream.fileno()

    # Duplicate the target over the live stream's fd, then record the
    # stream object under its dunder alias (e.g. sys.__stdout__).
    system_stream = getattr(sys, stream_name)
    os.dup2(target_fd, system_stream.fileno())
    setattr(sys, '__{}__'.format(stream_name), system_stream)
示例#58
0
File: sec.py  Project: vleon1/pyRcanum
    def read_from(cls, sector_file: io.FileIO) -> "SectorObjects":
        """Parse the Object records stored in *sector_file*.

        The record count is stored in the final ``length_parser.size``
        bytes of the file, so we peek at it first, rewind to where the
        caller left the file, and then parse that many records.

        :return: a SectorObjects wrapping the parsed objects
        """
        # Save current position in file
        tell = sector_file.tell()

        # Go to end of file minus size of length.
        sector_file.seek(-cls.length_parser.size, 2)
        length,  = cls.length_parser.unpack_from_file(sector_file)
        # (leftover debug print(length) removed)

        objects = []
        if length:
            # Go back to saved position and read the records in order.
            sector_file.seek(tell)
            for _ in range(length):
                objects.append(Object.read_from(sector_file))

        # Annotation fixed: this method builds SectorObjects, not SectorInfo.
        return SectorObjects(objects=objects)