def __init__(self, stream: io.FileIO):
    assert isinstance(stream, (io.FileIO, io.TextIOWrapper))
    # NOTE pysbs requires a filepath
    stream.close()
    self.path = stream.name
    self.sbs = None
class StreamingBinWriter(BinWriter):
    """
    Subclass of BinWriter to support streaming bin file to disk.
    """

    def __init__(self, sampling_rate: int, mffdir: str, data_type: str = 'EEG'):
        """
        **Parameters**

        * **`sampling_rate`**: sampling rate of all channels.  Sampling rate
          has to fit in a 3-byte integer.  See docs in `mffpy.header_block`.
        * **`data_type`**: name of the type of signal.
        * **`mffdir`**: directory of the mff recording to stream data to.

        **Notes**

        Because we are streaming the recording to disk, the folder into which
        it is to be saved must have been created prior to the initialization
        of this class.
        """
        super().__init__(sampling_rate, data_type)
        filename = self.default_filename_fmt % 1
        self.check_compatibility(filename)
        self.stream = FileIO(join(mffdir, filename), mode='w')

    def write(self, filename: str, *args, **kwargs):
        # Because the recording has been streamed to a file, all that is
        # required here is closing the stream.
        self.stream.close()
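# Hedged usage sketch for StreamingBinWriter above (values are hypothetical and
# the appending API is whatever the mffpy BinWriter base class provides):
#
#   writer = StreamingBinWriter(sampling_rate=250, mffdir='recording.mff')
#   ...append signal blocks via the inherited BinWriter interface...
#   writer.write('signal1.bin')   # data is already on disk; this only closes the stream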
def assertFlippedBit(self, file_orig, file_modded, position):
    len_orig = os.path.getsize(file_orig)
    len_modded = os.path.getsize(file_modded)
    self.assertEqual(len_orig, len_modded, "Files of different sizes")
    f_o = FileIO(file_orig, "r+b")
    f_m = FileIO(file_modded, "r+b")
    for i in xrange(len_orig):
        # read in a byte from each file and compare
        b_o = ord(f_o.read(1))
        b_m = ord(f_m.read(1))
        if i == (position / 8):
            for m in xrange(8):
                bit_m = BitwiseAnalyser.BitManipulator.getBitFromByteAt(b_m, m)
                bit_o = BitwiseAnalyser.BitManipulator.getBitFromByteAt(b_o, m)
                if m == (position % 8):
                    self.assertNotEqual(
                        bit_m, bit_o,
                        "Bits are equal when they should be different at position: " + str(position))
                else:
                    self.assertEqual(
                        bit_m, bit_o,
                        "Bits are incorrectly different at position " + str(i))
        else:
            self.assertEqual(
                b_o, b_m,
                "Bytes differ (when they shouldn't) at position " + str(i))
    f_o.close()
    f_m.close()
def _parse_config_file_or_exit(config_file: io.FileIO) -> Dict:
    experiment_config = yaml.safe_load(config_file.read())
    config_file.close()
    if not experiment_config or not isinstance(experiment_config, dict):
        print("Error: invalid experiment config file {}".format(config_file.name))
        sys.exit(1)
    return experiment_config
def isatty(self):
    # TODO: Couldn't we just subclass FileIO?
    f = FileIO(self._fileno, 'r', False)
    try:
        return f.isatty()
    finally:
        f.close()
def New(self, request, context):
    result = fms.ReturnStatus()
    try:
        if request.type == PATH_FILE:
            new_file = FileIO(request.path, "w+")
            new_file.close()
        elif request.type == PATH_DIR:
            os.mkdir(request.path)
        elif request.type == PATH_SYMLINK:
            raise Exception("creation of symlinks not supported")
        elif request.type == PATH_PACKAGE:
            raise Exception("creation of packages not supported")
        result.code = OK
    except OSError as ose:
        result.code = OS_ERROR
        if ose.errno:
            result.error_code = ose.errno
        result.error_msg = utf8(ose.strerror)
        result.error_file = utf8(request.path)
    except IOError as ioe:
        result.code = IO_ERROR
        if ioe.errno:
            result.error_code = ioe.errno
        result.error_msg = utf8(ioe.strerror)
        result.error_file = utf8(ioe.filename)
    except Exception as err:
        result.code = ERROR
        result.error_msg = utf8(err)
        result.error_file = utf8(request.path)
    return result
def get_from_file_memory_duplicate(path):
    io = FileIO(path, 'rb')
    io2 = StringIO()
    io2.write(io.read())
    io.close()
    io2.seek(0, os.SEEK_SET)
    return ELF(io2)
def wrapper(self, file_, *args, **kwargs):
    if isinstance(file_, basestring):
        # Using FileIO here instead of open()
        # to be able to override the filename
        # which is later used when uploading the file.
        #
        # Explanation:
        #
        # 1) requests reads the filename
        #    from "name" attribute of a file-like object,
        #    there is no other way to specify a filename;
        #
        # 2) The attribute may contain the full path to file,
        #    which does not work well as a filename;
        #
        # 3) The attribute is readonly when using open(),
        #    unlike FileIO object.
        file_ = FileIO(file_, 'rb')
        file_.name = path.basename(file_.name)
    if hasattr(file_, 'read'):
        # A file-like object must have 'read' method
        output = fn(self, file_, *args, **kwargs)
        file_.close()
        return output
    else:
        try:
            file_.close()
        except AttributeError:
            pass
        raise TypeError('Expected either a string '
                        'containing a path to file or a '
                        'file-like object, got {}'.format(type(file_)))
def flipByteAt(inputfile, position):
    """Flips the bits for the byte at the specified position in the input file."""
    f = FileIO(inputfile, "r+")
    f.seek(position)
    byte = ord(f.read(1))
    f.seek(-1, 1)  # go back 1 byte from current position
    f.write(struct.pack("B", byte ^ 0xFF))  # read in the byte and XOR it
    f.close()
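# Round-trip check for flipByteAt above (hypothetical file name): flipping the
# same byte twice restores the original contents.
with open('flip_demo.bin', 'wb') as fp:
    fp.write(b'\x00\x10\xff')

flipByteAt('flip_demo.bin', 1)
assert open('flip_demo.bin', 'rb').read() == b'\x00\xef\xff'  # 0x10 ^ 0xFF == 0xEF
flipByteAt('flip_demo.bin', 1)
assert open('flip_demo.bin', 'rb').read() == b'\x00\x10\xff'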
class HidrawDS4Device(DS4Device):
    def __init__(self, name, addr, type, hidraw_device, event_device):
        try:
            self.report_fd = os.open(hidraw_device, os.O_RDWR | os.O_NONBLOCK)
            self.fd = FileIO(self.report_fd, "rb+", closefd=False)
            self.input_device = InputDevice(event_device)
            self.input_device.grab()
        except (OSError, IOError) as err:
            raise DeviceError(err)

        self.buf = bytearray(self.report_size)

        super(HidrawDS4Device, self).__init__(name, addr, type)

    def read_report(self):
        try:
            ret = self.fd.readinto(self.buf)
        except IOError:
            return

        # Disconnection
        if ret == 0:
            return

        # Invalid report size or id, just ignore it
        if ret < self.report_size or self.buf[0] != self.valid_report_id:
            return False

        if self.type == "bluetooth":
            # Cut off bluetooth data
            buf = zero_copy_slice(self.buf, 2)
        else:
            buf = self.buf

        return self.parse_report(buf)

    def read_feature_report(self, report_id, size):
        op = HIDIOCGFEATURE(size + 1)
        buf = bytearray(size + 1)
        buf[0] = report_id
        return fcntl.ioctl(self.fd, op, bytes(buf))

    def write_report(self, report_id, data):
        if self.type == "bluetooth":
            # TODO: Add a check for a kernel that supports writing
            # output reports when such a kernel has been released.
            return

        hid = bytearray((report_id,))
        self.fd.write(hid + data)

    def close(self):
        try:
            self.fd.close()
            self.input_device.ungrab()
        except IOError:
            pass
def process(self, output=None, vlc=False):
    assert not (output is None and not vlc), \
        "Either output file or viewer must be valid"
    conn = None
    n_bytes = 0
    if output is not None:
        f = FileIO(output, "w")
    if vlc:
        # open pipe to vlc to view incoming video data
        cmdline = [
            'vlc', '--demux', 'h264',
            '--network-caching=', '512',  # in ms
            '--h264-fps', '30',
            '-'
        ]
        player = subprocess.Popen(cmdline, stdin=subprocess.PIPE)
    try:
        conn, addr = self.server.accept()
        if self.verbose:
            print('Connection address:', addr)
        while 1:
            data = conn.recv(1048576)
            if not data:
                break
            n_bytes += len(data)
            # write data to file and video stream
            if output is not None:
                f.write(data)
            if vlc:
                player.stdin.write(data)
            # indicate that we are ready for more data (move up?)
            conn.send("")
    finally:
        if conn is not None:
            conn.close()
        if output is not None:
            f.close()
        if vlc:
            player.terminate()
    if self.verbose:
        print(" total # bytes received:", n_bytes)
def close(self):
    name = self.name
    FileIO.close(self)
    if self.__temporary:
        try:
            os.unlink(name)
        except Exception as err:
            logger.error("Unable to remove %s: %s" % (name, err))
            raise err
def __execfile(name):
    # Guard against FileIO() itself failing: without the None check the
    # finally clause would raise NameError and mask the real error.
    f = None
    try:
        f = FileIO(name)
        codestr = f.read()
        exec(codestr)
    except Exception:
        raise RuntimeError('Failed to execute file %s' % name)
    finally:
        if f is not None:
            f.close()
def save_q(Q, file_location):
    """Saves the current q learning values to the specified location."""
    try:
        makedirs(dirname(file_location))
    except OSError as exc:
        pass
    file = FileIO(file_location, 'w')
    pickle.dump(Q, file)
    file.close()
def close(self): """Close file.""" # close file FileIO.close(self) # remove file if 'w' in self.mode: os.remove(self.name)
class HidrawDS4Device(DS4Device):
    def __init__(self, name, addr, type, hidraw_device, event_device):
        try:
            self.report_fd = os.open(hidraw_device, os.O_RDWR | os.O_NONBLOCK)
            self.fd = FileIO(self.report_fd, "rb+", closefd=False)
            self.input_device = InputDevice(event_device)
            self.input_device.grab()
        except (OSError, IOError) as err:
            raise DeviceError(err)

        self.buf = bytearray(self.report_size)

        super(HidrawDS4Device, self).__init__(name, addr, type)

    def read_report(self):
        try:
            ret = self.fd.readinto(self.buf)
        except IOError:
            return 'ioerror'

        # Disconnection
        if ret == 0:
            return 'disconnection'

        # Invalid report size or id, just ignore it
        if ret < self.report_size or self.buf[0] != self.valid_report_id:
            return False

        if self.type == "bluetooth":
            # Cut off bluetooth data
            buf = zero_copy_slice(self.buf, 2)
        else:
            buf = self.buf

        return self.parse_report(buf)

    def read_feature_report(self, report_id, size):
        op = HIDIOCGFEATURE(size + 1)
        buf = bytearray(size + 1)
        buf[0] = report_id
        return fcntl.ioctl(self.fd, op, bytes(buf))

    def write_report(self, report_id, data):
        hid = bytearray((report_id,))
        self.fd.write(hid + data)

    def close(self):
        try:
            # Reset LED to original hidraw pairing colour.
            self.set_led(0, 0, 1)
            self.fd.close()
            self.input_device.ungrab()
        except IOError:
            pass
def __del__(self):
    if self.logger_file is not None:
        sys.stdout = sys.__stdout__
        self.logger.seek(0)
        # odd behaviour at deconstruction prohibits open(...)
        log = FileIO(self.logger_file, "w")
        log.write(self.logger.read().encode())
        log.close()
        print(f"written logs to {self.logger_file!r}, exiting ...")
def get_contents_to_file(self, key, filepath):
    fileobj = FileIO(filepath, mode="wb")
    done = False
    try:
        self.get_contents_to_fileobj(key, fileobj)
        done = True
    finally:
        fileobj.close()
        if not done:
            os.unlink(filepath)
def bench_file_write55():
    f = FileIO(tmpf.name, "r+")
    zblk = b"\x55" * blksize
    for i in xrange(filesize // blksize):
        pos = 0
        while pos < blksize:
            n = f.write(memoryview(zblk)[pos:])
            assert n != 0
            pos += n
    f.close()
def flipBitAt(inputfile, position):
    """Flips the bit at the specified position in the input file."""
    if not 0 <= position < (8 * os.path.getsize(inputfile)):
        raise IndexError("Position " + str(position) + " is out of range")
    f = FileIO(inputfile, "r+")
    f.seek(position / 8)
    byte = ord(f.read(1))
    f.seek(-1, 1)  # go back 1 byte from the current position
    bitnum = position % 8
    f.write(struct.pack("B", byte ^ (1 << (7 - bitnum))))
    f.close()
def comparison_report(self):
    if self.__comparison_report is None:
        if os.path.isfile(self.output_location):
            comparisons_in = FileIO(self.output_location, mode='rb')
            try:
                comparison_failure_stream = simpleion.load(comparisons_in, single_value=False)
            finally:
                comparisons_in.close()
            self.__comparison_report = IonPyList.from_value(IonType.LIST, comparison_failure_stream)
        else:
            self.__comparison_report = IonPyList.from_value(IonType.LIST, [])
    return self.__comparison_report
def errors(self):
    if self.__errors is None:
        if os.path.isfile(self.error_location):
            errors_in = FileIO(self.error_location, mode='rb')
            try:
                errors_stream = simpleion.load(errors_in, single_value=False)
            finally:
                errors_in.close()
            self.__errors = IonPyList.from_value(IonType.LIST, errors_stream)
        else:
            self.__errors = IonPyList.from_value(IonType.LIST, [])
    return self.__errors
def get_contents_to_file(self, key, filepath_to_store_to, *, progress_callback=None):
    fileobj = FileIO(filepath_to_store_to, mode="wb")
    done = False
    metadata = {}
    try:
        metadata = self.get_contents_to_fileobj(key, fileobj, progress_callback=progress_callback)
        done = True
    finally:
        fileobj.close()
        if not done:
            os.unlink(filepath_to_store_to)
    return metadata
def _parse_config_file_or_exit(config_file: io.FileIO, config_overrides: Iterable[str]) -> Dict:
    experiment_config = util.safe_load_yaml_with_exceptions(config_file)
    config_file.close()
    if not experiment_config or not isinstance(experiment_config, dict):
        print("Error: invalid experiment config file {}".format(config_file.name))
        sys.exit(1)
    parse_config_overrides(experiment_config, config_overrides)
    return experiment_config
def get_zipdata(self):
    cache_file_name = self.cache_filename
    stream = FileIO(cache_file_name, mode='w')
    zipfile = ZipFile(stream, 'w')
    self.write_zipfile(zipfile)
    zipfile.close()
    stream.close()
    stream = FileIO(cache_file_name, mode='r')
    zipdata = stream.readall()
    stream.close()
    remove(cache_file_name)
    return zipdata
def importUsers(self):
    """ Store imported users in a new .htpasswd style file. """
    log.info(' %d Users...' % (len(self.jiraData['users'])))
    line = '%s:%s\n'
    output = FileIO(self.authentication, 'w')
    output.write(line % (self.username, create_hash(self.password)))
    for user in self.jiraData['users']:
        output.write(line % (user['name'], user['password']))
    output.close()
def createFile(size):
    headers = ['Id', 'Name', 'Balance']
    try:
        fp = open('sample.txt', 'w')
    except:
        fp = FileIO('sample.txt', 'w')
    fp.truncate()
    table = getTable(size)
    for row in table:
        i = 0
        for item in row:
            readyItem = headers[i] + ':' + item + '\n'
            i += 1
            fp.write(readyItem)
        fp.write('\n')
    fp.close()
def import_users(self):
    """ Store imported users in a new .htpasswd style file. """
    log.info(' {} Users...'.format(len(self.jiraData['users'])))
    line = '{}:{}\n'
    output = FileIO(self.authentication, 'w')
    output.write(line.format(self.username, create_hash(self.password)))
    for user in self.jiraData['users']:
        output.write(line.format(user['name'], user['password']))
    output.close()
def get_contents_to_file(self, key, filepath_to_store_to):
    key = self.format_key_for_backend(key)
    self.log.debug("Starting to fetch the contents of: %r to: %r", key, filepath_to_store_to)
    fileobj = FileIO(filepath_to_store_to, mode="wb")
    done = False
    metadata = {}
    try:
        metadata = self.get_contents_to_fileobj(key, fileobj)
        done = True
    finally:
        fileobj.close()
        if not done:
            os.unlink(filepath_to_store_to)
    return metadata
def _bench_file_read(hasher, expect):
    f = FileIO(tmpf.name, "r")
    b = bytearray(blksize)
    h = hasher()
    while 1:
        n = f.readinto(b)
        if n == 0:
            break
        h.update(xbuffer(b, 0, n))  # NOTE b[:n] does copy
    f.close()
    assert h.digest() == expect
def get_contents_to_file(self, obj_key, filepath_to_store_to):
    self.log.debug("Starting to fetch the contents of: %r to: %r", obj_key, filepath_to_store_to)
    fileobj = FileIO(filepath_to_store_to, mode="wb")
    try:
        done = False
        request = self.gs_objects.get_media(bucket=self.bucket_name, object=obj_key)
        download = MediaIoBaseDownload(fileobj, request, chunksize=CHUNK_SIZE)
        while not done:
            status, done = download.next_chunk()
            if status:
                self.log.debug("Download of %r to %r: %d%%",
                               obj_key, filepath_to_store_to, status.progress() * 100)
    finally:
        fileobj.close()
        if not done:
            os.unlink(filepath_to_store_to)
def __execute_with(self, ion_implementation, error_location, args):
    stderr = ion_implementation.execute(*args)
    if len(stderr) != 0:
        # Any output to stderr is likely caused by an uncaught error in the
        # implementation under test. This forces a failure to avoid false
        # negatives.
        error_file = FileIO(error_location, 'wb')
        try:
            error = {
                TestFile.ERROR_TYPE_FIELD: TestFile.ERROR_TYPE_STATE_SYMBOL,
                TestFile.ERROR_MESSAGE_FIELD: 'Implementation %s produced stderr output "%s" for command %r.' % (
                    ion_implementation.identifier, stderr.decode(), args
                ),
                TestFile.ERROR_LOCATION_FIELD: self.path
            }
            simpleion.dump(error, error_file, binary=False)
        finally:
            error_file.close()
def what(file, h=None):
    f = None
    try:
        if h is None:
            # if isinstance(file, (str, PathLike))
            if isinstance(file, str):  # FIXME(corona10): RustPython doesn't support PathLike yet.
                f = FileIO(file, 'rb')
                h = f.read(32)
            else:
                location = file.tell()
                h = file.read(32)
                file.seek(location)
        for tf in tests:
            res = tf(h, f)
            if res:
                return res
    finally:
        if f:
            f.close()
    return None
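# Example calls for what() above, assuming the module-level `tests` list that
# the surrounding imghdr-style module provides (file name is hypothetical):
#
#   print(what('photo.png'))        # e.g. 'png', or None if unrecognised
#   with open('photo.png', 'rb') as fp:
#       print(what(fp))             # stream position is restored afterwards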
class FileDataReader(AbstractDataReader):
    """ A reader that can read data from a file
    """

    def __init__(self, filename):
        """
        :param filename: The file to read
        :type filename: str
        :raise spinnman.exceptions.SpinnmanIOException: If the file\
            cannot be found or opened for reading
        """
        try:
            self._fileio = FileIO(filename, "r")
        except IOError as e:
            raise SpinnmanIOException(str(e))

    def read(self, n_bytes):
        """ See :py:meth:`spinnman.data.abstract_data_reader.AbstractDataReader.read`
        """
        return bytearray(self._fileio.read(n_bytes))

    def readinto(self, data):
        """ See :py:meth:`spinnman.data.abstract_data_reader.AbstractDataReader.readinto`
        """
        return self._fileio.readinto(data)

    def readall(self):
        """ See :py:meth:`spinnman.data.abstract_data_reader.AbstractDataReader.readall`
        """
        return self._fileio.readall()

    def close(self):
        """ Closes the file

        :return: Nothing is returned
        :rtype: None
        :raise spinnman.exceptions.SpinnmanIOException: If the file\
            cannot be closed
        """
        try:
            self._fileio.close()
        except IOError as e:
            raise SpinnmanIOException(str(e))
def _bench_file_readbig(hasher, expect):
    f = FileIO(tmpf.name, "r")
    # b = mmap(-1, filesize, MAP_SHARED | MAP_ANONYMOUS, PROT_READ | PROT_WRITE)
    b = bytearray(filesize)
    bm = memoryview(b)
    h = hasher()
    pos = 0
    while 1:
        n = f.readinto(bm[pos:])
        if n == 0:
            break
        h.update(xbuffer(b, pos, n))  # NOTE b[pos:n] does copy
        pos += n
    del bm
    del b
    f.close()
    assert h.digest() == expect
class filestream_range_iterator(Iterable):
    """
    A class that mimics FileIO and implements an iterator that returns a
    fixed-sized sequence of bytes, beginning at `start` and ending at `end`.

    BBB: due to a possible bug in Zope>4, <=4.1.3, couldn't be a subclass of
    FileIO as Iterators.filestream_iterator
    """

    def __init__(self, name, mode='rb', bufsize=-1, streamsize=1 << 16,
                 start=0, end=None):
        self._io = FileIO(name, mode=mode)
        self.streamsize = streamsize
        self.start = start
        self.end = end
        self._io.seek(start, 0)

    def __iter__(self):
        if self._io.closed:
            raise ValueError("I/O operation on closed file.")
        return self

    def __next__(self):
        if self.end is None:
            bytes = self.streamsize
        else:
            bytes = max(min(self.end - self._io.tell(), self.streamsize), 0)
        data = self._io.read(bytes)
        if not data:
            raise StopIteration
        return data

    next = __next__

    def close(self):
        self._io.close()

    # BBB: is it necessary to implement __len__ ?
    # def __len__(self)

    def read(self, size=-1):
        return self._io.read(size)
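# Usage sketch for filestream_range_iterator above (hypothetical file name):
# stream bytes 10..49 of a file in 16-byte chunks.
with open('range_demo.bin', 'wb') as fp:
    fp.write(bytes(range(256)))

chunks = filestream_range_iterator('range_demo.bin', streamsize=16, start=10, end=50)
assert b''.join(chunks) == bytes(range(10, 50))
chunks.close()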
def __download_file(self, file_id, path, filename, mime_type):
    request = self.__service.files().get_media(fileId=file_id)
    filename = filename.replace('/', '')
    fh = FileIO('{}{}'.format(path, filename), 'wb')
    downloader = MediaIoBaseDownload(fh, request, chunksize=50 * 1024 * 1024)
    done = False
    while not done:
        if self.is_cancelled:
            fh.close()
            break
        try:
            self.dstatus, done = downloader.next_chunk()
        except HttpError as err:
            if err.resp.get('content-type', '').startswith('application/json'):
                reason = jsnloads(err.content).get('error').get('errors')[0].get('reason')
                if reason not in ['downloadQuotaExceeded', 'dailyLimitExceeded']:
                    raise err
                if USE_SERVICE_ACCOUNTS:
                    if self.__sa_count == len(listdir("accounts")) or self.__sa_count > 50:
                        self.is_cancelled = True
                        raise err
                    else:
                        self.__switchServiceAccount()
                        LOGGER.info(f"Got: {reason}, Trying Again...")
                        return self.__download_file(file_id, path, filename, mime_type)
                else:
                    self.is_cancelled = True
                    LOGGER.info(f"Got: {reason}")
                    raise err
    self._file_downloaded_bytes = 0
def save(self, file: FileIO, close=False, chunk_size=2**10):
    """Write the image from Imgur to a file object."""
    r = requests.get(self._source, stream=True)

    # attempt to retrieve image size
    try:
        size = int(r.headers['content-length'])
        pro = True
    except KeyError:
        size = 0
        pro = False

    done = 0
    # download image and return progress
    for chunk in r.iter_content(chunk_size):
        file.write(chunk)
        done += len(chunk)
        yield done / size if pro else 1

    # close file at end of write automatically
    if close:
        file.close()
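# Hedged usage sketch for the generator-style save() above, assuming an object
# `img` that exposes this method (names and the file path are hypothetical):
#
#   with FileIO('image.jpg', 'wb') as fh:
#       for progress in img.save(fh, chunk_size=8192):
#           print('{:.0%}'.format(progress), end='\r')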
def convert():
    fp = open('sample.txt')
    raw = fp.read()
    fp.close()
    filter1 = raw.split('\n')
    filter2 = [i for i in filter1 if i != '']
    filter3 = [[filter2[i], filter2[i + 1], filter2[i + 2]]
               for i in xrange(0, len(filter2), 3)]
    filter4 = [[col.replace(': ', ':').split(':') for col in row]
               for row in filter3]
    filter5 = [OrderedDict(row) for row in filter4]
    try:
        jfp = open('sample.json', 'w')
    except:
        jfp = FileIO('sample.json', 'w')
    jfp.truncate()
    json.dump(filter5, jfp, indent=4)
    jfp.close()
def assertFlippedByte(self, file_orig, file_modded, position):
    len_orig = os.path.getsize(file_orig)
    len_modded = os.path.getsize(file_modded)
    self.assertEqual(len_orig, len_modded, "Files of different sizes")
    f_o = FileIO(file_orig, "r+b")
    f_m = FileIO(file_modded, "r+b")
    for i in xrange(len_orig):
        # read in a byte from each file and compare
        b_o = ord(f_o.read(1))
        b_m = ord(f_m.read(1))
        if i == position:
            self.assertEqual(
                b_m, b_o ^ 0xff,
                "Flipped bytes are actually equal at position " + str(i))
        else:
            self.assertEqual(
                b_o, b_m,
                "Bytes differ (when they shouldn't) at position " + str(i))
    f_o.close()
    f_m.close()
def parse_file(self):
    """Parses the video file, obtaining metadata that can be accessed
    thru this class' properties.

    :raises ValueError: File is not an MP4 format video.
    """
    the_file = FileIO(self.path, 'rb')

    # the mimetype could be incorrect
    # we'll let the file decide
    if not self.video_format in self.mimetype:
        the_file.seek(0x00, SEEK_SET)
        first_12 = the_file.read(12)

        # split the dword and the ftyp
        size_dword = struct.unpack('>I', first_12[0:4])[0]
        ftyp_val = first_12[4:]

        # validate if mp4
        if size_dword > 0:
            if ftyp_val not in self.supported_ftypes:
                the_file.close()
                raise ValueError("{} is not an MP4 video.".format(self.name))
        else:
            the_file.close()
            raise ValueError("{} is not an MP4 video.".format(self.name))

    # determine the size of the `compatible_brand` field
    # this is the very first DWORD of the file
    the_file.seek(0x00, SEEK_SET)
    compat_brand_end = the_file.read(4)
    compat_brand_end = struct.unpack('>I', compat_brand_end)[0]
    compat_brand_size = compat_brand_end - 0x10

    # get the `compatible_brand` field
    the_file.seek(0x10, SEEK_SET)
    compat_brand = the_file.read(compat_brand_size)

    # PARSE THE FILE!!!
    try:
        if compat_brand in self.supported_brands:
            self._read_mp4_container(the_file, compat_brand_end)
    except NoMoovAtomException:
        # TODO: ADD LOGGING
        # FIXME: MAKE THIS INTO A LOGGER
        print("WARNING: {} has no moov atom!".format(self.name))
    except NoReadVideoHeaderException:
        print("WARNING: Couldn't get information from {}!".format(self.name))

    the_file.close()
    self._parsed_header = True
def save_state(self):
    cache = BUILTIN_FILE_TYPE(self.cachefile, 'wb')
    cPickle.dump((self.pkgnames, self.deps, self.provides), cache, 2)
    cache.close()
def writeCostumeFile(idx, path, name):
    file = FileIO(path, 'w')
    file.write(top)
    file.write(getXMLTag(idx, name))
    file.write(bottom)
    file.close()
def my_close(file: io.FileIO, byar: bytearray):
    file.seek(-1 * len(byar), io.SEEK_CUR)
    file.write(byar)
    file.close()
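# Illustration of my_close above (hypothetical file name): read four bytes,
# then overwrite them in place with their reversed order before closing.
with open('myclose_demo.bin', 'wb') as fp:
    fp.write(b'abcdef')

f = io.FileIO('myclose_demo.bin', 'rb+')
buf = bytearray(f.read(4))
buf.reverse()
my_close(f, buf)  # seeks back 4 bytes, writes b'dcba', closes the file
assert open('myclose_demo.bin', 'rb').read() == b'dcbaef'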
# Finally, do the conversion based on all the information above.
for fileNum, fileInDir in enumerate(filesInDir):
    outputPath = os.path.join(
        os.path.normpath(options.outputDir),
        os.path.splitext(os.path.basename(fileInDir))[0] + ".P90.temp"
    )
    printProgressIndicator(
        sys.stderr, os.path.basename(fileInDir), fileNum + 1, len(filesInDir),
        "Converting to Standard Fortran"
    )
    outputStream = FileIO(outputPath, mode="wb")
    try:
        f90printer = H90toF90Printer(
            ImmutableDOMDocument(cgDoc),  # using our immutable version we can speed up ALL THE THINGS through caching
            implementationsByTemplateName,
            outputStream,
            moduleNodesByName,
            parallelRegionData,
            symbolAnalysisByRoutineNameAndSymbolName,
            symbolsByModuleNameAndSymbolName,
            symbolsByRoutineNameAndSymbolName,
        )
        f90printer.processFile(fileInDir)
    except UsageError as e:
        logging.error('Error: %s' % (str(e)))
        sys.exit(1)
    except Exception as e:
        logging.critical(
            'Error when generating P90.temp from h90 file %s: %s%s\n'
            % (str(fileInDir), str(e), traceback.format_exc())
        )
        logging.info(traceback.format_exc())
        os.unlink(outputPath)
        sys.exit(1)
    finally:
        outputStream.close()
progressIndicatorReset(sys.stderr)
def close(self):
    self.blob.closed(self)
    file.close(self)
class File(RawIOBase):
    'Create a file object wrapping an e[x]ploded zip file'

    HEADER = 0
    DATA = 1
    DESCRIPTOR = 2
    DIRECTORY = 3

    def __init__(self, path, flags, info, fh=None, base='.', depth=0):
        super(File, self).__init__()
        self.path = path
        self.flags = flags
        self.fh = fh
        self.info = info
        self.depth = depth
        self.cursor = 0
        self.offset = 0
        self.state = File.HEADER
        # stream item info
        self.stream_offset = 0
        self.zip_header = b''
        self.descriptor = b''
        # data file info
        self.data = None
        self.data_name = ''
        self.data_len = 0
        # streams
        prefix = os.path.join(base, 'meta', os.path.basename(path))
        self.stream = FileIO(prefix + '.stream', 'rb')
        self.dir = FileIO(prefix + '.dir', 'rb')
        self.data_dir = os.path.join(base, 'data')
        # init
        self._load_stream_item()
        self.lock = threading.Lock()

    def _load_stream_item(self):
        'Sets the next stream item as current.'
        if self.data:
            self.data.close()
            self.data = None
        # open the header so we can know the data file to open, and the
        # length of the var fields
        raw_header = self.stream.read(STREAM_ITEM.size)
        header = StreamItem._make(STREAM_ITEM.unpack(raw_header))
        var_fields = header.filename_len + header.extra_field_len
        # I would think that b2a_hex should decode the raw bytes...
        sha1 = b2a_hex(header.sha).decode('ascii')
        # only save the zip part of the header
        self.zip_header = (raw_header[:HEADER_DIFF] +
                           self.stream.read(var_fields))
        self.descriptor = self.stream.read(header.descriptor_len)
        self.data_name = path.join(*([self.data_dir] +
                                     list(sha1[:self.depth]) + [sha1]))

    def _open_data_file(self):
        self.data = FileIO(self.data_name, 'rb')
        self.data_len = self.data.seek(0, 2)
        self.data.seek(0)

    def close(self):
        self.stream.close()
        self.dir.close()
        if self.data:
            self.data.close()

    def fileno(self):
        return self.fh

    def isatty(self):
        return False

    def read(self, count=-1):
        if count < 0:
            return self.readall()
        elif count == 0:
            return b''
        state = self.state
        if state == File.HEADER:
            previous_offset = self.offset
            self.offset += count
            result = self.zip_header[previous_offset:self.offset]
            self.cursor += len(result)
            if self.offset >= len(self.zip_header):
                self.state = File.DATA
                if not self.data:
                    self._open_data_file()
            return result
        elif state == File.DATA:
            result = self.data.read(count)
            self.cursor += len(result)
            if self.data.tell() >= self.data_len:
                self.state = File.DESCRIPTOR
                self.offset = 0
            # empty data file (state will now be DESCRIPTOR)
            if not result:
                return self.read(count)
            return result
        elif state == File.DESCRIPTOR:
            previous_offset = self.offset
            self.offset += count
            result = self.descriptor[previous_offset:self.offset]
            self.cursor += len(result)
            if self.offset >= len(self.descriptor):
                if self.cursor >= self.info.directory_offset:
                    self.state = File.DIRECTORY
                    self.dir.seek(0)
                    self.stream_offset = None
                    if self.data:
                        self.data.close()
                        self.data = None
                else:
                    self.state = File.HEADER
                    self.offset = 0
                    self.stream_offset = self.stream.tell()
                    self._load_stream_item()
            # descriptor is optional (state will now be HEADER or DIRECTORY)
            if not result:
                return self.read(count)
            return result
        elif state == File.DIRECTORY:
            result = self.dir.read(count)
            self.cursor += len(result)
            return result
        else:
            raise RuntimeError('Invalid state: %r' % self.state)

    def readable(self):
        return True

    def readinto(self, b):
        count = len(b)
        if count == 0:
            return 0
        state = self.state
        if state == File.HEADER:
            header_len = len(self.zip_header)
            previous_offset = self.offset
            current_offset = self.offset = \
                min(previous_offset + count, header_len)
            read = current_offset - previous_offset
            b[:read] = self.zip_header[previous_offset:current_offset]
            self.cursor += read
            if current_offset == header_len:
                self.state = File.DATA
                if not self.data:
                    self._open_data_file()
            return read
        elif state == File.DATA:
            read = self.data.readinto(b)
            self.cursor += read
            if self.data.tell() >= self.data_len:
                self.state = File.DESCRIPTOR
                self.offset = 0
            # empty data file (state will now be DESCRIPTOR)
            if not read:
                return self.readinto(b)
            return read
        elif state == File.DESCRIPTOR:
            descriptor_len = len(self.descriptor)
            previous_offset = self.offset
            current_offset = self.offset = \
                min(previous_offset + count, descriptor_len)
            read = current_offset - previous_offset
            b[:read] = self.descriptor[previous_offset:current_offset]
            self.cursor += read
            if current_offset == descriptor_len:
                if self.cursor >= self.info.directory_offset:
                    self.state = File.DIRECTORY
                    self.dir.seek(0)
                    self.stream_offset = None
                    if self.data:
                        self.data.close()
                        self.data = None
                else:
                    self.state = File.HEADER
                    self.offset = 0
                    self.stream_offset = self.stream.tell()
                    self._load_stream_item()
            # descriptor is optional (state will now be HEADER or DIRECTORY)
            if not read:
                return self.readinto(b)
            return read
        elif state == File.DIRECTORY:
            read = self.dir.readinto(b)
            self.cursor += read
            return read
        else:
            raise RuntimeError('Invalid state: %r' % self.state)

    def seek(self, pos, offset=0):
        if offset == 1:
            pos += self.cursor
        elif offset == 2:
            pos += self.info.filesize
        if pos == self.cursor:
            return pos
        self.cursor = pos
        # skip directly to the central directory
        if pos >= self.info.directory_offset:
            if self.data:
                self.data.close()
                self.data = None
            self.state = File.DIRECTORY
            self.stream_offset = None
            self.dir.seek(pos - self.info.directory_offset)
            return pos
        # calculate the offset into the stream file
        z_offset, s_offset = self.info.jump_tree.find(pos).location
        additional = pos - z_offset
        # we're looking at a different data file
        # (load local header into memory)
        if s_offset != self.stream_offset:
            self.stream_offset = s_offset
            self.stream.seek(s_offset)
            self._load_stream_item()
        header_len = len(self.zip_header)
        if additional < header_len:
            self.state = File.HEADER
            self.offset = additional
            return pos
        # assume currently in the data file
        additional -= header_len
        self.state = File.DATA
        # if the file hasn't been opened yet, open it and find its size
        if not self.data:
            self._open_data_file()
        if additional < self.data_len:
            self.data.seek(additional)
        else:
            self.state = File.DESCRIPTOR
            self.offset = additional - self.data_len
        return pos

    def seekable(self):
        return True

    def tell(self):
        return self.cursor

    def writeable(self):
        return False
def __exit__(self, *args, **kwargs):
    """ Close the file. """
    return FileIO.close(self)
def export_file(self, basedir):
    filename = os.path.join(basedir, self.filename)
    fileio = FileIO(filename, "w")
    self.export.draw(self, fileio)
    fileio.close()
    return filename
def streamer():
    # Start by loading up available options
    args, other_args = util.parseopts()

    debug('Setting up command channel')
    pid = os.getpid()
    with open(args.pidfile, 'w') as pidfile:
        pidfile.write('{0}'.format(pid))
    cmd_fifo = '/tmp/rscad_streamer_{0}'.format(pid)
    if os.path.exists(cmd_fifo):
        # This shouldn't happen; try to rm it
        os.unlink(cmd_fifo)
    os.mkfifo(cmd_fifo)
    cmd_chan = FileIO(cmd_fifo, 'r+')

    # Load up plugins and parse plugin specific command line opts
    debug('loading plugins')
    plugin_args = loadPlugins(args.path, args.plugins, other_args)

    # get an appropriate rscad object
    debug('making rscad obj')
    RSCAD = rscad.rscadfactory(args.rscad, args.ffile)
    debug('RSCAD: %s' % (type(RSCAD)))

    ## Need a (e)poll object - plugins implement input
    # If we're on linux, use epoll's level triggered event interface,
    # it's a fast poll()!
    try:
        poller = select.epoll()
    except AttributeError:
        # Not on linux, use poll()
        try:
            poller = select.poll()
        except:
            # Don't have poll() either? Quit using windows!
            print('Must be run on a platform that supports poll() or epoll()')

    # Add the command channel to the poller
    poller.register(cmd_chan.fileno(), select.POLLIN)

    # Init plugins - set up (e)poll for cases that care
    [poller.register(fileno, select.POLLIN)
     for fileno in [p.init(plugin_args) for p in RSCADPlugin.plugins]
     if fileno is not None]

    ## get any plugin commands
    debug('Registering plugin specific commands')
    pcommands = dict()
    [pcommands.update(p.register_commands()) for p in RSCADPlugin.plugins]

    # Need to write rscad script to RSCAD before starting the event loop
    ## Hook up piping
    RSCAD.connect()

    ## main loop
    try:
        debug('starting main loop')
        while True:
            debug('Looping...')
            ## read script file until EOF, pumping it to RSCAD
            rscad_file = RSCAD.makefile()
            while True:
                line = args.script.readline()
                if line == '':
                    # rewind the file for the next pass
                    try:
                        args.script.seek(0, 0)
                    except IOError:
                        # probably stdin
                        pass
                    break
                rscad_file.write(line)
                rscad_file.flush()
            ## Wait for sequence point
            for line in RSCAD.waitforsync('seq1'):
                debug('loop line: %s' % (line))
                [p.handle_output(line) for p in RSCADPlugin.plugins]
            # check for incoming data
            fd = poller.poll(0)
            debug('Got filedes {0}'.format(fd))
            if cmd_chan.fileno() in [fdes[0] for fdes in fd]:
                handle_command(cmd_chan, pcommands)
            else:
                # loop through plugins calling handle_data
                # it's up to the plugin to make sure the data belongs to it
                [[p.handle_input(filedes[0], RSCAD) for p in RSCADPlugin.plugins]
                 for filedes in fd]
            time.sleep(args.sleeptime)
    finally:
        debug('Cleaning up')
        cmd_chan.close()
        os.unlink(cmd_chan.name)
        [p.cleanup() for p in RSCADPlugin.plugins]
        util.cleanup(RSCAD, args.script)
        os.unlink(args.pidfile)
def load_q(file_location):
    """Loads the q learning values from the specified file."""
    file = FileIO(file_location, 'r')
    Q = pickle.load(file)
    file.close()
    return Q
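# Round-trip sketch for save_q/load_q above (path and values are hypothetical):
# the pickled Q table should come back unchanged.
Q = {('state A', 'action 1'): 0.5, ('state A', 'action 2'): -0.25}
save_q(Q, '/tmp/q_values.pkl')
assert load_q('/tmp/q_values.pkl') == Q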