def _store(self, tile, location):
    """Atomically persist *tile*'s image data at *location*."""
    # Remove a symlinked destination first so the write replaces the
    # link itself rather than the file it points to.
    if os.path.islink(location):
        os.unlink(location)
    with tile_buffer(tile) as buf:
        log.debug('writing %r to %s' % (tile.coord, location))
        content = buf.read()
        write_atomic(location, content)
def _init_bundle(self):
    """Create a fresh bundle file: packed header followed by an empty index."""
    log.info("Init Bundle %s" % self.filename)
    ensure_directory(self.filename)
    # Full struct format: byte-order marker + header layout + index layout.
    fmt = self.BUNDLE_BYTEORDER + self.BUNDLE_HEADER_FORMAT + self.BUNDLE_INDEX_FORMAT
    values = list(self.header.values()) + self.index  # empty index
    write_atomic(self.filename, struct.pack(fmt, *values))
def _store(self, tile, location):
    """Write the tile's image bytes to *location* with an atomic replace."""
    # Unlink a symlink target so we replace the link, not its referent.
    if os.path.islink(location):
        os.unlink(location)
    with tile_buffer(tile) as buf:
        log.debug("writing %r to %s" % (tile.coord, location))
        payload = buf.read()
        write_atomic(location, payload)
def write_csv(filename, records):
    """Atomically write *records* to *filename* as CSV.

    :param filename: destination path for the CSV file.
    :param records: mapping of record id -> iterable of field values;
        each row is written as ``[id, field, field, ...]``.
    """
    buf = StringIO()
    csv_writer = csv.writer(buf)
    # ``.items()`` works on both Python 2 and 3; ``.iteritems()`` raises
    # AttributeError on Python 3 dicts.
    for rec_id, rec in records.items():
        csv_writer.writerow([rec_id] + list(rec))
    buf.seek(0)
    write_atomic(filename, buf.read())
def _init_bundle(self):
    """Write a new v1 bundle: header plus a zeroed per-tile entry table."""
    ensure_directory(self.filename)
    fields = list(BUNDLE_V1_HEADER)
    # Store this bundle's tile offsets, plus the +127 far edge of the grid.
    fields[10], fields[8] = self.tile_offsets
    fields[11] = fields[10] + 127
    fields[9] = fields[8] + 127
    packed_header = struct.pack(BUNDLE_V1_HEADER_STRUCT_FORMAT, *fields)
    # Four zero bytes per grid cell -> zero-size entry for each tile.
    empty_entries = b'\x00' * (BUNDLEX_V1_GRID_HEIGHT * BUNDLEX_V1_GRID_WIDTH * 4)
    write_atomic(self.filename, packed_header + empty_entries)
def _init_bundle(self):
    """Write a new bundle file: packed header followed by a zeroed tile table."""
    ensure_directory(self.filename)
    fields = list(BUNDLE_HEADER)
    # Record the bundle's tile offset range in the header fields.
    fields[13], fields[11] = self.tile_offsets
    fields[14] = fields[13] + 127
    fields[12] = fields[11] + 127
    body = struct.pack(BUNDLE_HEADER_STRUCT_FORMAT, *fields)
    # zero-size entry for each tile
    body += b'\x00' * (BUNDLEX_GRID_HEIGHT * BUNDLEX_GRID_WIDTH * 4)
    write_atomic(self.filename, body)
def _init_index(self):
    """Create the bundle file with header and empty index, exactly once."""
    self._initialized = True
    if os.path.exists(self.filename):
        return  # never clobber an existing bundle
    ensure_directory(self.filename)
    out = BytesIO()
    out.write(struct.pack(BUNDLE_V2_HEADER_STRUCT_FORMAT, *BUNDLE_V2_HEADER))
    # Empty index (ArcGIS stores an offset of 4 and size of 0 for missing tiles)
    out.write(struct.pack('<%dQ' % BUNDLE_V2_TILES, *((4, ) * BUNDLE_V2_TILES)))
    write_atomic(self.filename, out.getvalue())
def test_not_a_file(self):
    """write_atomic must surface OS errors instead of swallowing them."""
    # check that expected errors are not hidden
    filename = os.path.join(self.dirname, "tmpfile")
    os.mkdir(filename)  # target is a directory -> write must fail
    try:
        write_atomic(filename, b"12345")
    except (OSError, IOError):
        return  # expected failure path
    assert False, "expected exception"
def _init_index(self):
    """Write a fresh bundlex index file unless one already exists."""
    self._initialized = True
    if os.path.exists(self.filename):
        return
    ensure_directory(self.filename)
    out = BytesIO()
    out.write(BUNDLEX_HEADER)
    # One 5-byte little-endian offset per grid cell, pointing at that
    # cell's 4-byte slot just past the bundle header.
    for cell in range(BUNDLEX_GRID_WIDTH * BUNDLEX_GRID_HEIGHT):
        offset = cell * 4 + BUNDLE_HEADER_SIZE
        out.write(struct.pack('<Q', offset)[:5])
    out.write(BUNDLEX_FOOTER)
    write_atomic(self.filename, out.getvalue())
def test_not_a_file(self):
    """Errors from write_atomic must propagate (target is a directory)."""
    # check that expected errors are not hidden
    filename = os.path.join(self.dirname, 'tmpfile')
    os.mkdir(filename)
    raised = False
    try:
        write_atomic(filename, b'12345')
    except OSError:
        raised = True
    assert raised, 'expected exception'
def _init_index(self):
    """Initialize the bundle file on disk if it does not exist yet."""
    self._initialized = True
    if not os.path.exists(self.filename):
        ensure_directory(self.filename)
        header = struct.pack(BUNDLE_V2_HEADER_STRUCT_FORMAT, *BUNDLE_V2_HEADER)
        # Empty index (ArcGIS stores an offset of 4 and size of 0 for missing tiles)
        index = struct.pack('<%dQ' % BUNDLE_V2_TILES, *(4, ) * BUNDLE_V2_TILES)
        write_atomic(self.filename, header + index)
def store(self, legend):
    """Persist *legend*'s image in the cache directory, if not stored yet."""
    if legend.stored:
        return
    if legend.location is None:
        # Derive a deterministic cache path from the legend id and scale.
        digest = legend_hash(legend.id, legend.scale)
        legend.location = os.path.join(self.cache_dir, digest) + '.' + self.file_ext
    ensure_directory(legend.location)
    img_opts = ImageOptions(format='image/' + self.file_ext)
    data = legend.source.as_buffer(img_opts, seekable=True)
    data.seek(0)
    log.debug('writing to %s' % (legend.location))
    write_atomic(legend.location, data.read())
    data.seek(0)  # leave the buffer rewound for any later readers
    legend.stored = True
def write_atomic_data(i_filename):
    """Worker helper: atomically write an id-tagged 10k payload.

    :param i_filename: ``(i, filename)`` tuple; ``i`` tags the payload so
        concurrent writers produce distinguishable content.
    """
    # The parameter was left as ``xxx_todo_changeme`` by 2to3's tuple-argument
    # conversion; workers are invoked positionally, so the rename is safe and
    # matches the sibling ``_write_atomic_data`` helpers.
    (i, filename) = i_filename
    data = str(i) + '\n' + 'x' * 10000
    write_atomic(filename, data.encode('utf-8'))
    # Brief pause so interleaved writers actually overlap during tests.
    time.sleep(0.001)
def write(self):
    """Atomically persist the pickled seed status; log (not raise) on failure."""
    try:
        payload = pickle.dumps(self.status)
        write_atomic(self.filename, payload)
    except (IOError, OSError) as ex:
        # Progress persistence is best-effort; seeding continues regardless.
        log.error('unable to write seed progress: %s', ex)
def _write_atomic_data(i_filename):
    """Worker: atomically write an id-tagged payload, then yield briefly."""
    i, filename = i_filename
    payload = '%s\n%s' % (i, 'x' * 10000)
    write_atomic(filename, payload.encode('utf-8'))
    time.sleep(0.001)  # give concurrent writers a chance to interleave
def write_atomic_data(i_filename):
    """Atomically write an id-tagged 10k payload to *filename*.

    :param i_filename: ``(i, filename)`` tuple (passed positionally).
    """
    # Tuple parameters (``def f((i, filename))``) are Python 2-only syntax,
    # removed by PEP 3113; unpacking in the body is call-compatible and
    # also valid on Python 3.
    (i, filename) = i_filename
    data = str(i) + '\n' + 'x' * 10000
    # NOTE(review): ``data`` is a native str (bytes on Py2); confirm whether
    # write_atomic expects bytes before running this under Python 3.
    write_atomic(filename, data)
def write_atomic_data(i_filename):
    """Atomically write an id-tagged 10k payload, then pause briefly.

    :param i_filename: ``(i, filename)`` tuple (passed positionally).
    """
    # Tuple parameters (``def f((i, filename))``) are Python 2-only syntax,
    # removed by PEP 3113; unpacking in the body is call-compatible and
    # also valid on Python 3.
    (i, filename) = i_filename
    data = str(i) + '\n' + 'x' * 10000
    # NOTE(review): ``data`` is a native str (bytes on Py2); confirm whether
    # write_atomic expects bytes before running this under Python 3.
    write_atomic(filename, data)
    time.sleep(0.001)
def _write_atomic_data(i_filename):
    """Worker: atomically write a 10k payload tagged with its worker id."""
    worker_id, path = i_filename
    body = str(worker_id) + "\n" + "x" * 10000
    write_atomic(path, body.encode("utf-8"))
    time.sleep(0.001)  # let concurrent writers interleave
def write_mapproxy_config(mapproxy_conf, filename):
    """Serialize *mapproxy_conf* to YAML and atomically write it to *filename*."""
    serialized = yaml.safe_dump(mapproxy_conf, default_flow_style=False)
    write_atomic(filename, serialized)