def TempFile(infile=None):
    """Create a suitable temporary file.

    If *infile* is a readable file-like object, its contents are copied
    into the temporary file and the file is rewound to the beginning.

    :param infile: optional source object with a ``read`` attribute
    :return: a fresh ``TemporaryFile`` (positioned at offset 0)
    """
    spooled = TemporaryFile()
    if infile and hasattr(infile, 'read'):
        spooled.writelines(infile)
        spooled.seek(0)
    return spooled
def exportFile(self, oid, f=None):
    """Export the object graph rooted at *oid* in ZEXP format.

    :param oid: root object id to export
    :param f: target file object, a filename string, or None to use a
        fresh TemporaryFile
    :return: the file object written to (positioned after the end marker)
    """
    if f is None:
        f = TemporaryFile()
    elif isinstance(f, str):
        f = open(f, 'w+b')
    f.write('ZEXP')
    oids = [oid]
    done_oids = set()  # oids already serialized (was a dict used as a set)
    load = self._storage.load
    while oids:
        oid = oids.pop(0)
        if oid in done_oids:
            continue
        done_oids.add(oid)
        try:
            p, serial = load(oid, self._version)
        # Narrowed from a bare except: a dangling reference is logged and
        # skipped (best-effort export), but KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        except Exception:
            logger.debug("broken reference for oid %s",
                         repr(oid), exc_info=True)
        else:
            # Queue every oid referenced by this pickle, then emit the
            # record: oid, 8-byte length, pickle.
            referencesf(p, oids)
            f.writelines([oid, p64(len(p)), p])
    f.write(export_end_marker)
    return f
def exportFile(self, oid, f=None):
    """Export the object graph rooted at *oid* in ZEXP format.

    :param oid: root object id to export
    :param f: target file object, a filename string, or None to use a
        fresh TemporaryFile
    :return: the file object written to (positioned after the end marker)
    """
    if f is None:
        f = TemporaryFile()
    elif isinstance(f, str):
        f = open(f, 'w+b')
    f.write('ZEXP')
    oids = [oid]
    done_oids = set()  # oids already serialized (was a dict used as a set)
    load = self._storage.load
    while oids:
        oid = oids.pop(0)
        if oid in done_oids:
            continue
        done_oids.add(oid)
        try:
            p, serial = load(oid, self._version)
        # Narrowed from a bare except: a dangling reference is logged and
        # skipped (best-effort export), but KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        except Exception:
            logger.debug("broken reference for oid %s",
                         repr(oid), exc_info=True)
        else:
            # Queue every oid referenced by this pickle, then emit the
            # record: oid, 8-byte length, pickle.
            referencesf(p, oids)
            f.writelines([oid, p64(len(p)), p])
    f.write(export_end_marker)
    return f
def addValues(self, values, experiment):
    """Substitute *values* for this protocol's variables and replay the
    resulting protocol text line-by-line into *experiment*.

    Does nothing when the number of values does not match the number of
    declared variables.
    """
    if len(self.variables) == len(values):
        readyProtocol = self.info
        newProtocol = []
        for i in range(0, len(readyProtocol)):
            mystring = readyProtocol[i]
            newProtocol.append(mystring)
            # Replace every variable placeholder with its value in this line.
            # NOTE(review): loop nesting reconstructed from collapsed source;
            # the inner loop is assumed to run once per protocol line — confirm.
            for v in range(0, len(values)):
                newProtocol[i] = newProtocol[i].replace(self.variables[v], values[v])
        from tempfile import TemporaryFile
        # Round-trip through a text-mode temp file so the protocol can be
        # consumed with readline() below.
        protocolFile = TemporaryFile(mode='r+')
        protocolFile.writelines(newProtocol)
        protocolFile.seek(0)
        line = protocolFile.readline()
        # Human-targeted experiments get a '$' separator instead of banners.
        if experiment.platform != 'human':
            experiment.addComment('------ BEGIN PROTOCOL ' + self.name + ', variables: ' + ' '.join(self.variables) + '; values: ' + ' '.join(values) + ' ------')
        else:
            experiment.addComment('$')
        while line != '':
            splitline = line.split()
            LineToList(splitline, protocolFile, experiment)
            line = protocolFile.readline()
        if experiment.platform != 'human':
            experiment.addComment('------ END PROTOCOL ' + self.name + ' ------')
        else:
            experiment.addComment('$')
def write_sorted_file_chunk(lines, temp_files_list):
    """Sort a chunk of lines and spill it to a temporary file.

    Sorts *lines* in place by their third space-separated field, writes
    them to a new temporary file, rewinds it, and appends the open file
    object to *temp_files_list* (for a later k-way merge).

    :param lines: list of strings
    :param temp_files_list: list of file objects (mutated: one file appended)
    """
    lines.sort(key=lambda line: line.split(' ')[2])
    # Text mode: the chunks are str lines; the former binary-mode default
    # of TemporaryFile() rejects str on Python 3.
    temp_file = TemporaryFile(mode='w+')
    temp_file.writelines(lines)
    temp_file.seek(0)
    temp_files_list.append(temp_file)
def decompressMessage(self, src):
    """Rebuild a binary uint8 image from a serialized npz message.

    *src* is a sequence whose first two elements are the height and width
    (convertible to int) and whose remaining elements are the raw bytes of
    an npz archive containing a flat boolean/uint8 array under key 'img'.

    :return: (height, width) uint8 ndarray with 255 where 'img' is truthy,
        0 elsewhere
    """
    height = int(src[0])
    width = int(src[1])
    tmp = TemporaryFile()
    tmp.writelines(src[2:])
    tmp.seek(0)
    npz = np.load(tmp)
    body = npz['img']
    # Start from zeros: np.ndarray() leaves uninitialized memory, so pixels
    # where body[i] is falsy would contain garbage.
    ret = np.zeros((height, width), dtype=np.uint8)
    for i in range(height * width):
        if body[i]:
            # Integer division (i/width is a float on Python 3).
            ret[i // width][i % width] = 255
    # The original never returned the decoded image.
    return ret
def callback(self, sarray):
    """Decode an npz-compressed image from *sarray* and republish it as a
    mono8 image message.

    :param sarray: message whose ``.data`` holds the bytes of an npz
        archive with the image under key 'img'
    """
    # Removed unused locals (height/width were read from sarray[0:2] but
    # never used).
    tmp = TemporaryFile()
    tmp.writelines(sarray.data)
    tmp.seek(0)
    data = np.load(tmp)
    img = data['img']
    # Add a trailing channel axis so the bridge treats it as single-channel.
    img.shape = (img.shape[0], img.shape[1], 1)
    imgmsg = self.bridge.cv2_to_imgmsg(img)
    imgmsg.encoding = 'mono8'
    self.pub.publish(imgmsg)
def write(self, data):
    """Append *data* to the underlying stream, spilling to a real
    temporary file once the in-memory buffer would exceed the small-file
    threshold.

    :param data: bytes to append
    """
    if not self._isFile and self._stream.tell() + len(data) > self._smallFileSize:
        # Crossing the small-file limit: migrate everything written so far
        # into a temporary file and continue writing there.
        from tempfile import TemporaryFile
        spill = TemporaryFile()
        self._stream.seek(0)
        spill.writelines(self._stream)
        self._stream.close()
        self._stream = spill
        self._isFile = True
    self._stream.write(data)
def plates():
    """Parse plate definitions posted in the request body and return them
    as a JSON list of [key, [index0, index1], nickname] triples.
    """
    raw_lines = request.body.read().decode().split('\n')[2:-2]
    plate_file = TemporaryFile(mode='r+')
    plate_file.writelines(raw_lines)
    plate_file.seek(0)
    plate_indexes = {}
    plate_nicknames = {}
    experiment = False
    PlateFileParse(plate_file, experiment, plate_nicknames, plate_indexes)
    plates = [
        [key, [plate_indexes[key][0], plate_indexes[key][1]], plate_nicknames[key]]
        for key in plate_indexes.keys()
    ]
    tojs = json_dumps(plates)
    print(plate_nicknames, plate_indexes)
    return tojs
def bulk_insert(self, rawData, owner='default'):
    """Bulk-insert helper (currently unused).

    :param owner: database alias key from settings; defaults to 'default'
    :param rawData: list of rows; each element is one database record
    :return: True on success, False on failure
    """
    try:
        # NOTE(review): the temporary file is written and closed but never
        # handed to the database; cursor.execute(self.sql) runs without it.
        # Presumably meant to feed a bulk-load statement — confirm before
        # relying on this method.
        tempf = TemporaryFile(mode='w+t')
        for row in rawData:
            tempf.writelines([unicode(item) + ' ' for item in row])
        tempf.seek(0)
        tempf.close()
        cursor = connections[owner].cursor()
        cursor.execute(self.sql)
        transaction.commit_unless_managed(owner)
        # The docstring promises True on success; the original fell through
        # and returned None.
        return True
    # 'except Exception, e' is Python-2-only syntax and 'e' was unused.
    except Exception:
        transaction.rollback(owner)
        return False
def fpost(
        myHeaders={
            'User-Agent': 'PcGroup Util Client',
            'content-type': 'application/json; charset=UTF-8',
            'Accept-Encoding': '*/*'
        }):
    # Ad-hoc experiment: POST a hand-built Elasticsearch _bulk payload to a
    # fixed host. Python 2 code (print statements).
    # NOTE(review): mutable default argument for myHeaders — shared across
    # calls; the TemporaryFile and the 'files' dict are built but never sent
    # (the commented-out requests.request call used them).
    url = 'http://192.168.12.81:9200/_bulk'
    # Bulk body: one action line plus two document lines.
    data = "{ \"index\" : { \"_index\" : \"test2\", \"_type\" : \"log\"} }\n"
    data = data + "{\"ok\":\"a\"," + "\"ok2\":1}\n"
    data = data + "{\"ok\":\"b\"," + "\"ok2\":2}"
    temp = TemporaryFile()
    temp.writelines('{"index" : { "_index" : "test2", "_type" : "log"}}')
    temp.writelines('{"ok":"a","ok2":1}')
    #temp.write('{"ok":"a","ok2":1}')
    #temp.write(data)
    #temp.flush()
    temp.seek(0)
    # Echo what was written to the temp file for debugging.
    print temp.readline()
    print temp.readline()
    print temp.mode
    files = {
        'file': open('/Users/sky/Documents/company/esManager/pcPython/logs/test.log', "rb")
    }
    #files = {'file': temp}
    # Sends the payload base64-encoded — presumably for transport testing;
    # Elasticsearch itself expects plain NDJSON here. TODO confirm intent.
    r = requests.post(url, base64.b64encode(data.encode("utf8")).decode("ascii"), headers=myHeaders)
    print r.request.headers
    print r.headers
    #r = requests.request("POST" , url , files = files, headers = myHeaders)
    print r.text
def addValues(self, values, experiment):
    """Substitute *values* for this protocol's variables and replay the
    resulting protocol text line-by-line into *experiment*.

    Does nothing when the number of values does not match the number of
    declared variables.
    """
    if len(self.variables) == len(values):
        readyProtocol = self.info
        newProtocol = []
        for i in range(0, len(readyProtocol)):
            mystring = readyProtocol[i]
            newProtocol.append(mystring)
            # Replace every variable placeholder with its value in this line.
            # NOTE(review): loop nesting reconstructed from collapsed source;
            # the inner loop is assumed to run once per protocol line — confirm.
            for v in range(0, len(values)):
                newProtocol[i] = newProtocol[i].replace(self.variables[v], values[v])
        from tempfile import TemporaryFile
        # Round-trip through a text-mode temp file so the protocol can be
        # consumed with readline() below.
        protocolFile = TemporaryFile(mode='r+')
        protocolFile.writelines(newProtocol)
        protocolFile.seek(0)
        line = protocolFile.readline()
        experiment.addComment('------ BEGIN PROTOCOL ' + self.name + ', variables: ' + ' '.join(self.variables) + '; values: ' + ' '.join(values) + ' ------')
        while line != '':
            splitline = line.split()
            LineToList(splitline, protocolFile, experiment)
            line = protocolFile.readline()
        experiment.addComment('------ END PROTOCOL ' + self.name + ' ------')
def exportFile(self, oid, f=None):
    """Export the object graph rooted at *oid* in ZEXP format, including
    blob data when the storage supports blobs.

    :param oid: root object id to export
    :param f: target file object, a filename string, or None to use a
        fresh TemporaryFile
    :return: the file object written to (positioned after the end marker)
    """
    if f is None:
        f = TemporaryFile(prefix="EXP")
    elif isinstance(f, six.string_types):
        f = open(f, 'w+b')
    f.write(b'ZEXP')
    oids = [oid]
    done_oids = set()  # oids already serialized (was a dict used as a set)
    load = self._storage.load
    supports_blobs = IBlobStorage.providedBy(self._storage)
    while oids:
        oid = oids.pop(0)
        if oid in done_oids:
            continue
        done_oids.add(oid)
        try:
            p, serial = load(oid)
        # Narrowed from a bare except: a dangling reference is logged and
        # skipped (best-effort export), but KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        except Exception:
            logger.debug("broken reference for oid %s",
                         repr(oid), exc_info=True)
        else:
            # Queue every oid referenced by this pickle, then emit the
            # record: oid, 8-byte length, pickle.
            referencesf(p, oids)
            f.writelines([oid, p64(len(p)), p])
            if supports_blobs:
                if not isinstance(self._reader.getGhost(p), Blob):
                    continue  # not a blob
                blobfilename = self._storage.loadBlob(oid, serial)
                f.write(blob_begin_marker)
                f.write(p64(os.stat(blobfilename).st_size))
                # 'with' guarantees the blob file is closed even if the
                # copy raises (original leaked the handle on error).
                with open(blobfilename, "rb") as blobdata:
                    cp(blobdata, f)
    f.write(export_end_marker)
    return f
def testAddMapping(self):
    """A synthetic cygwin mount table should produce the expected
    POSIX-to-Windows path map and mount root."""
    mount = (
        "C:/cygwin/bin on /usr/bin type ntfs (binary,auto){LF}"
        "C:/cygwin/lib on /usr/lib type ntfs (binary,auto){LF}"
        "C:/cygwin on / type ntfs (binary,auto){LF}"
        "C: on /cygdrive/c type ntfs (binary,posix=0,user,noumount,auto){LF}"
        "".format(LF="\n")
    )
    # Round-trip the table through a temp file to get mtab-style lines.
    with TemporaryFile(mode='w+') as f:
        f.writelines(mount)
        f.seek(0)
        mtab = f.readlines()
    expected = {
        '/usr/bin/': "C:/cygwin/bin/",
        '/usr/lib/': "C:/cygwin/lib/",
        '/cygdrive/c/': "C:/",
    }
    self.obj._addMapping(mtab)
    self.assertEqual(self.obj.getMap(), expected)
    self.assertEqual(self.obj.getMountRoot(), "C:/cygwin/")
def exportFile(self, oid, f=None):
    """Export the object graph rooted at *oid* in ZEXP format, including
    blob data when the storage supports blobs.

    :param oid: root object id to export
    :param f: target file object, a filename string, or None to use a
        fresh TemporaryFile
    :return: the file object written to (positioned after the end marker)
    """
    if f is None:
        f = TemporaryFile(prefix="EXP")
    elif isinstance(f, six.string_types):
        f = open(f, 'w+b')
    f.write(b'ZEXP')
    oids = [oid]
    done_oids = set()  # oids already serialized (was a dict used as a set)
    load = self._storage.load
    supports_blobs = IBlobStorage.providedBy(self._storage)
    while oids:
        oid = oids.pop(0)
        if oid in done_oids:
            continue
        done_oids.add(oid)
        try:
            p, serial = load(oid)
        # Narrowed from a bare except: a dangling reference is logged and
        # skipped (best-effort export), but KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        except Exception:
            logger.debug("broken reference for oid %s",
                         repr(oid), exc_info=True)
        else:
            # Queue every oid referenced by this pickle, then emit the
            # record: oid, 8-byte length, pickle.
            referencesf(p, oids)
            f.writelines([oid, p64(len(p)), p])
            if supports_blobs:
                if not isinstance(self._reader.getGhost(p), Blob):
                    continue  # not a blob
                blobfilename = self._storage.loadBlob(oid, serial)
                f.write(blob_begin_marker)
                f.write(p64(os.stat(blobfilename).st_size))
                # 'with' guarantees the blob file is closed even if the
                # copy raises (original leaked the handle on error).
                with open(blobfilename, "rb") as blobdata:
                    cp(blobdata, f)
    f.write(export_end_marker)
    return f
def testAddMapping(self):
    """A synthetic cygwin mount table should produce the expected
    POSIX-to-Windows path map and mount root."""
    pm = self._createPathMapper(self._var_root, self._var_cygwin_p)
    mount = (
        "C:/cygwin/bin on /usr/bin type ntfs (binary,auto){LF}"
        "C:/cygwin/lib on /usr/lib type ntfs (binary,auto){LF}"
        "C:/cygwin on / type ntfs (binary,auto){LF}"
        "C: on /cygdrive/c type ntfs (binary,posix=0,user,noumount,auto){LF}"
        "".format(LF="\n"))
    # Round-trip the table through a temp file to get mtab-style lines.
    with TemporaryFile(mode='w+') as f:
        f.writelines(mount)
        f.seek(0)
        mtab = f.readlines()
    expected = {
        '/usr/bin/': "C:/cygwin/bin/",
        '/usr/lib/': "C:/cygwin/lib/",
        '/cygdrive/c/': "C:/",
    }
    pm._addMapping(mtab)
    self.assertEqual(pm.getMap(), expected)
    self.assertEqual(pm.getMountRoot(), "C:/cygwin/")
(save_indexed_png_glob "%s") (gimp-quit 0) """ % (pattern, ) import sys def y_or_n(): while True: response = sys.stdin.readline().strip() if response == 'y': return True elif response == 'n': return False else: print "Please enter 'y' or 'n'" sys.stdout.write("""WARNING: This script will permanently convert (and reduce the quality of) all files matching the pattern "%s" in this directory. Please ensure you have backed up the originals. PNGs already indexed will not be affected. Are you sure you wish to proceed? (y/n) """ % (pattern, )) if y_or_n(): from subprocess import * from tempfile import TemporaryFile t = TemporaryFile() t.writelines(scm) t.seek(0) p = Popen("gimp -ib -", shell=True, stdin=t) p.communicate() #wait for completion else: print "Conversion aborted."
class S3File(io.IOBase):
    """File-like proxy for S3 objects.

    Manages upload and download via a locally managed temporary file:
    read/append modes download the S3 object into the tempfile on open,
    and writable modes upload the tempfile back to S3 on close.
    """

    def __init__(self, bucket, key, mode='w+b', *args, **kwargs):
        """
        :param bucket: S3 bucket name
        :param key: S3 object key
        :param mode: file mode ('r', 'w+b', 'ab', ...)
        :raises: re-raises any download error after closing the tempfile
        """
        super(S3File, self).__init__(*args, **kwargs)
        self.bucket = bucket
        self.key = key
        self.mode = mode
        self.path = self.bucket + '/' + self.key

        # Converts mode to readable/writable to enable the temporary file
        # to have S3 data read or written to it even if the S3File is
        # read/write/append, i.e. "r" => "r+", "ab" => "a+b".
        # NOTE(review): 'x' modes are treated as writable by writable() but
        # are not rewritten by this regex — confirm whether 'x'/'xb' need
        # support.
        updatable_mode = re.sub(r'^([rwa]+)(b?)$', r'\1+\2', mode)
        self._tempfile = TemporaryFile(updatable_mode)
        try:
            with s3errors(self.path):
                if 'a' in mode:
                    # Appending: start with the existing content, cursor at end.
                    s3.Object(bucket, key).download_fileobj(self._tempfile)
                    self.seek(0, os.SEEK_END)
                elif 'a' not in mode and 'w' not in mode and 'x' not in mode:
                    # Pure read mode: start with the existing content,
                    # cursor at the beginning.
                    s3.Object(bucket, key).download_fileobj(self._tempfile)
                    self.seek(0, os.SEEK_SET)
        except Exception:
            self.close()
            raise

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def close(self):
        """Upload pending content (writable modes) and close the tempfile.

        Idempotent: IOBase.__del__ invokes close() again; a second call
        must not seek/upload an already-closed temporary file (the
        original raised ValueError here).
        """
        if self._tempfile.closed:
            return
        try:
            if self.writable():
                self.seek(0)
                with s3errors(self.path):
                    s3.Object(self.bucket, self.key).upload_fileobj(self._tempfile)
        finally:
            self._tempfile.close()

    @property
    def closed(self):
        return self._tempfile.closed

    def fileno(self):
        return self._tempfile.fileno()

    def flush(self):
        return self._tempfile.flush()

    def isatty(self):
        return self._tempfile.isatty()

    def readable(self):
        return 'r' in self.mode or '+' in self.mode

    def read(self, n=-1):
        if not self.readable():
            raise IOError('not open for reading')
        return self._tempfile.read(n)

    def readinto(self, b):
        return self._tempfile.readinto(b)

    def readline(self, limit=-1):
        if not self.readable():
            raise IOError('not open for reading')
        return self._tempfile.readline(limit)

    def readlines(self, hint=-1):
        if not self.readable():
            raise IOError('not open for reading')
        return self._tempfile.readlines(hint)

    def seek(self, offset, whence=os.SEEK_SET):
        self._tempfile.seek(offset, whence)
        return self.tell()

    def seekable(self):
        return True

    def tell(self):
        return self._tempfile.tell()

    def writable(self):
        return 'w' in self.mode or 'a' in self.mode or '+' in self.mode or 'x' in self.mode

    def write(self, b):
        if not self.writable():
            raise IOError('not open for writing')
        self._tempfile.write(b)
        return len(b)

    def writelines(self, lines):
        if not self.writable():
            raise IOError('not open for writing')
        return self._tempfile.writelines(lines)

    def truncate(self, size=None):
        """Truncate to *size* (defaults to the current position)."""
        if not self.writable():
            raise IOError('not open for writing')
        if size is None:
            size = self.tell()
        self._tempfile.truncate(size)
        return size