def generate_chunk(self, data):
    """Yield ``data`` in successive chunks of at most 1024 characters.

    Parameters
    ----------
    data : str
        The full payload to be split into chunks.

    Yields
    ------
    str
        Consecutive slices of ``data``, each up to 1024 characters long.
    """
    bufsize = 1024
    # Renamed from `io` to avoid shadowing the stdlib module name.
    stream = StringIO(data)
    try:
        while True:
            buf = stream.read(bufsize)
            if not buf:
                return
            yield buf
    finally:
        # The original placed close() after the loop, where it was
        # unreachable; a finally runs it even on early generator close.
        stream.close()
def _dump_pattern(input_file, destination, call_method): # type: (Union[str, BinaryIO], str, Callable) -> Union[None, str] if destination == 'stdout': call_method(input_file, destination=sys.stdout) elif destination == 'string': out = StringIO() call_method(input_file, destination=out) value = out.getvalue() out.close() # free the buffer return value else: the_out_file = _create_default_output_file( input_file) if destination == 'default' else destination with open(the_out_file, 'w') as fi: call_method(input_file, destination=fi)
def docker_pull(self, namespace, repos):
    """Exercise the full docker pull flow against the test index/registry,
    then delete the pulled tags and remove the images from storage.

    Requires ``self.index_endpoint``, ``self.registry_endpoint``,
    ``self.user_credentials``, ``self.image_id`` and ``self.parent_id``
    to have been set up by the surrounding test fixture.
    """
    # Test pull
    # Docker -> Index: list images for the repo, requesting an auth token
    # via the 'X-Docker-Token' header.
    resp = requests.get('{0}/v1/repositories/{1}/{2}/images'.format(
        self.index_endpoint, namespace, repos),
        auth=tuple(self.user_credentials),
        headers={'X-Docker-Token': 'true'})
    self.assertEqual(resp.status_code, 200)
    token = resp.headers.get('x-docker-token')
    # Here we should use the 'X-Endpoints' returned in a real environment
    # Docker -> Registry: resolve the 'latest' tag using the token.
    resp = requests.get('{0}/v1/repositories/{1}/{2}/tags/latest'.format(
        self.registry_endpoint, namespace, repos),
        headers={'Authorization': 'Token ' + token})
    self.assertEqual(resp.status_code, 200, resp.text)
    self.cookies = resp.cookies
    # Docker -> Registry: the tag endpoint returns the image id as JSON;
    # fetch that image's ancestry chain.
    image_id = json.loads(resp.text)
    resp = requests.get('{0}/v1/images/{1}/ancestry'.format(
        self.registry_endpoint, image_id), cookies=self.cookies)
    self.update_cookies(resp)
    self.assertEqual(resp.status_code, 200, resp.text)
    ancestry = json.loads(resp.text)
    # We got the ancestry, let's fetch all the images there
    for image_id in ancestry:
        json_data, checksum, blob = self.fetch_image(image_id)
        # check queried checksum and local computed checksum from the image
        # are the same
        tmpfile = StringIO()
        tmpfile.write(blob)
        tmpfile.seek(0)
        computed_checksum = checksums.compute_simple(tmpfile, json_data)
        tmpfile.close()
        self.assertEqual(checksum, computed_checksum)
    # Remove image tags
    resp = requests.delete('{0}/v1/repositories/{1}/{2}/tags'.format(
        self.registry_endpoint, namespace, repos), cookies=self.cookies)
    self.assertEqual(resp.status_code, 200, resp.text)
    self.update_cookies(resp)
    # Remove image_id, then parent_id
    store = storage.load()
    store.remove(os.path.join(store.images, self.image_id))
    store.remove(os.path.join(store.images, self.parent_id))
def dump_nitf_file(file_name, dest, over_write=True):
    """
    Utility to dump the NITF header and various subheader details to a
    configurable destination.

    Parameters
    ----------
    file_name : str|BinaryIO
        The path to or file-like object containing a NITF 2.1 or 2.0 file.
    dest : str
        'stdout', 'string', 'default' (will use `file_name+'.header_dump.txt'`),
        or the path to an output file.
    over_write : bool
        If `True`, then overwrite the destination file, otherwise append to
        the file.

    Returns
    -------
    None|str
        There is only a return value if `dest=='string'`.
    """
    if dest == 'stdout':
        print_nitf(file_name, dest=sys.stdout)
        return
    if dest == 'string':
        capture = StringIO()
        print_nitf(file_name, dest=capture)
        dumped = capture.getvalue()
        capture.close()  # free the buffer
        return dumped
    # Remaining cases write to a real file on disk.
    if dest == 'default':
        out_path = _create_default_output_file(file_name)
    else:
        out_path = dest
    # Append only when the target already exists and overwriting is off.
    mode = 'a' if (os.path.exists(out_path) and not over_write) else 'w'
    with open(out_path, mode) as the_file:
        print_nitf(file_name, dest=the_file)
def write_POSCAR(poscar, filename):
    """
    Write the contents of poscar to filename.

    The POSCAR body is rendered into an in-memory buffer first so the
    header line — a SHA1 of the body when the module-level ``hashes``
    flag is set, otherwise the file name — can be computed before the
    destination file is written.
    """
    global hashes
    buf = StringIO()
    try:
        buf.write("1.0\n")
        # Lattice vectors, one per line, taken column-wise from lattvec.
        for i in range(3):
            buf.write("{0[0]:>20.15f} {0[1]:>20.15f} {0[2]:>20.15f}\n".format(
                (poscar["lattvec"][:, i]).tolist()))
        buf.write("{0}\n".format(" ".join(poscar["elements"])))
        buf.write("{0}\n".format(" ".join([str(i) for i in poscar["numbers"]])))
        buf.write("Direct\n")
        # Atomic positions, one per line, taken column-wise.
        for i in range(poscar["positions"].shape[1]):
            buf.write("{0[0]:>20.15f} {0[1]:>20.15f} {0[2]:>20.15f}\n".format(
                poscar["positions"][:, i].tolist()))
        if hashes:
            header = hashlib.sha1(buf.getvalue().encode()).hexdigest()
        else:
            header = filename
        with open(filename, "w") as finalf:
            finalf.write("{0}\n".format(header))
            finalf.write(buf.getvalue())
    finally:
        # The original only closed the buffer on the success path, leaking
        # it whenever open()/write() raised; finally closes it always.
        buf.close()
def main(args):
    """Curses-based live monitor of per-PID bifrost RX/TX statistics.

    Polls get_transmit_receive() roughly once per poll_interval, lets the
    user move the selection with the arrow keys, and quits on 'q'.
    """
    hostname = socket.gethostname()
    blockList = get_transmit_receive()
    order = sorted([blockList[key]['pid'] for key in blockList])
    order = set(order)  # de-duplicated; replaced by a sorted list on first poll below
    nPID = len(order)
    # Curses setup: raw-ish input, no echo, non-blocking getch().
    scr = curses.initscr()
    curses.noecho()
    curses.cbreak()
    scr.keypad(1)
    scr.nodelay(1)
    size = scr.getmaxyx()
    std = curses.A_NORMAL
    rev = curses.A_REVERSE
    poll_interval = 1.0  # seconds between re-polls of the stats logs
    tLastPoll = 0.0      # 0.0 forces a poll on the first loop iteration
    try:
        sel = 0
        while True:
            t = time.time()
            ## Interact with the user
            c = scr.getch()
            curses.flushinp()
            if c == ord('q'):
                break
            elif c == curses.KEY_UP:
                sel -= 1
            elif c == curses.KEY_DOWN:
                sel += 1
            ## Find the current selected process and see if it has changed
            newSel = min([nPID - 1, max([0, sel])])
            if newSel != sel:
                # Selection changed: force an immediate re-poll.
                tLastPoll = 0.0
                sel = newSel
            ## Do we need to poll the system again?
            if t - tLastPoll > poll_interval:
                ## Save what we had before
                prevList = blockList
                ## Find all running processes
                # NOTE(review): pidDirs is collected and sorted but never
                # read afterwards in this view — confirm it is dead code.
                pidDirs = glob.glob(os.path.join(BIFROST_STATS_BASE_DIR, '*'))
                pidDirs.sort()
                ## Load the data
                blockList = get_transmit_receive()
                ## Sort
                order = sorted([blockList[key]['pid'] for key in blockList])
                order = list(set(order))
                nPID = len(order)
                ## Stats
                stats = get_statistics(blockList, prevList)
                ## Mark
                tLastPoll = time.time()
                ## Clear
                act = None
            ## For sel to be valid - this takes care of any changes between when
            ## we get what to select and when we polled the bifrost logs
            sel = min([nPID - 1, sel])
            ## Display
            k = 0
            ### General - selected
            try:
                output = ' PID: %i on %s' % (order[sel], hostname)
            except IndexError:
                output = ' PID: n/a on %s' % (hostname, )
            output += ' ' * (size[1] - len(output) - len(os.path.basename(__file__)) - 1)
            output += os.path.basename(__file__) + ' '
            output += '\n'
            k = _add_line(scr, k, 0, output, std)
            ### General - header
            k = _add_line(scr, k, 0, ' ', std)
            output = '%6s %9s %6s %9s %6s' % (
                'PID', 'RX Rate', 'RX #/s', 'TX Rate', 'TX #/s')
            output += ' ' * (size[1] - len(output))
            output += '\n'
            k = _add_line(scr, k, 0, output, rev)
            ### Data
            for o in order:
                curr = stats[o]
                if o == order[sel]:
                    # Remember the selected entry for the details pane below.
                    act = curr
                drateR, prateR = curr['rx']['drate'], curr['rx']['prate']
                drateR, drateuR = _set_units(drateR)
                drateT, prateT = curr['tx']['drate'], curr['tx']['prate']
                drateT, drateuT = _set_units(drateT)
                output = '%6i %7.2f%2s %6i %7.2f%2s %6i\n' % (
                    o, drateR, drateuR, prateR, drateT, drateuT, prateT)
                try:
                    if o == order[sel]:
                        sty = std | curses.A_BOLD
                    else:
                        sty = std
                except IndexError:
                    sty = std
                k = _add_line(scr, k, 0, output, sty)
                # Stop before running into the details pane's reserved rows.
                if k > size[0] - 9:
                    break
            # Pad with blank lines down to the details pane.
            while k < size[0] - 9:
                output = ' '
                k = _add_line(scr, k, 0, output, std)
            ### Details of selected
            output = 'Details - %8s %19s %19s' % (
                stats['updated'].strftime("%H:%M:%S"), 'RX', 'TX')
            output += ' ' * (size[1] - len(output))
            output += '\n'
            k = _add_line(scr, k, 0, output, rev)
            if act is not None:
                output = 'Good: %18iB %18iB\n' % (
                    act['rx']['good'], act['tx']['good'])
                k = _add_line(scr, k, 0, output, std)
                output = 'Missing: %18iB %18iB\n' % (
                    act['rx']['missing'], act['tx']['missing'])
                k = _add_line(scr, k, 0, output, std)
                output = 'Invalid: %18iB %18iB\n' % (
                    act['rx']['invalid'], act['tx']['invalid'])
                k = _add_line(scr, k, 0, output, std)
                output = 'Late: %18iB %18iB\n' % (
                    act['rx']['late'], act['tx']['late'])
                k = _add_line(scr, k, 0, output, std)
                output = 'Global Missing: %18.2f%% %18.2f%%\n' % (
                    act['rx']['gloss'], act['tx']['gloss'])
                k = _add_line(scr, k, 0, output, std)
                output = 'Current Missing: %18.2f%% %18.2f%%\n' % (
                    act['rx']['closs'], act['tx']['closs'])
                k = _add_line(scr, k, 0, output, std)
                output = 'Command: %s' % act['cmd']
                k = _add_line(scr, k, 0, output[:size[1]], std)
            ### Clear to the bottom
            scr.clrtobot()
            ### Refresh
            scr.refresh()
            ## Sleep
            time.sleep(_REDRAW_INTERVAL_SEC)
    except KeyboardInterrupt:
        pass
    except Exception as error:
        # Capture the traceback text before tearing down curses, so it can
        # be printed to the restored terminal below.
        exc_type, exc_value, exc_traceback = sys.exc_info()
        fileObject = StringIO()
        traceback.print_tb(exc_traceback, file=fileObject)
        tbString = fileObject.getvalue()
        fileObject.close()
    # Restore the terminal regardless of how the loop ended.
    scr.keypad(0)
    curses.echo()
    curses.nocbreak()
    curses.endwin()
    try:
        # NOTE(review): traceback.tb_lineno(tb) is a Python-2-only helper;
        # on Python 3 this would itself raise — confirm the target runtime
        # or switch to exc_traceback.tb_lineno.
        print("%s: failed with %s at line %i" % (
            os.path.basename(__file__), str(error),
            traceback.tb_lineno(exc_traceback)))
        for line in tbString.split('\n'):
            print(line)
    except NameError:
        # Normal exit ('q' or Ctrl-C): error/tbString were never bound.
        pass