def show_report(self, job_db, jobnum_list):
	# Plot the geographic location of all worker nodes on a world map and
	# display the resulting image. Requires numpy and basemap to be installed.
	try:
		import numpy
	except Exception:
		raise InstallationError('numpy is not installed!')
	try:
		from mpl_toolkits.basemap import Basemap
	except Exception:
		raise InstallationError('basemap is not installed!')
	hostname_dict = _get_hostname_dict(job_db, jobnum_list)
	pos_list = _get_positions(hostname_dict)
	# bounding box (with margin) around all positions: ((lon_min, lat_min), (lon_max, lat_max))
	((lon_min, lat_min), (lon_max, lat_max)) = _get_bl_tr(pos_list, margin=10)
	aspect = (lon_max - lon_min) / (lat_max - lat_min)
	png_buffer = BytesBuffer()
	(fig, axis) = _setup_figure(aspect)
	base_map = Basemap(projection='cyl', lat_0=0, lon_0=0,
		llcrnrlon=lon_min, llcrnrlat=lat_min,
		urcrnrlon=lon_max, urcrnrlat=lat_max)
	_draw_map(numpy, fig, axis, png_buffer, base_map, pos_list)
	self._show_image('map.png', png_buffer)
	png_buffer.close()
def _finish_hist(self, name_fig_axis, use_legend=False):
	# Save the currently active matplotlib figure in several image formats
	# and hand each rendered image over to the display machinery.
	if use_legend:
		matplotlib.pyplot.legend(loc='upper right', numpoints=1, frameon=False, ncol=2)
	for img_format in ('png', 'pdf'):
		img_buffer = BytesBuffer()
		matplotlib.pyplot.savefig(img_buffer, format=img_format)
		self._show_image('%s.%s' % (name_fig_axis[0], img_format), img_buffer)
		img_buffer.close()
def _decode_stream(fp):
	# Log every line of the given stream; lines tagged with '(B64) ' carry
	# base64-encoded, gzip-compressed payloads and are decoded first.
	log = logging.getLogger('script')
	for line in fp.readlines():
		if line.startswith('(B64) '):
			raw = base64.b64decode(line.replace('(B64) ', ''))
			line = gzip.GzipFile(fileobj=BytesBuffer(raw)).read().decode('ascii')
		log.info(line.rstrip())
def _getPartition(self, key):
	# Return the metadata dict for partition number *key*.
	# Partitions are bundled 100 at a time inside nested '<NNN>XX.tgz'
	# archives; the bundle holding the requested key is unpacked into memory
	# and cached across calls.
	# NOTE(fix): use floor division (//) for the bundle index - plain '/'
	# yields a float under Python 3 (float cache key, float passed to %03d);
	# '//' behaves identically on Python 2 and 3.
	if not self._cacheKey == key // 100:
		self._cacheKey = key // 100
		subTarFileObj = self._tar.extractfile('%03dXX.tgz' % (key // 100))
		subTarFileObj = BytesBuffer(gzip.GzipFile(fileobj = subTarFileObj).read()) # 3-4x speedup for sequential access
		self._cacheTar = tarfile.open(mode = 'r', fileobj = subTarFileObj)
	# parse the 'info' member into a dict; keys are ints, values go through
	# the configured per-key parser map
	data = self._fmt.parse(self._cacheTar.extractfile('%05d/info' % key).readlines(),
		keyParser = {None: int}, valueParser = self._parserMap)
	fileList = lmap(bytes2str, self._cacheTar.extractfile('%05d/list' % key).readlines())
	# expand the stored common path prefix (if any) back onto each file entry
	if DataSplitter.CommonPrefix in data:
		fileList = imap(lambda x: '%s/%s' % (data[DataSplitter.CommonPrefix], x), fileList)
	data[DataSplitter.FileList] = lmap(str.strip, fileList)
	return data
# Compare a chain of dataset source files and tabulate the blocks that
# disappeared from one source file to the next.
if opts.dataset_show_removed:
	if len(args) < 2:
		utils.exitWithUsage('%s <dataset source 1> <dataset source 2> ... <dataset source N> ' % sys.argv[0])
	removed = []
	oldDP = DataProvider.createInstance('ListProvider', config, args[0], None)
	for new in args[1:]:
		newDP = DataProvider.createInstance('ListProvider', config, new, None)
		# resync reports blocks that were added / went missing / changed
		# between the previous source and the current one
		(blocksAdded, blocksMissing, blocksChanged) = DataProvider.resyncSources(oldDP.getBlocks(show_stats = False), newDP.getBlocks(show_stats = False))
		for block in blocksMissing:
			tmp = dict(block)
			tmp[-1] = new  # key -1 records the source file the block vanished in
			removed.append(tmp)
		oldDP = newDP
	utils.printTabular([(DataProvider.Dataset, 'Dataset'), (DataProvider.BlockName, 'Block'), (-1, 'Removed in file')], removed)

# Dump a (possibly gzipped) logfile to stdout, transparently decoding any
# '(B64) ' tagged lines (base64-encoded, gzip-compressed payloads).
if opts.logfile_decode:
	import base64, gzip
	from grid_control.utils.file_objects import ZipFile
	if opts.logfile_decode.endswith('.gz'):
		fp = ZipFile(opts.logfile_decode, 'r')
	else:
		fp = open(opts.logfile_decode, 'r')
	for line in fp.readlines():
		if line.startswith('(B64) '):
			buffer = BytesBuffer(base64.b64decode(line.replace('(B64) ', '')))
			line = gzip.GzipFile(fileobj = buffer).read().decode('ascii')
		sys.stdout.write(line.rstrip() + '\n')
	fp.close()
def _createSubTar(self, subTarFileName):
	# Open a fresh gzip-compressed tarball backed by an in-memory buffer.
	# Returns the tarfile object, its backing buffer and the target name.
	memory_fp = BytesBuffer()
	sub_tar = tarfile.open(mode = 'w:gz', fileobj = memory_fp)
	return (sub_tar, memory_fp, subTarFileName)
def _create_nested_tar(self, fn):
	# Open a new gzip-compressed tarball in memory. The backing buffer and
	# the target filename are attached to the tarfile object so the caller
	# can retrieve them later when flushing the nested archive.
	memory_fp = BytesBuffer()
	result = tarfile.open(mode='w:gz', fileobj=memory_fp)
	result.nested_tar_fp = memory_fp
	result.nested_fn = fn
	return result
def _open_nested_tar(self, nested_fn):
	# Extract the nested tarball from the outer archive, decompress it fully
	# into memory and open it for reading (speeds up random member access).
	compressed_fp = self._tar.extractfile(nested_fn)
	unpacked = gzip.GzipFile(fileobj=compressed_fp).read()
	return tarfile.open(mode='r', fileobj=BytesBuffer(unpacked))