def write(cls, fn, pa):
    # Write a parameter dump: a header line with the tracked keys, then one
    # tab-separated line per job; a '!' after the job number marks inactive jobs.
    fp = ZipFile(fn, 'w')
    try:
        keys = sorted(ifilter(lambda p: not p.untracked, pa.getJobKeys()))
        fp.write('# %s\n' % json.dumps(keys))
        maxN = pa.getMaxJobs()
        if maxN:
            activity = utils.ActivityLog('Writing parameter dump')
            for jobNum in irange(maxN):
                activity.finish()
                activity = utils.ActivityLog('Writing parameter dump [%d/%d]' % (jobNum + 1, maxN))
                meta = pa.getJobInfo(jobNum)
                meta_str = str.join('\t', imap(lambda k: json.dumps(meta.get(k, '')), keys))
                if meta.get(ParameterInfo.ACTIVE, True):
                    fp.write('%d\t%s\n' % (jobNum, meta_str))
                else:
                    fp.write('%d!\t%s\n' % (jobNum, meta_str))
            activity.finish()
    finally:
        fp.close()
def __init__(self, head, data):
    ConsoleTable.__init__(self)
    self._write_line(json.dumps({'data': data, 'header': head}, sort_keys=True))
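# A minimal sketch of the line the constructor above emits, assuming a plain
# stream in place of ConsoleTable._write_line (stand-ins, not the real API):
import json

def demo_json_table(head, data, stream):
    # sort_keys makes the output deterministic, which helps when diffing logs
    stream.write(json.dumps({'data': data, 'header': head}, sort_keys=True) + '\n')

# Example: demo_json_table(['job', 'state'], [[0, 'DONE']], sys.stdout)
# prints: {"data": [[0, "DONE"]], "header": ["job", "state"]}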
def _formatFileEntry(self, k_s_v):
    # Format a single (key, separator, value) file entry for output
    (key, sep, value) = k_s_v
    if key in [DataSplitter.Metadata, DataSplitter.MetadataHeader]:
        return (key, sep, json.dumps(value))
    elif isinstance(value, list):
        return (key, sep, str.join(',', value))
    return (key, sep, value)
def write(cls, fn, psrc_len, psrc_metadata, psp_iter):
    # write parameter part of parameter adapter
    fp = GZipTextFile(fn, 'w')
    try:
        vn_list = sorted(lmap(lambda p: p.value, ifilter(lambda p: not p.untracked, psrc_metadata)))
        fp.write('# %s\n' % json.dumps(vn_list))
        progress = ProgressActivity('Writing parameter dump', progress_max=psrc_len)
        for jobnum, psp in enumerate(psp_iter):
            progress.update_progress(jobnum)
            psp_str = str.join('\t', imap(lambda k: json.dumps(psp.get(k, '')), vn_list))
            if psp.get(ParameterInfo.ACTIVE, True):
                fp.write('%d\t%s\n' % (jobnum, psp_str))
            else:
                fp.write('%d!\t%s\n' % (jobnum, psp_str))
        progress.finish()
    finally:
        fp.close()
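# A self-contained sketch of the dump format both write() variants produce,
# using only the standard library (the file name, key names, and the plain
# 'ACTIVE' key standing in for ParameterInfo.ACTIVE are made up):
import json

def demo_write_dump(fn, vn_list, psp_list):
    with open(fn, 'w') as fp:
        fp.write('# %s\n' % json.dumps(sorted(vn_list)))  # header: tracked keys
        for jobnum, psp in enumerate(psp_list):
            psp_str = '\t'.join(json.dumps(psp.get(k, '')) for k in sorted(vn_list))
            # a '!' after the job number marks an inactive parameter point
            marker = '' if psp.get('ACTIVE', True) else '!'
            fp.write('%d%s\t%s\n' % (jobnum, marker, psp_str))

# demo_write_dump('dump.txt', ['SEED'], [{'SEED': 1}, {'SEED': 2, 'ACTIVE': False}])
# dump.txt:
#   # ["SEED"]
#   0	1
#   1!	2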
def getMetadata(fi, idxList):
    # Only reference metadata indices that actually exist for this file entry
    idxList = ifilter(lambda idx: idx < len(fi[DataProvider.Metadata]), idxList)
    return json.dumps(lmap(lambda idx: fi[DataProvider.Metadata][idx], idxList))
def saveToStream(stream, dataBlocks, stripMetadata=False):
    # Serialize dataset blocks into an INI-style text format
    writer = StringBuffer()
    for block in dataBlocks:
        writer.write('[%s#%s]\n' % (block[DataProvider.Dataset], block[DataProvider.BlockName]))
        if DataProvider.Nickname in block:
            writer.write('nickname = %s\n' % block[DataProvider.Nickname])
        if DataProvider.DatasetID in block:
            writer.write('id = %d\n' % block[DataProvider.DatasetID])
        if DataProvider.NEntries in block:
            writer.write('events = %d\n' % block[DataProvider.NEntries])
        if block.get(DataProvider.Locations) is not None:
            writer.write('se list = %s\n' % str.join(',', block[DataProvider.Locations]))
        # Factor out the common path prefix of all file URLs to shorten entries
        cPrefix = os.path.commonprefix(lmap(lambda x: x[DataProvider.URL], block[DataProvider.FileList]))
        cPrefix = str.join('/', cPrefix.split('/')[:-1])
        if len(cPrefix) > 6:
            writer.write('prefix = %s\n' % cPrefix)
            formatter = lambda x: x.replace(cPrefix + '/', '')
        else:
            formatter = identity
        writeMetadata = (DataProvider.Metadata in block) and not stripMetadata
        if writeMetadata:
            (idxListBlock, idxListFile) = DataProvider.classifyMetadataKeys(block)
            def getMetadata(fi, idxList):
                return json.dumps(lmap(lambda idx: fi[DataProvider.Metadata][idx], idxList))
            writer.write('metadata = %s\n' % json.dumps(lmap(lambda idx: block[DataProvider.Metadata][idx], idxListBlock + idxListFile)))
            if idxListBlock:
                writer.write('metadata common = %s\n' % getMetadata(block[DataProvider.FileList][0], idxListBlock))
        for fi in block[DataProvider.FileList]:
            writer.write('%s = %d' % (formatter(fi[DataProvider.URL]), fi[DataProvider.NEntries]))
            if writeMetadata and idxListFile:
                writer.write(' %s' % getMetadata(fi, idxListFile))
            writer.write('\n')
        writer.write('\n')
    stream.write(writer.getvalue())
def _get_metadata_str(fi, idx_list):
    idx_list = ifilter(lambda idx: idx < len(fi[DataProvider.Metadata]), idx_list)
    return json.dumps(lmap(lambda idx: fi[DataProvider.Metadata][idx], idx_list))
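# A quick standalone check of the bounds-guarded lookup above, with a plain
# dict standing in for the file entry and a string key for DataProvider.Metadata:
import json

def demo_get_metadata_str(fi, idx_list):
    metadata = fi['Metadata']
    idx_list = [idx for idx in idx_list if idx < len(metadata)]  # drop out-of-range indices
    return json.dumps([metadata[idx] for idx in idx_list])

# demo_get_metadata_str({'Metadata': ['A', 'B']}, [0, 5]) == '["A"]'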
def _format_file_entry(self, k_s_v):
    # Function to format a single file entry with metadata
    (key, separator, value) = k_s_v
    if key in [DataSplitter.Metadata, DataSplitter.MetadataHeader]:
        return (key, separator, json.dumps(value))
    elif isinstance(value, list):
        return (key, separator, str.join(',', value))
    return (key, separator, value)
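# A hedged usage sketch for the formatter above, with made-up key constants
# standing in for the DataSplitter enumeration values:
import json

METADATA, PLAIN = 'Metadata', 'FileList'  # hypothetical stand-ins

def demo_format_file_entry(key, separator, value):
    if key == METADATA:
        return (key, separator, json.dumps(value))  # metadata stays JSON-encoded
    elif isinstance(value, list):
        return (key, separator, ','.join(value))  # lists collapse to CSV
    return (key, separator, value)

# demo_format_file_entry(PLAIN, '=', ['a.root', 'b.root'])
# -> ('FileList', '=', 'a.root,b.root')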
def saveToStream(stream, dataBlocks, stripMetadata=False):
    # Generator variant: flushes each block to the stream as soon as it is
    # serialized, reusing one buffer, and yields the block to the caller
    writer = StringBuffer()
    write_separator = False
    for block in dataBlocks:
        if write_separator:
            writer.write('\n')
        writer.write('[%s]\n' % DataProvider.bName(block))
        if DataProvider.Nickname in block:
            writer.write('nickname = %s\n' % block[DataProvider.Nickname])
        if DataProvider.NEntries in block:
            writer.write('events = %d\n' % block[DataProvider.NEntries])
        if block.get(DataProvider.Locations) is not None:
            writer.write('se list = %s\n' % str.join(',', block[DataProvider.Locations]))
        # Factor out the common path prefix of all file URLs to shorten entries
        cPrefix = os.path.commonprefix(lmap(lambda x: x[DataProvider.URL], block[DataProvider.FileList]))
        cPrefix = str.join('/', cPrefix.split('/')[:-1])
        if len(cPrefix) > 6:
            writer.write('prefix = %s\n' % cPrefix)
            formatter = lambda x: x.replace(cPrefix + '/', '')
        else:
            formatter = identity
        writeMetadata = (DataProvider.Metadata in block) and not stripMetadata
        if writeMetadata:
            (idxListBlock, idxListFile) = DataProvider.classifyMetadataKeys(block)
            def getMetadata(fi, idxList):
                # Only reference metadata indices that actually exist for this file
                idxList = ifilter(lambda idx: idx < len(fi[DataProvider.Metadata]), idxList)
                return json.dumps(lmap(lambda idx: fi[DataProvider.Metadata][idx], idxList))
            writer.write('metadata = %s\n' % json.dumps(lmap(lambda idx: block[DataProvider.Metadata][idx], idxListBlock + idxListFile)))
            if idxListBlock:
                writer.write('metadata common = %s\n' % getMetadata(block[DataProvider.FileList][0], idxListBlock))
        for fi in block[DataProvider.FileList]:
            writer.write('%s = %d' % (formatter(fi[DataProvider.URL]), fi[DataProvider.NEntries]))
            if writeMetadata and idxListFile:
                writer.write(' %s' % getMetadata(fi, idxListFile))
            writer.write('\n')
        stream.write(writer.getvalue())
        writer.seek(0)
        writer.truncate(0)
        write_separator = True
        yield block
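# Design note: unlike the earlier saveToStream, this version is a generator.
# Each block is written out immediately and then yielded, so callers can chain
# serialization into a processing pipeline. A hedged consumption sketch
# ('provider' and 'process' are hypothetical):
#
#   for block in saveToStream(open('datacache', 'w'), provider.getBlocks()):
#       process(block)
#
# Remember to drain the generator; nothing is written if it is never iterated.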
def _main():
    # Query the CMS run registry via XML-RPC and print the lumi section JSON
    parser = ScriptOptions()
    parser.add_text(None, None, 'url', default='http://pccmsdqm04.cern.ch/runregistry/xmlrpc',
        help='URL to runregistry [Default:%s]')
    parser.add_text(None, None, 'run', default='Collisions10',
        help='Specify run era that will be queried for the lumi json file [Default:%s]')
    options = parser.script_parse()
    server_proxy_cls = resolve_fun('xmlrpc.client:ServerProxy', 'xmlrpclib:ServerProxy')
    server = server_proxy_cls(options.opts.url).DataExporter
    data = server.export('RUNLUMISECTION', 'GLOBAL', 'json', {'groupName': options.opts.run})
    logging.getLogger('script').info(json.dumps(data))
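# resolve_fun presumably picks the first importable of the named objects; a
# minimal sketch of that Python 2/3 fallback using only the standard library:
def demo_resolve_server_proxy():
    try:
        from xmlrpc.client import ServerProxy  # Python 3
    except ImportError:
        from xmlrpclib import ServerProxy  # Python 2
    return ServerProxy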
def getMetadata(fi, idxList):
    return json.dumps(lmap(lambda idx: fi[DataProvider.Metadata][idx], idxList))
def save_to_stream(stream, block_iter, strip_metadata=False):
    # Generator: serialize each dataset block to the stream and yield it
    writer = StringBuffer()
    write_separator = False
    for block in block_iter:
        if write_separator:
            writer.write('\n')
        writer.write('[%s]\n' % DataProvider.get_block_id(block))
        if DataProvider.Nickname in block:
            writer.write('nickname = %s\n' % block[DataProvider.Nickname])
        if DataProvider.NEntries in block:
            writer.write('events = %d\n' % block[DataProvider.NEntries])
        if block.get(DataProvider.Locations) is not None:
            writer.write('se list = %s\n' % str.join(',', block[DataProvider.Locations]))
        # Factor out the common path prefix of all file URLs to shorten entries
        common_prefix = os.path.commonprefix(lmap(itemgetter(DataProvider.URL), block[DataProvider.FileList]))
        common_prefix = str.join('/', common_prefix.split('/')[:-1])
        if len(common_prefix) > 6:
            def _formatter(value):
                return value.replace(common_prefix + '/', '')
            writer.write('prefix = %s\n' % common_prefix)
        else:
            _formatter = identity
        do_write_metadata = (DataProvider.Metadata in block) and not strip_metadata
        if do_write_metadata:
            def _get_metadata_str(fi, idx_list):
                # Only reference metadata indices that actually exist for this file
                idx_list = ifilter(lambda idx: idx < len(fi[DataProvider.Metadata]), idx_list)
                return json.dumps(lmap(lambda idx: fi[DataProvider.Metadata][idx], idx_list))
            (metadata_idx_list_block, metadata_idx_list_file) = _split_metadata_idx_list(block)
            metadata_header_str = json.dumps(lmap(lambda idx: block[DataProvider.Metadata][idx],
                metadata_idx_list_block + metadata_idx_list_file))
            writer.write('metadata = %s\n' % metadata_header_str)
            if metadata_idx_list_block:
                metadata_str = _get_metadata_str(block[DataProvider.FileList][0], metadata_idx_list_block)
                writer.write('metadata common = %s\n' % metadata_str)
        for fi in block[DataProvider.FileList]:
            writer.write('%s = %d' % (_formatter(fi[DataProvider.URL]), fi[DataProvider.NEntries]))
            if do_write_metadata and metadata_idx_list_file:
                writer.write(' %s' % _get_metadata_str(fi, metadata_idx_list_file))
            writer.write('\n')
        stream.write(writer.getvalue())
        erase_content(writer)
        write_separator = True
        yield block
    writer.close()
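# For orientation, a made-up example of the INI-style section the writers
# above emit (values are invented; the layout follows the code, and the exact
# section header depends on DataProvider.get_block_id):
#
#   [/some/dataset#block0]
#   nickname = test
#   events = 1000
#   se list = SE1,SE2
#   prefix = /store/data
#   metadata = ["KEY1", "KEY2"]
#   metadata common = ["value"]
#   file1.root = 500 ["a"]
#   file2.root = 500 ["b"]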