Example #1
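This classmethod (evidently from grid-control's parameter handling) dumps all tracked job parameters to a compressed file: a '# '-prefixed header line carrying the JSON-encoded key list, then one tab-separated line of JSON values per job, with a '!' after the job number marking inactive jobs.
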
	def write(cls, fn, pa):
		fp = ZipFile(fn, 'w')
		try:
			# Header line: JSON-encoded list of all tracked parameter keys
			keys = sorted(ifilter(lambda p: not p.untracked, pa.getJobKeys()))
			fp.write('# %s\n' % json.dumps(keys))
			maxN = pa.getMaxJobs()
			if maxN:
				activity = utils.ActivityLog('Writing parameter dump')
				for jobNum in irange(maxN):
					activity.finish()
					activity = utils.ActivityLog('Writing parameter dump [%d/%d]' % (jobNum + 1, maxN))
					meta = pa.getJobInfo(jobNum)
					# One tab-separated line of JSON values per job; inactive jobs get a '!' suffix
					if meta.get(ParameterInfo.ACTIVE, True):
						fp.write('%d\t%s\n' % (jobNum, str.join('\t', imap(lambda k: json.dumps(meta.get(k, '')), keys))))
					else:
						fp.write('%d!\t%s\n' % (jobNum, str.join('\t', imap(lambda k: json.dumps(meta.get(k, '')), keys))))
				activity.finish()
		finally:
			fp.close()
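
For reference, a minimal sketch of the file layout this method produces, written with the standard library's gzip module; treating ZipFile as a gzip-backed text file is an assumption based on the command-line examples below, which pick ZipFile exactly when a file name ends in '.gz'. All key names and values are made up.

import gzip
import json

keys = ['PARAM_A', 'PARAM_B']  # hypothetical tracked parameter names
jobs = [
	(0, True, {'PARAM_A': 1, 'PARAM_B': 'x'}),
	(1, False, {'PARAM_A': 2, 'PARAM_B': 'y'}),
]  # (jobNum, active, metadata) with made-up values

fp = gzip.open('params.tsv.gz', 'wt')
try:
	fp.write('# %s\n' % json.dumps(keys))  # header: JSON-encoded key list
	for job_num, active, meta in jobs:
		marker = '' if active else '!'  # inactive jobs get a '!' suffix
		values = '\t'.join(json.dumps(meta.get(k, '')) for k in keys)
		fp.write('%d%s\t%s\n' % (job_num, marker, values))
finally:
	fp.close()
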
Example #2
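The matching reader, a ParameterSource subclass constructor: it restores the key list from the header line and turns each data line into a (marked, jobNum, values) tuple, where the flag is True for jobs the writer tagged with '!'; comment lines map to None.
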
	def __init__(self, fn):
		ParameterSource.__init__(self)
		fp = ZipFile(fn, 'r')
		try:
			keyline = fp.readline().lstrip('#').strip()
			self._keys = []
			if keyline:
				self._keys = parseJSON(keyline)
			def parseLine(line):
				if not line.startswith('#'):
					pNumStr, stored = lmap(str.strip, line.split('\t', 1))
					return ('!' in pNumStr, int(pNumStr.rstrip('!')), lmap(parseJSON, stored.split('\t')))
			self._values = lmap(parseLine, fp.readlines())
		finally:
			fp.close()
Example #3
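Writes a job-to-parameter-ID mapping: the first line holds the maximum parameter count (or 0), the second a comma-separated list of 'jobNum:pNum' pairs for every entry where the two numbers differ.
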
	def _writeJob2PID(self, fn):
		fp = ZipFile(fn, 'w')
		try:
			fp.write('%d\n' % (self._rawSource.getMaxParameters() or 0))
			data = ifilter(lambda jobNum_pNum: jobNum_pNum[0] != jobNum_pNum[1], self._mapJob2PID.items())
			datastr = lmap(lambda jobNum_pNum: '%d:%d' % jobNum_pNum, data)
			fp.write('%s\n' % str.join(',', datastr))
		finally:
			fp.close()
Example #4
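The corresponding reader: it restores maxN (treating 0 as None), rebuilds the job-to-PID dictionary from the comma-separated pairs, and resets the active-job map.
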
	def _readJob2PID(self):
		fp = ZipFile(self._pathJob2PID, 'r')
		try:
			self.maxN = int(fp.readline())
			if not self.maxN:
				self.maxN = None
			mapInfo = ifilter(identity, imap(str.strip, fp.readline().split(',')))
			self._mapJob2PID = dict(imap(lambda x: tuple(imap(lambda y: int(y.lstrip('!')), x.split(':'))), mapInfo))
			self._activeMap = {}
		finally:
			fp.close()
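
A minimal round-trip sketch of the two-line mapping file that _writeJob2PID and _readJob2PID exchange, again substituting the standard gzip module for ZipFile (same assumption as before); all numbers are made up.

import gzip

# Write side: line 1 is the maximum parameter count, line 2 the
# comma-separated 'jobNum:pNum' pairs that actually differ.
fp = gzip.open('job2pid.map.gz', 'wt')
try:
	fp.write('%d\n' % 42)
	fp.write('%s\n' % ','.join(['0:3', '1:5']))
finally:
	fp.close()

# Read side, following the same steps as _readJob2PID.
fp = gzip.open('job2pid.map.gz', 'rt')
try:
	max_n = int(fp.readline()) or None
	entries = [e.strip() for e in fp.readline().split(',') if e.strip()]
	job2pid = dict(tuple(int(p.lstrip('!')) for p in e.split(':')) for e in entries)
finally:
	fp.close()
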
Example #5
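An excerpt from a grid-control command-line tool with two independent option handlers: the first compares successive dataset listings and tabulates blocks that were removed; the second decodes a log file, opening it with ZipFile when the name ends in '.gz' and expanding '(B64) ' lines that hold base64-encoded, gzip-compressed payloads.
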
if opts.dataset_show_removed:
	if len(args) < 2:
		utils.exitWithUsage('%s <dataset source 1> <dataset source 2> ... <dataset source N> ' % sys.argv[0])
	removed = []
	oldDP = DataProvider.createInstance('ListProvider', config, args[0], None)
	for new in args[1:]:
		newDP = DataProvider.createInstance('ListProvider', config, new, None)
		(blocksAdded, blocksMissing, blocksChanged) = DataProvider.resyncSources(oldDP.getBlocks(show_stats = False), newDP.getBlocks(show_stats = False))
		for block in blocksMissing:
			tmp = dict(block)
			tmp[-1] = new
			removed.append(tmp)
		oldDP = newDP
	utils.printTabular([(DataProvider.Dataset, 'Dataset'), (DataProvider.BlockName, 'Block'), (-1, 'Removed in file')], removed)

if opts.logfile_decode:
	import base64, gzip
	from grid_control.utils.file_objects import ZipFile
	if opts.logfile_decode.endswith('.gz'):
		fp = ZipFile(opts.logfile_decode, 'r')
	else:
		fp = open(opts.logfile_decode, 'r')

	for line in fp.readlines():
		if line.startswith('(B64) '):
			buffer = BytesBuffer(base64.b64decode(line.replace('(B64) ', '')))
			line = gzip.GzipFile(fileobj = buffer).read().decode('ascii')
		sys.stdout.write(line.rstrip() + '\n')
	fp.close()
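
The decode loop above undoes a base64-plus-gzip encoding; the sketch below shows the inverse direction, i.e. one plausible way such a '(B64) ' line could be produced. This encode side is an assumption for illustration only and does not appear in the examples.

import base64
import gzip
import io

payload = 'a long log fragment that was compressed for transport\n'  # hypothetical content
buf = io.BytesIO()
gz = gzip.GzipFile(fileobj=buf, mode='wb')
gz.write(payload.encode('ascii'))
gz.close()  # close to flush the gzip trailer before reading the buffer
encoded_line = '(B64) ' + base64.b64encode(buf.getvalue()).decode('ascii')
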
Example #6
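A slightly different revision of the same script: it additionally silences utils.eprint and calls getBlocks() without the show_stats argument; the log-decoding block is unchanged.
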
if opts.dataset_show_removed:
	if len(args) < 2:
		utils.exitWithUsage('%s <dataset source 1> <dataset source 2> ... <dataset source N> ' % sys.argv[0])
	removed = []
	utils.eprint = lambda *x: {}
	oldDP = DataProvider.createInstance('ListProvider', config, args[0], None)
	for new in args[1:]:
		newDP = DataProvider.createInstance('ListProvider', config, new, None)
		(blocksAdded, blocksMissing, blocksChanged) = DataProvider.resyncSources(oldDP.getBlocks(), newDP.getBlocks())
		for block in blocksMissing:
			tmp = dict(block)
			tmp[-1] = new
			removed.append(tmp)
		oldDP = newDP
	utils.printTabular([(DataProvider.Dataset, 'Dataset'), (DataProvider.BlockName, 'Block'), (-1, 'Removed in file')], removed)

if opts.logfile_decode:
	import base64, gzip
	from grid_control.utils.file_objects import ZipFile
	if opts.logfile_decode.endswith('.gz'):
		fp = ZipFile(opts.logfile_decode, 'r')
	else:
		fp = open(opts.logfile_decode, 'r')

	for line in fp.readlines():
		if line.startswith('(B64) '):
			buffer = BytesBuffer(base64.b64decode(line.replace('(B64) ', '')))
			line = gzip.GzipFile(fileobj = buffer).read().decode('ascii')
		sys.stdout.write(line.rstrip() + '\n')
	fp.close()