def __init__(self, fn):
    """Load stored parameter keys and values from the gzip file *fn*.

    The first line (stripped of a leading '#') holds the key list; each
    following non-comment line is '<pnum>[!]' followed by tab-separated
    stored values. A '!' after the parameter number marks it invalid.
    """
    ParameterSource.__init__(self)
    fp = gzip.open(fn, 'rb')
    try:  # BUGFIX: previously the handle leaked if parsing raised
        keyline = fp.readline().lstrip('#').strip()
        self.keys = []
        if keyline:
            # NOTE(review): eval on file content - acceptable only because the
            # cache file is produced by this tool; never feed untrusted input
            self.keys = eval(keyline)

        def parseLine(line):
            # comment lines yield None entries in self.values (kept for compat)
            if not line.startswith('#'):
                pNumStr, stored = map(str.strip, line.split('\t', 1))
                return ('!' in pNumStr, int(pNumStr.rstrip('!')),
                    map(eval, stored.split('\t')))
        self.values = map(parseLine, fp.readlines())
    finally:
        fp.close()
def __init__(self, dataDir, srcName, dataProvider, dataSplitter, dataProc):
    """Initialize a dataset-backed parameter source.

    Reuses the cached partition map when both cache files exist; otherwise
    saves the provider state and splits the dataset into a new map.
    """
    ParameterSource.__init__(self)
    (self.dataDir, self.srcName, self.dataProvider, self.dataSplitter, self.dataProc) = \
        (dataDir, srcName, dataProvider, dataSplitter, dataProc)
    if not dataProvider:
        pass  # debug mode - used by scripts - disables resync
    elif os.path.exists(self.getDataPath('cache.dat')) and os.path.exists(self.getDataPath('map.tar')):
        # BUGFIX: was os.path.exists(a and b), which only checked the second path
        self.dataSplitter.importState(self.getDataPath('map.tar'))
    else:
        self.dataProvider.saveState(self.getDataPath('cache.dat'))
        self.dataSplitter.splitDataset(self.getDataPath('map.tar'), self.dataProvider.getBlocks())
    self.maxN = self.dataSplitter.getMaxJobs()
    self.keepOld = True
def __init__(self, fn):
    """Restore stored keys and parameter tuples from the zip file *fn*."""
    ParameterSource.__init__(self)
    archive = ZipFile(fn, 'r')
    try:
        header = archive.readline().lstrip('#').strip()
        if header:
            self._keys = parseJSON(header)
        else:
            self._keys = []

        def _decode(raw_line):
            # comment lines decode to None entries
            if raw_line.startswith('#'):
                return None
            (num_token, payload) = lmap(str.strip, raw_line.split('\t', 1))
            invalid = '!' in num_token
            return (invalid, int(num_token.rstrip('!')),
                lmap(parseJSON, payload.split('\t')))
        self._values = lmap(_decode, archive.readlines())
    finally:
        archive.close()
def __init__(self, dataDir, srcName, dataProvider, dataSplitter, dataProc, keepOld = True):
    """Initialize a dataset-backed parameter source.

    Reuses the cached partition map when both cache files exist; otherwise
    saves provider blocks to the cache and splits the dataset anew.
    """
    ParameterSource.__init__(self)
    (self._dataDir, self._srcName, self._dataProvider, self._dataSplitter, self._part_proc) = \
        (dataDir, srcName, dataProvider, dataSplitter, dataProc)
    if not dataProvider:
        pass  # debug mode - used by scripts - disables resync
    elif os.path.exists(self.getDataPath('cache.dat')) and os.path.exists(self.getDataPath('map.tar')):
        # BUGFIX: was os.path.exists(a and b), which only checked the second path
        self._dataSplitter.importPartitions(self.getDataPath('map.tar'))
    else:
        DataProvider.saveToFile(self.getDataPath('cache.dat'), self._dataProvider.getBlocks(silent = False))
        self._dataSplitter.splitDataset(self.getDataPath('map.tar'), self._dataProvider.getBlocks())
    self._maxN = self._dataSplitter.getMaxJobs()
    self._keepOld = keepOld
def __init__(self, fn):
    """Restore output variable names and parameter values from the gzip file *fn*."""
    ParameterSource.__init__(self)
    fp = GZipTextFile(fn, 'r')
    try:
        first_line = fp.readline().lstrip('#').strip()
        self._output_vn_list = parse_json(first_line) if first_line else []

        def _decode(raw):
            # '#'-prefixed lines are comments and decode to None
            if raw.startswith('#'):
                return None
            (num_token, payload) = raw.split('\t', 1)
            invalid = '!' in num_token
            pnum = int(num_token.replace('!', ' '))
            return (invalid, pnum, lmap(parse_json, payload.strip().split('\t')))
        self._values = lmap(_decode, fp.readlines())
    finally:
        fp.close()
def __init__(self, fn):
    """Read back key names and parameter entries from the zip file *fn*."""
    ParameterSource.__init__(self)
    fp = ZipFile(fn, 'r')
    try:
        keyline = fp.readline().lstrip('#').strip()
        self._keys = parseJSON(keyline) if keyline else []

        def _parse(entry):
            # non-'#' lines become (invalid, pnum, values); comments -> None
            if entry.startswith('#'):
                return None
            (pnum_part, value_part) = lmap(str.strip, entry.split('\t', 1))
            return ('!' in pnum_part, int(pnum_part.rstrip('!')),
                lmap(parseJSON, value_part.split('\t')))
        self._values = lmap(_parse, fp.readlines())
    finally:
        fp.close()
def __init__(self, fn):
    """Load the output variable list and stored parameter rows from *fn*."""
    ParameterSource.__init__(self)
    source = GZipTextFile(fn, 'r')
    try:
        header = source.readline().lstrip('#').strip()
        if header:
            self._output_vn_list = parse_json(header)
        else:
            self._output_vn_list = []

        def _parse_line(line):
            # keep None placeholders for comment lines
            if line.startswith('#'):
                return None
            (pnum_str, stored_json) = line.split('\t', 1)
            return ('!' in pnum_str, int(pnum_str.replace('!', ' ')),
                lmap(parse_json, stored_json.strip().split('\t')))
        self._values = lmap(_parse_line, source.readlines())
    finally:
        source.close()
def __init__(self, dataDir, srcName, dataProvider, dataSplitter, dataProc, keepOld=True):
    """Initialize a dataset-backed parameter source.

    Imports the cached partition map when both cache files exist; otherwise
    saves provider blocks and splits the dataset into a fresh map.
    """
    ParameterSource.__init__(self)
    (self._dataDir, self._srcName, self._dataProvider, self._dataSplitter, self._part_proc) = \
        (dataDir, srcName, dataProvider, dataSplitter, dataProc)
    if not dataProvider:
        pass  # debug mode - used by scripts - disables resync
    elif os.path.exists(self.getDataPath('cache.dat')) and os.path.exists(self.getDataPath('map.tar')):
        # BUGFIX: was os.path.exists(a and b), which only checked the second path
        self._dataSplitter.importPartitions(self.getDataPath('map.tar'))
    else:
        DataProvider.saveToFile(self.getDataPath('cache.dat'),
            self._dataProvider.getBlocks(silent=False))
        self._dataSplitter.splitDataset(self.getDataPath('map.tar'), self._dataProvider.getBlocks())
    self._maxN = self._dataSplitter.getMaxJobs()
    self._keepOld = keepOld
def __init__(self, *keys):
    """Store tracked key names and their metadata.

    A leading '!' on a key marks it untracked; the marker is stripped from
    the stored name.
    """
    ParameterSource.__init__(self)
    # BUGFIX: materialize as lists - under Python 3 a bare map() is a one-shot
    # iterator, so self.keys/self.meta would be exhausted after one traversal
    # (the sibling implementations use lmap, confirming lists are intended)
    self.keys = list(map(lambda key: key.lstrip('!'), keys))
    self.meta = list(map(lambda key: ParameterMetadata(key.lstrip('!'), untracked = '!' in key), keys))
def __init__(self, key):
    """Track a single key; a leading '!' marks it as untracked."""
    ParameterSource.__init__(self)
    untracked = '!' in key
    self.key = key.lstrip('!')
    self.meta = ParameterMetadata(self.key, untracked = untracked)
def __init__(self, values, keys):
    """Hold a fixed collection of values together with their key names."""
    ParameterSource.__init__(self)
    self.values = values
    self.keys = keys
def __init__(self, psrc):
    """Wrap the given parameter source for delegation."""
    ParameterSource.__init__(self)
    self._psrc = psrc
def __init__(self, *args, **kwargs):
    """Fail immediately - a redirector must never be instantiated directly."""
    ParameterSource.__init__(self)
    raise APIError('Redirector class initialized')
def __init__(self, *psrc_list):
    """Combine multiple parameter sources after dependency sorting."""
    ParameterSource.__init__(self)
    usable_sources = _strip_null_sources(psrc_list)
    self._psrc_list = list(_sort_deps(usable_sources))
    self._psrc_max_list = lmap(lambda psrc: psrc.get_parameter_len(), self._psrc_list)
    self._psrc_max = self._init_psrc_max()
def __init__(self, *psources):
    """Aggregate the given parameter sources and cache their sizes."""
    ParameterSource.__init__(self)
    self._psourceList = psources
    size_of = lambda p: p.getMaxParameters()
    self._psourceMaxList = lmap(size_of, self._psourceList)
    self._maxParameters = self.initMaxParameters()
def __init__(self, values, keys):
    """Store the supplied values together with their key names."""
    ParameterSource.__init__(self)
    self._values = values
    self._keys = keys
def __init__(self, hash_src_list):
    """Derive a stable hash from the repr of the given source list."""
    ParameterSource.__init__(self)
    fingerprint = repr(hash_src_list)
    self._hash = md5_hex(fingerprint)
def __init__(self, *psources):
    """Aggregate non-null parameter sources and cache their sizes."""
    ParameterSource.__init__(self)
    self._psourceList = strip_null_sources(psources)
    size_of = lambda p: p.getMaxParameters()
    self._psourceMaxList = lmap(size_of, self._psourceList)
    self._maxParameters = self._initMaxParameters()
def __init__(self, *psrc_list):
    """Collect the given sources in dependency order and cache their sizes."""
    ParameterSource.__init__(self)
    self._psrc_list = list(_sort_deps(_strip_null_sources(psrc_list)))
    lengths = []
    for psrc in self._psrc_list:
        lengths.append(psrc.get_parameter_len())
    self._psrc_max_list = lengths
    self._psrc_max = self._init_psrc_max()
def __init__(self, psrc):
    """Keep a reference to the wrapped parameter source."""
    ParameterSource.__init__(self)
    self._psrc = psrc
def __init__(self, *args, **kwargs):
    """Always raise - this redirector class is not meant to be constructed."""
    ParameterSource.__init__(self)
    raise APIError('Redirector class initialized')
def __init__(self, key):
    """Track one key; a leading '!' flags it untracked and is stripped."""
    ParameterSource.__init__(self)
    is_untracked = '!' in key
    self._key = key.lstrip('!')
    self._meta = ParameterMetadata(self._key, untracked=is_untracked)
def __init__(self, psource):
    """Store the wrapped parameter source for later delegation."""
    ParameterSource.__init__(self)
    self._psource = psource
def __init__(self, *keys):
    """Record key names and metadata; a '!' prefix marks a key untracked."""
    ParameterSource.__init__(self)
    self._keys = lmap(lambda key: key.lstrip('!'), keys)

    def _make_meta(key):
        # strip the untracked marker from the stored name
        return ParameterMetadata(key.lstrip('!'), untracked='!' in key)
    self._meta = lmap(_make_meta, keys)
def __init__(self, *psources):
    """Keep the non-null sources and precompute their parameter counts."""
    ParameterSource.__init__(self)
    self._psourceList = strip_null_sources(psources)
    counts = []
    for psource in self._psourceList:
        counts.append(psource.getMaxParameters())
    self._psourceMaxList = counts
    self._maxParameters = self._initMaxParameters()
def __init__(self, hash_src_list):
    """Compute and store a hash identifying the given source list."""
    ParameterSource.__init__(self)
    self._hash = md5_hex(repr(hash_src_list))