def unpack(input_file, output_dir):
    name, ext = utils.splitext(input_file)
    if ext == '.zip':
        _unzip(input_file, output_dir)
    elif ext == '.tar.bz2':
        _untar(input_file, output_dir)
    else:
        raise Exception('unsupported archive type')
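# Usage sketch (not from the source; paths are hypothetical). unpack() dispatches on the
# extension reported by utils.splitext, which here is assumed to keep compound suffixes
# such as '.tar.bz2':
#   unpack('downloads/book.zip', '/tmp/out')        # -> _unzip
#   unpack('downloads/book.tar.bz2', '/tmp/out')    # -> _untar
#   unpack('downloads/book.rar', '/tmp/out')        # -> Exception('unsupported archive type')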
def get_page_state(self, user, archive_path):
    """return a tuple (current page, total pages) for an archive for the user."""
    key = unicode(archive_path)
    if user not in Dict['read_states'] or key not in Dict['read_states'][user]:
        cur, total = (-1, -1)
    else:
        cur, total = Dict['read_states'][user][key]
    if total <= 0:
        a = archives.get_archive(archive_path)
        total = len([x for x in a.namelist()
                     if utils.splitext(x)[-1] in utils.IMAGE_FORMATS])
    return (int(cur), int(total))
def vary_calcsfh_calls(phot, fake, params, outfile, subs, davs,
                       calcsfh=calcsfh, destination=None, check=False,
                       nproc=12, extra='', imf=None):
    """
    Loop over param files to create output filenames and calcsfh calls for
    parameters that vary but are not in the calcsfh parameter file.
    """
    runtot = len(params) * len(davs) * len(subs)
    print('Requested {0:d} calcsfh calls'.format(runtot))

    line = ''
    inproc = 0
    for sub in subs:
        subfmt = ''
        if sub is not None:
            subfmt = '_{0:s}'.format(sub)
        for dav in davs:
            for param in params:
                parfile = param
                if destination is not None:
                    parfile = os.path.join(destination, os.path.split(param)[1])
                prefx, _ = splitext(parfile)
                suffx = 'dav{0:g}{1:s}{2:s}_ssp'.format(dav, subfmt, extra)
                name = '_'.join([prefx, suffx])
                out = '{0:s}{1:s}'.format(name, OUTEXT)
                scrn = '{0:s}{1:s}'.format(name, SCRNEXT)
                if check and os.path.isfile(out):
                    print('{0:s} exists, not overwriting'.format(out))
                else:
                    inproc += 1
                    flags = getflags(dav, sub=sub, imf=imf)
                    line += ' '.join([calcsfh, param, phot, fake, out, flags,
                                      '>', scrn, '&\n'])
                    if nproc == inproc:
                        line += 'wait \n'
                        inproc = 0
    writeorappend(outfile, line)
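# Usage sketch (hypothetical filenames; calcsfh, getflags, OUTEXT, SCRNEXT, and
# writeorappend are assumed from the surrounding module). Each (sub, dav, param)
# combination becomes one backgrounded calcsfh command, with a 'wait' emitted after
# every nproc commands:
#   vary_calcsfh_calls('phot.match', 'fake.matchfake',
#                      params=['p1.param', 'p2.param'], outfile='run_calcsfh.sh',
#                      subs=[None, 'sub1'], davs=[0.0, 0.5], nproc=12)
#   # -> prints "Requested 8 calcsfh calls" and appends 8 command lines to run_calcsfh.sh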
def vary_matchparam(param_file, varyarrs=None, power_law_imf=True, params=None):
    """
    Vary parameters from a match param template file.

    Parameters
    ----------
    param_file : string
        calcsfh input (aka parameter) file
    varyarrs : dict
        a dictionary of array values where each key is XXXarr where XXX is a
        key in calcsfh_input_parameter
    power_law_imf : bool
        passed to calcsfh_input_parameter
    params : dict
        parameters to overwrite param_file with but not vary (probably tmin, tmax)

    Returns
    -------
    new_names : list
        list of new parameter file names (with the new parameter values in the
        filename) that were written
    """
    new_names = []
    varyarrs = varyarrs or {}
    params = params or {}

    pname, ext = splitext(param_file)
    template = read_calcsfh_param(param_file)
    template.update(params)
    # force using tbin, tmin, tmax:
    del template['ntbins']

    for vals in itertools.product(*varyarrs.values()):
        # Add the varied parameters to the filename
        name = []
        for i, val in enumerate(vals):
            key = list(varyarrs.keys())[i].replace('arr', '')
            template[key] = val
            name.append('{}{:g}'.format(key, val))

        new_param = calcsfh_input_parameter(power_law_imf=power_law_imf,
                                            **template)
        new_name = '{}_{}.{}'.format(pname, '_'.join(np.sort(name)), ext)

        with open(new_name, 'w') as outp:
            outp.write(new_param)
            # print('wrote {}'.format(new_name))
        new_names.append(new_name)
    return new_names
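# Usage sketch (hypothetical keys and values; read_calcsfh_param and
# calcsfh_input_parameter are assumed from the surrounding module). Keys in varyarrs
# end in 'arr' and map to calcsfh_input_parameter keys; one new parameter file is
# written per combination of the varied values:
#   new_files = vary_matchparam('template.param',
#                               varyarrs={'imfarr': [1.30, 1.35],
#                                         'dmod0arr': [24.40, 24.47]},
#                               params={'tmin': 6.6, 'tmax': 10.15})
#   # -> 4 parameter files whose names encode the varied imf and dmod0 values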
def unpack_dir(self, input_dir, output_dir, cache=None, config=None):
    # config_path = config.get_config_path()
    # if cache and config and cache.entry_has_changed(config_path):
    #     # if config has changes from last repack then we need to fully rerun unpack
    #     cache.invalidate()
    #     cache.update_entry(config_path)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    for input_file in os.listdir(input_dir):
        name, __ = utils.splitext(input_file)
        input_file = os.path.join(input_dir, input_file)
        module = config.get_module(name) if config else None
        if not config or module is not None:
            self.unpack_file(input_file, output_dir, cache, module)
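# Usage sketch (hypothetical paths; cache and config are the objects this class already
# expects). Every file in input_dir whose base name maps to a configured module is
# handed to unpack_file; with no config, every file is unpacked. output_dir is created
# if it is missing:
#   self.unpack_dir('assets/raw', 'build/unpacked', cache=cache, config=config)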
def get_page_state(self, user, archive_path):
    """return a tuple (current page, total pages) for an archive for the user."""
    key = unicode(archive_path)
    if user not in Dict['read_states'] or key not in Dict['read_states'][user]:
        cur, total = (-1, -1)
    else:
        cur, total = Dict['read_states'][user][key]
    if total <= 0 or total == 1:
        try:
            a = archives.get_archive(archive_path)
        except archives.ArchiveError:
            total = 1
        else:
            total = len([x for x in a.namelist()
                         if utils.splitext(x)[-1] in utils.IMAGE_FORMATS])
    return (int(cur), int(total))
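# Usage sketch (hypothetical path and user; Dict and Prefs are Plex plugin globals).
# A user with no stored state gets (-1, <image count>) once the archive opens; an
# unreadable archive falls back to a total of 1:
#   cur, total = self.get_page_state('alice', '/comics/Issue 01.cbz')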
def Comic(archive_path, user=None, page=0):
    """Return an oc with all pages in archive_path.

    if page > 0 return pages [page - Prefs['resume_length']:]
    """
    oc = ObjectContainer(title2=unicode(os.path.basename(archive_path)),
                         no_cache=True)
    try:
        archive = archives.get_archive(archive_path)
    except archives.ArchiveError as e:
        Log.Error(e)
        return error_message('bad archive',
                             'unable to open archive: {}'.format(archive_path))

    for f in utils.sorted_nicely(archive.namelist()):
        page_title, ext = utils.splitext(f)
        if not ext or ext not in utils.IMAGE_FORMATS:
            continue

        decoration = None
        if page > 0:
            m = utils.PAGE_NUM_REGEX.search(f)
            if m:
                page_num = int(m.group(1))
                if page_num < page - int(Prefs['resume_length']):
                    continue
                if page_num <= page:
                    decoration = '>'

        page_title = utils.basename(page_title)
        if decoration is not None:
            page_title = '{} {}'.format(decoration, page_title)

        if type(page_title) != unicode:
            try:
                page_title = page_title.decode('cp437')
            except Exception:
                try:
                    page_title = unicode(page_title, errors='replace')
                except Exception:
                    pass

        oc.add(CreatePhotoObject(
            media_key=Callback(
                GetImage,
                archive_path=String.Encode(archive_path),
                filename=String.Encode(f),
                user=user,
                extension=ext.lstrip('.'),
                time=int(time.time()) if bool(Prefs['prevent_caching']) else 0),
            rating_key=hashlib.sha1(
                '{}{}{}'.format(archive_path, f, user)).hexdigest(),
            title=page_title,
            thumb=utils.thumb_transcode(Callback(
                get_thumb, archive_path=archive_path, filename=f))))
    return oc
def Comic(archive_path, user=None, page=0):
    """Return an oc with all pages in archive_path.

    if page > 0 return pages [page - Prefs['resume_length']:]
    """
    oc = ObjectContainer(title2=unicode(os.path.basename(archive_path)),
                         no_cache=True)
    try:
        archive = archives.get_archive(archive_path)
    except archives.ArchiveError as e:
        Log.Error(e)
        return error_message('bad archive',
                             'unable to open archive: {}'.format(archive_path))

    for f in utils.sorted_nicely(archive.namelist()):
        page_title, ext = utils.splitext(f)
        if not ext or ext.lower() not in utils.IMAGE_FORMATS:
            continue

        decoration = None
        if page > 0:
            m = utils.PAGE_NUM_REGEX.search(f)
            if m:
                page_num = int(m.group(1))
                if page_num < page - int(Prefs['resume_length']):
                    continue
                if page_num <= page:
                    decoration = '>'

        page_title = utils.basename(page_title)
        if decoration is not None:
            page_title = '{} {}'.format(decoration, page_title)

        if type(page_title) != unicode:
            try:
                page_title = page_title.decode('cp437')
            except Exception:
                try:
                    page_title = unicode(page_title, errors='replace')
                except Exception:
                    pass

        oc.add(CreatePhotoObject(
            media_key=Callback(
                GetImage,
                archive_path=String.Encode(archive_path),
                filename=String.Encode(f),
                user=user,
                extension=ext.lstrip('.'),
                time=int(time.time()) if bool(Prefs['prevent_caching']) else 0),
            rating_key=hashlib.sha1(
                '{}{}{}'.format(archive_path, f, user)).hexdigest(),
            title=page_title,
            thumb=utils.thumb_transcode(Callback(
                get_thumb, archive_path=archive_path, filename=f))))
    return oc
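# Usage sketch (Plex route; hypothetical values). With page=12 and Prefs['resume_length']
# set to 5, entries numbered below 7 are skipped and entries numbered up to 12 are
# prefixed with '>' so the resume point stands out in the listing:
#   oc = Comic('/comics/Issue 01.cbz', user='alice', page=12)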
def get_cover(archive_path):
    """Return the contents of the first image file in `archive_path`."""
    archive = archives.get_archive(archive_path)
    x = sorted([x for x in archive.namelist()
                if utils.splitext(x)[-1] in utils.IMAGE_FORMATS])
    if x:
        return utils.data_object(archive, x[0])
def get_output_path(input_file, output_dir):
    input_file = os.path.abspath(input_file)
    input_file_name, __ = utils.splitext(os.path.basename(input_file))
    output_dir = os.path.abspath(output_dir)
    output_dir = os.path.join(output_dir, input_file_name)
    return output_dir
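# Usage sketch (hypothetical paths). The archive's base name, minus its extension,
# becomes a subdirectory of output_dir:
#   get_output_path('downloads/book.cbz', 'out')  # -> <abs path>/out/book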
def get_cover(archive_path):
    """Return the contents of the first image file in `archive_path`."""
    archive = archives.get_archive(archive_path)
    x = sorted([x for x in archive.namelist()
                if utils.splitext(x)[-1].lower() in utils.IMAGE_FORMATS])
    if x:
        return utils.data_object(archive, x[0])
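# Usage sketch (hypothetical path). Returns whatever utils.data_object wraps around the
# alphabetically first image entry, or None if the archive holds no images:
#   cover = get_cover('/comics/Issue 01.cbz')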