def main():
    import utils
    if len(sys.argv) > 1:
        arf = utils.ArchiveFile(sys.argv[1])
        cfg.set_override_config("something", 'newvalue!')
        cfg.load_configs_for_archive(arf)
    print cfg.get()
def plot_panel(self, arf, ax):
    # Get and prep archive
    ar = arf.get_archive()
    usetemplate = self.show_template
    if usetemplate:
        stdfn = toas.get_standard(arf, analytic=False)
        if os.path.exists(stdfn):
            # Standard exists
            stdarf = utils.ArchiveFile(stdfn)
            # Scrunch it fully
            stdar = stdarf.get_archive()
            stdar.pscrunch()
            stdar.fscrunch()
            stdar.tscrunch()
            if self.centre_prof:
                stdar.centre_max_bin()
            # Align profile with standard profile
            phs, err = ar.get_Profile(0, 0, 0).shift(stdar.get_Profile(0, 0, 0))
            ar.rotate_phase(phs)
            # Get and scale profile
            prof = ar.get_data().squeeze()
            prof -= np.median(prof)
            prof /= np.median(np.abs(prof))
            template = stdar.get_data().squeeze()
            try:
                amp, offset = clean_utils.fit_template(prof, template)
            except errors.FitError:
                warnings.warn("Error when scaling template. "
                              "Template will not be shown.",
                              errors.CoastGuardWarning)
                usetemplate = False
            else:
                template = amp * template - offset
        else:
            warnings.warn("No template available. "
                          "Template will not be shown.",
                          errors.CoastGuardWarning)
            usetemplate = False
    if not usetemplate:
        # This isn't an else-clause of the above because there are
        # cases where we turn template-showing off.
        if self.centre_prof:
            utils.print_info("Centering profile...", 2)
            ar.centre_max_bin()
        # Get and scale profile
        prof = ar.get_data().squeeze()
        prof -= np.median(prof)
        prof /= np.median(np.abs(prof))
    # Plot
    phases = np.linspace(0, 1.0, len(prof), endpoint=False)
    ax.plot(phases, prof, 'k-')
    if usetemplate:
        ax.plot(phases, template, 'r-', lw=1.5, alpha=0.5)
def set_chapters(self, gallery_object, add_to_model=True):
    path = gallery_object.path
    chap_container = gallerydb.ChaptersContainer(gallery_object)
    metafile = utils.GMetafile()
    try:
        log_d('Listing dir...')
        con = scandir.scandir(path)  # list all folders in gallery dir
        log_i('Gallery source is a directory')
        log_d('Sorting')
        chapters = sorted([sub.path for sub in con
                           if sub.is_dir() or sub.name.endswith(utils.ARCHIVE_FILES)])  # subfolders
        # if gallery has chapters divided into sub folders
        if len(chapters) != 0:
            log_d('Chapters divided in folders..')
            for ch in chapters:
                chap = chap_container.create_chapter()
                chap.title = utils.title_parser(ch)['title']
                chap.path = os.path.join(path, ch)
                metafile.update(utils.GMetafile(chap.path))
                chap.pages = len(list(scandir.scandir(chap.path)))
        else:
            # else assume that all images are in gallery folder
            chap = chap_container.create_chapter()
            chap.title = utils.title_parser(os.path.split(path)[1])['title']
            chap.path = path
            metafile.update(utils.GMetafile(path))
            chap.pages = len(list(scandir.scandir(path)))
    except NotADirectoryError:
        if path.endswith(utils.ARCHIVE_FILES):
            gallery_object.is_archive = 1
            log_i("Gallery source is an archive")
            archive_g = sorted(utils.check_archive(path))
            for g in archive_g:
                chap = chap_container.create_chapter()
                chap.path = g
                chap.in_archive = 1
                metafile.update(utils.GMetafile(g, path))
                arch = utils.ArchiveFile(path)
                chap.pages = len(arch.dir_contents(g))
                arch.close()
    metafile.apply_gallery(gallery_object)
    if add_to_model:
        self.SERIES.emit([gallery_object])
        log_d('Sent gallery to model')
def main(): print "" print " clean.py" print " Patrick Lazarus" print "" file_list = args.files + args.from_glob to_exclude = args.excluded_files + args.excluded_by_glob to_clean = utils.exclude_files(file_list, to_exclude) print "Number of input files: %d" % len(to_clean) # Read configurations for infn in to_clean: inarf = utils.ArchiveFile(infn) config.cfg.load_configs_for_archive(inarf) outfn = utils.get_outfn(args.outfn, inarf) shutil.copy(inarf.fn, outfn) outarf = utils.ArchiveFile(outfn) ar = outarf.get_archive() try: for name, cfgstrs in args.cleaner_queue: # Set up the cleaner cleaner = cleaners.load_cleaner(name) for cfgstr in cfgstrs: cleaner.parse_config_string(cfgstr) cleaner.run(ar) except: # An error prevented cleaning from being successful # Remove the output file because it may confuse the user #if os.path.exists(outfn): # os.remove(outfn) raise finally: ar.unload(outfn) print "Cleaned archive: %s" % outfn
def clean_archive(inarf, outfn, clean_re=None, *args, **kwargs):
    import psrchive  # Temporarily, because python bindings
                     # are not available on all computers
    if clean_re is None:
        clean_re = config.cfg.clean_strategy
    try:
        outfn = utils.get_outfn(outfn, inarf)
        shutil.copy(inarf.fn, outfn)

        outarf = utils.ArchiveFile(outfn)

        trim_edge_channels(outarf)
        prune_band(outarf)
        remove_bad_channels(outarf)
        remove_bad_subints(outarf)

        matching_cleaners = [clnr for clnr in cleaners
                             if clean_re and re.search(clean_re, clnr)]
        if len(matching_cleaners) == 1:
            ar = psrchive.Archive_load(outarf.fn)
            cleaner = eval(matching_cleaners[0])
            utils.print_info("Cleaning using '%s(...)'." % matching_cleaners[0], 2)
            cleaner(ar, *args, **kwargs)
            ar.unload(outfn)
        elif len(matching_cleaners) == 0:
            utils.print_info("No cleaning strategy selected. Skipping...", 2)
        else:
            raise errors.CleanError("Bad cleaner selection. "
                                    "'%s' has %d matches." %
                                    (clean_re, len(matching_cleaners)))
    except:
        # An error prevented cleaning from being successful
        # Remove the output file because it may confuse the user
        if os.path.exists(outfn):
            os.remove(outfn)
        raise
    return outarf
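# Hypothetical usage sketch (the file names and the regular expression are
# illustrative). 'clean_re' must match exactly one name in the module-level
# 'cleaners' list: zero matches skips cleaning, and multiple matches raise a
# CleanError, as implemented above.
def _example_clean_archive():
    inarf = utils.ArchiveFile("obs_1234.ar")
    outarf = clean_archive(inarf, "obs_1234.clean.ar", clean_re="deep")
    return outarf.fn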
def get_archives(arfns, sortkeys=['mjd', 'rcvr', 'name']):
    arfs = [utils.ArchiveFile(arfn) for arfn in arfns]
    for sortkey in sortkeys:
        if sortkey.endswith("_rev"):
            sortkey = sortkey[:-4]
            rev = True
            utils.print_info("Sorting (in reverse) by %s..." % sortkey, 2)
        else:
            rev = False
            utils.print_info("Sorting by %s..." % sortkey, 2)
        if utils.header_param_types.get(sortkey) == str:
            arfs.sort(key=lambda x: x[sortkey].lower(), reverse=rev)
        else:
            arfs.sort(key=lambda x: x[sortkey], reverse=rev)
    # Pre-process archives
    for arf in arfs:
        ar = arf.get_archive()
        ar.dedisperse()
        ar.remove_baseline()
        ar.fscrunch()
        ar.tscrunch()
        ar.pscrunch()
    return arfs
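# Hypothetical usage sketch (the file names are illustrative). Because the
# sorts are applied in sequence and Python's sort is stable, the last key in
# 'sortkeys' becomes the primary ordering; here the archives come back newest
# first (reversed 'mjd'), with ties broken by receiver name.
def _example_get_archives():
    arfs = get_archives(["obs_a.ar", "obs_b.ar"], sortkeys=["rcvr", "mjd_rev"])
    return [arf.fn for arf in arfs]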
def reduce_archives(self):
    """Group input files into sub-bands then remove the edges of
        each sub-band to remove the artifacts caused by aliasing.
        Finally, combine the sub-bands into a single output file.

        The combined sub-band files are not saved.

        Inputs:
            None

        Outputs:
            cleanarfs: The cleaned ArchiveFile objects.
            toastrs: TOA strings (None if TOAs are not generated).
    """
    if len(self.infns) > 1:
        combinearfs = combine.combine_all(self.infns, self.basenm + ".cmb")
    else:
        combinearfs = self.infns
    cleanarfs = []
    toastrs = []
    for combinearf in combinearfs:
        if self.is_asterix:
            # Correct the file header
            combinearf = utils.correct_asterix_header(combinearf)
        # Reload configurations
        config.cfg.load_configs_for_archive(combinearf)
        # Create diagnostic plots for pre-cleaned data
        combinearf.get_archive().update()
        diagnose.make_composite_summary_plot_psrplot(combinearf)
        preproc = 'C,D,B 128,F 32'
        if combinearf['nsub'] > 32:
            preproc += ",T 32"
        diagnose.make_composite_summary_plot_psrplot(combinearf, preproc,
                                                     combinearf.fn + ".scrunched.ps")

        # Clean the data
        utils.print_info("Cleaning %s" % combinearf.fn, 1)
        # Load cleaners here because each data file might
        # have different configurations. The configurations
        # are set when the cleaner is loaded.
        cleaner_queue = [cleaners.load_cleaner('rcvrstd'),
                         cleaners.load_cleaner('surgical')]
        try:
            ar = combinearf.get_archive()
            for cleaner in cleaner_queue:
                cleaner.run(ar)
        except:
            # An error prevented cleaning from being successful
            # Remove the output file because it may confuse the user
            if os.path.exists(self.outfn):
                os.remove(self.outfn)
            raise
        finally:
            combinearf.get_archive().unload(self.outfn)
        cleanarf = utils.ArchiveFile(self.outfn)

        # Re-create diagnostic plots for cleaned data
        diagnose.make_composite_summary_plot_psrplot(cleanarf)
        preproc = 'C,D,B 128,F 32'
        if cleanarf['nsub'] > 32:
            preproc += ",T 32"
        diagnose.make_composite_summary_plot_psrplot(cleanarf, preproc,
                                                     cleanarf.fn + ".scrunched.ps")
        cleanarfs.append(cleanarf)
        if self.maketoas:
            # Make TOAs
            utils.print_info("Generating TOAs", 1)
            stdfn = toas.get_standard(cleanarf)
            if not os.path.isfile(stdfn):
                raise errors.NoStandardProfileError("The standard profile (%s) "
                                                    "cannot be found!" % stdfn)
            utils.print_info("Standard profile: %s" % stdfn, 2)
            toastrs.extend(toas.get_toas(cleanarf, stdfn))
        else:
            toastrs = None
    return cleanarfs, toastrs
def create_gallery(self, path, folder_name, do_chapters=True, archive=None):
    is_archive = True if archive else False
    temp_p = archive if is_archive else path
    folder_name = folder_name or path if folder_name or path else os.path.split(archive)[1]
    if utils.check_ignore_list(temp_p) and not GalleryDB.check_exists(temp_p, self.galleries_from_db, False):
        log_i('Creating gallery: {}'.format(folder_name.encode('utf-8', 'ignore')))
        new_gallery = Gallery()
        images_paths = []
        metafile = utils.GMetafile()
        try:
            con = scandir.scandir(temp_p)  # all of content in the gallery folder
            log_i('Gallery source is a directory')
            chapters = sorted([sub.path for sub in con
                               if sub.is_dir() or sub.name.endswith(utils.ARCHIVE_FILES)]) \
                       if do_chapters else []  # subfolders
            # if gallery has chapters divided into sub folders
            numb_of_chapters = len(chapters)
            if numb_of_chapters != 0:
                log_i('Gallery has {} chapters'.format(numb_of_chapters))
                for ch in chapters:
                    chap = new_gallery.chapters.create_chapter()
                    chap.title = utils.title_parser(ch)['title']
                    chap.path = os.path.join(path, ch)
                    chap.pages = len([x for x in scandir.scandir(chap.path)
                                      if x.name.endswith(utils.IMG_FILES)])
                    metafile.update(utils.GMetafile(chap.path))
            else:
                # else assume that all images are in gallery folder
                chap = new_gallery.chapters.create_chapter()
                chap.title = utils.title_parser(os.path.split(path)[1])['title']
                chap.path = path
                metafile.update(utils.GMetafile(chap.path))
                chap.pages = len(list(scandir.scandir(path)))
            parsed = utils.title_parser(folder_name)
        except NotADirectoryError:
            try:
                if is_archive or temp_p.endswith(utils.ARCHIVE_FILES):
                    log_i('Gallery source is an archive')
                    contents = utils.check_archive(temp_p)
                    if contents:
                        new_gallery.is_archive = 1
                        new_gallery.path_in_archive = '' if not is_archive else path
                        if folder_name.endswith('/'):
                            folder_name = folder_name[:-1]
                        fn = os.path.split(folder_name)
                        folder_name = fn[1] or fn[0]
                        folder_name = folder_name.replace('/', '')
                        if folder_name.endswith(utils.ARCHIVE_FILES):
                            n = folder_name
                            for ext in utils.ARCHIVE_FILES:
                                n = n.replace(ext, '')
                            parsed = utils.title_parser(n)
                        else:
                            parsed = utils.title_parser(folder_name)
                        if do_chapters:
                            archive_g = sorted(contents)
                            if not archive_g:
                                log_w('No chapters found for {}'.format(temp_p.encode(errors='ignore')))
                                raise ValueError
                            for g in archive_g:
                                chap = new_gallery.chapters.create_chapter()
                                chap.in_archive = 1
                                chap.title = parsed['title'] if not g else \
                                    utils.title_parser(g.replace('/', ''))['title']
                                chap.path = g
                                metafile.update(utils.GMetafile(g, temp_p))
                                arch = utils.ArchiveFile(temp_p)
                                chap.pages = len([x for x in arch.dir_contents(g)
                                                  if x.endswith(utils.IMG_FILES)])
                                arch.close()
                        else:
                            chap = new_gallery.chapters.create_chapter()
                            chap.title = utils.title_parser(os.path.split(path)[1])['title']
                            chap.in_archive = 1
                            chap.path = path
                            metafile.update(utils.GMetafile(path, temp_p))
                            arch = utils.ArchiveFile(temp_p)
                            chap.pages = len(arch.dir_contents(''))
                            arch.close()
                    else:
                        raise ValueError
                else:
                    raise ValueError
            except ValueError:
                log_w('Skipped {} in local search'.format(path.encode(errors='ignore')))
                self.skipped_paths.append((temp_p, 'Empty archive'))
                return
            except app_constants.CreateArchiveFail:
                log_w('Skipped {} in local search'.format(path.encode(errors='ignore')))
                self.skipped_paths.append((temp_p, 'Error creating archive'))
                return
            except app_constants.TitleParsingError:
                log_w('Skipped {} in local search'.format(path.encode(errors='ignore')))
                self.skipped_paths.append((temp_p, 'Error while parsing folder/archive name'))
                return
        new_gallery.title = parsed['title']
        new_gallery.path = temp_p
        new_gallery.artist = parsed['artist']
        new_gallery.language = parsed['language']
        new_gallery.info = ""
        new_gallery.view = app_constants.ViewType.Addition
        metafile.apply_gallery(new_gallery)
        if app_constants.MOVE_IMPORTED_GALLERIES and not app_constants.OVERRIDE_MOVE_IMPORTED_IN_FETCH:
            new_gallery.move_gallery()
        self.LOCAL_EMITTER.emit(new_gallery)
        self._data.append(new_gallery)
        log_i('Gallery successfully created: {}'.format(folder_name.encode('utf-8', 'ignore')))
        return True
    else:
        log_i('Gallery already exists or ignored: {}'.format(folder_name.encode('utf-8', 'ignore')))
        self.skipped_paths.append((temp_p, 'Already exists or ignored'))
        return False
import sys
import subprocess

import toas
import utils

for fn in sys.argv[1:]:
    arf = utils.ArchiveFile(fn)
    stdfn = toas.get_standard(arf)
    basefn = stdfn[:-4]
    mfn = basefn + ".m"
    txtfn = basefn + ".txt"
    cmd = "paas -w %s -D -s %s -j %s -i %s" % (mfn, stdfn, txtfn, fn)
    subprocess.call(cmd, shell=True)
#!/usr/bin/env python
import sys
import os.path

import utils

if "-h" in sys.argv or "--help" in sys.argv or len(sys.argv) < 3:
    sys.stderr.write("Usage: %s OUTNAME INFILE\n" %
                     os.path.split(sys.argv[0])[-1])
    sys.exit(1)

arf = utils.ArchiveFile(sys.argv[2])
print utils.get_outfn(sys.argv[1], arf)
def main():
    for arfn in args:
        print "Plotting %s" % arfn,
        arf = utils.ArchiveFile(arfn)
        diagnose.make_composite_summary_plot(arf, options.outpsfn)
        print " Done"