def zap(self, arfn, reset_weights=False):
    self.setWindowTitle("Zapping %s..." % os.path.basename(arfn))
    # Create temporary file for output
    tmpdir = os.path.join(config.tmp_directory, 'qctrl')
    if not os.path.exists(tmpdir):
        os.makedirs(tmpdir)
    tmpfile, tmpoutfn = tempfile.mkstemp(suffix=".ar", dir=tmpdir)
    os.close(tmpfile)  # Close open file handle
    try:
        success = self.__launch_zapping(arfn, tmpoutfn, reset_weights)
        if success:
            arf = utils.ArchiveFile(arfn)
            archivedir = os.path.join(config.output_location,
                                      config.output_layout) % arf
            # Append .zap to filename
            archivefn = (os.path.basename(arfn) + ".zap") % arf
            outfn = os.path.join(archivedir, archivefn)
            # Ensure group has write permission to this file
            # NOT SURE THIS IS NECESSARY
            #utils.add_group_permissions(tmpoutfn, 'w')
            shutil.move(tmpoutfn, outfn)
            return outfn
        else:
            return None
    finally:
        if os.path.exists(tmpdir):
            shutil.rmtree(tmpdir)
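
# A minimal sketch (not part of the original pipeline) of the temp-file
# idiom used in `zap` above: tempfile.mkstemp returns an OS-level file
# descriptor plus a path, and the descriptor is closed right away so that
# only the path is handed to the external zapping tool.
def _example_make_tmp_output(tmpdir):
    fd, tmpoutfn = tempfile.mkstemp(suffix=".ar", dir=tmpdir)
    os.close(fd)  # close the handle; callers only need the file name
    return tmpoutfn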
def zap_file_manually(self, reset_weights=False):
    arfn = os.path.join(self.fileinfo['filepath'],
                        self.fileinfo['filename'])
    arf = utils.ArchiveFile(arfn)
    zapdialog = ZappingDialog()
    zapdialog.show()
    # This blocks input to the main quality control window
    out = zapdialog.zap(arfn, reset_weights)
    if out is not None and os.path.isfile(out):
        # Successful! Insert entry into DB.
        outdir, outfn = os.path.split(out)
        values = {'filepath': outdir,
                  'filename': outfn,
                  'stage': 'cleaned',
                  'note': "Manually zapped",
                  'qcpassed': None,
                  'status': 'new',
                  'snr': utils.get_archive_snr(out),
                  'md5sum': utils.get_md5sum(out),
                  'coords': self.fileinfo['coords'],
                  'ephem_md5sum': self.fileinfo['ephem_md5sum'],
                  'filesize': os.path.getsize(out),
                  'parent_file_id': self.file_id}
        if self.fileinfo['stage'] == 'calibrated':
            values['stage'] = 'calibrated'
            values['cal_file_id'] = self.fileinfo['cal_file_id']
        with self.db.transaction() as conn:
            version_id = utils.get_version_id(self.db)
            # Insert new entry
            insert = self.db.files.insert().\
                        values(version_id=version_id,
                               obs_id=self.fileinfo['obs_id'])
            result = conn.execute(insert, values)
            file_id = result.inserted_primary_key[0]
            # Update parent file's entry
            update = self.db.files.update().\
                        where(self.db.files.c.file_id == self.file_id).\
                        values(qcpassed=False,
                               status='replaced',
                               note="File had to be cleaned by hand.",
                               last_modified=datetime.datetime.now())
            conn.execute(update)
            # Update current file for observation
            update = self.db.obs.update().\
                        where(self.db.obs.c.obs_id == self.fileinfo['obs_id']).\
                        values(current_file_id=file_id,
                               last_modified=datetime.datetime.now())
            conn.execute(update)
        self.advance_file()
def main():
    print ""
    print "    clean.py"
    print "    Patrick Lazarus"
    print ""
    file_list = args.files + args.from_glob
    to_exclude = args.excluded_files + args.excluded_by_glob
    to_clean = utils.exclude_files(file_list, to_exclude)
    print "Number of input files: %d" % len(to_clean)
    # Read configurations
    for infn in to_clean:
        inarf = utils.ArchiveFile(infn)
        config.cfg.load_configs_for_archive(inarf)
        outfn = utils.get_outfn(args.outfn, inarf)
        shutil.copy(inarf.fn, outfn)
        outarf = utils.ArchiveFile(outfn)
        ar = outarf.get_archive()
        try:
            for name, cfgstrs in args.cleaner_queue:
                # Set up the cleaner
                cleaner = cleaners.load_cleaner(name)
                for cfgstr in cfgstrs:
                    cleaner.parse_config_string(cfgstr)
                cleaner.run(ar)
        except:
            # An error prevented cleaning from being successful.
            # Remove the output file because it may confuse the user.
            #if os.path.exists(outfn):
            #    os.remove(outfn)
            raise
        finally:
            ar.unload(outfn)
            print "Cleaned archive: %s" % outfn
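
# A hedged sketch of what `args.cleaner_queue` holds, as consumed by the
# loop above: a list of (cleaner name, config strings) pairs. The cleaner
# names 'preclean' and 'surgical' appear elsewhere in this code base; the
# config string shown is a hypothetical placeholder.
_EXAMPLE_CLEANER_QUEUE = [
    ('preclean', []),                    # no extra configuration
    ('surgical', ['chan_numpieces=1']),  # hypothetical config string
]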
def run_coastguard(calname, cal_dz):
    #calname = sys.argv[1]
    #cal_dz = sys.argv[2]
    print calname
    inarf = utils.ArchiveFile(calname)
    config.cfg.load_configs_for_archive(inarf)
    outfn = utils.get_outfn(cal_dz, inarf)
    shutil.copy(inarf.fn, outfn)
    outarf = utils.ArchiveFile(outfn)
    ar = outarf.get_archive()
    cleaner = cleaners.load_cleaner('preclean')  # hard-coded; should be configurable
    #for cfgstr in cfgstrs:
    #    cleaner.parse_config_string(cfgstr)
    cleaner.run(ar)
    cleaner = cleaners.load_cleaner('surgical')  # hard-coded; should be configurable
    cleaner.run(ar)
    ar.unload(str(outfn))
    print "Cleaned archive: %s" % outfn
def correct_header(arfn, obsinfo=None, outfn=None, backend='asterix',
                   receiver=None):
    """Correct header of asterix data in place.

        Input:
            arfn: The name of the input archive file.
            obsinfo: A dictionary of observing log information to use.
                (Default: search observing logs for matching entry)
            outfn: Output file name.
                (Default: same as input file name, but with .corr extension)
            backend: Override backend name with this value.
                (Default: asterix)
            receiver: Override receiver name with this value.
                (Default: Determine receiver automatically)

        Output:
            corrfn: The name of the corrected file.
            corrstr: The parameter string of corrections used with psredit.
            note: A note about header correction.
    """
    corrstr, note = get_correction_string(arfn, obsinfo,
                                          receiver=receiver,
                                          backend=backend)
    # Correct the file using 'psredit'
    utils.execute(['psredit', '-e', 'corr', '-c', corrstr, arfn],
                  stderr=open(os.devnull, 'w'))
    # Assume the name of the corrected file
    corrfn = os.path.splitext(arfn)[0] + ".corr"
    # Confirm that our assumed file name is accurate
    if not os.path.isfile(corrfn):
        raise errors.HeaderCorrectionError("The corrected file (%s) "
                                           "does not exist!" % corrfn)
    # Rename output file
    if outfn is not None:
        arf = utils.ArchiveFile(corrfn)
        fn = outfn % arf
        shutil.move(corrfn, fn)
        corrfn = fn
    return corrfn, corrstr, note
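
# A hedged usage sketch of `correct_header`. The file name is hypothetical;
# the receiver value 'P217-3' is one of the names recognized elsewhere in
# this module, and the "%(name)s"-style output template assumes ArchiveFile
# supports dict-style string interpolation, as the `outfn % arf` line above
# implies.
def _example_correct_header():
    corrfn, corrstr, note = correct_header("example_obs.ar",
                                           receiver='P217-3',
                                           outfn="%(name)s.corr")
    print "Corrected file: %s" % corrfn
    print "psredit parameters: %s" % corrstr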
def clean_archive(inarf, outfn, clean_re=None, *args, **kwargs):
    # Imported here temporarily, because the python bindings
    # are not available on all computers.
    import psrchive
    if clean_re is None:
        clean_re = config.cfg.clean_strategy
    try:
        outfn = utils.get_outfn(outfn, inarf)
        shutil.copy(inarf.fn, outfn)
        outarf = utils.ArchiveFile(outfn)
        trim_edge_channels(outarf)
        prune_band(outarf)
        remove_bad_channels(outarf)
        remove_bad_subints(outarf)
        matching_cleaners = [clnr for clnr in cleaners
                             if clean_re and re.search(clean_re, clnr)]
        if len(matching_cleaners) == 1:
            ar = psrchive.Archive_load(outarf.fn)
            cleaner = eval(matching_cleaners[0])
            utils.print_info("Cleaning using '%s(...)'." %
                             matching_cleaners[0], 2)
            cleaner(ar, *args, **kwargs)
            ar.unload(outfn)
        elif len(matching_cleaners) == 0:
            utils.print_info("No cleaning strategy selected. Skipping...", 2)
        else:
            raise errors.CleanError("Bad cleaner selection. "
                                    "'%s' has %d matches." %
                                    (clean_re, len(matching_cleaners)))
    except:
        # An error prevented cleaning from being successful.
        # Remove the output file because it may confuse the user.
        if os.path.exists(outfn):
            os.remove(outfn)
        raise
    return outarf
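
# A minimal, self-contained sketch of the regex-based cleaner selection
# used above: a pattern must match exactly one of the available cleaner
# names. The names in the default argument are hypothetical stand-ins.
def _select_cleaner(clean_re, available=('clean_simple', 'clean_hotbins')):
    matches = [name for name in available
               if clean_re and re.search(clean_re, name)]
    if len(matches) != 1:
        raise ValueError("'%s' has %d matches; expected exactly 1" %
                         (clean_re, len(matches)))
    return matches[0]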
def main():
    print ""
    print "    calibrate.py"
    print "    Patrick Lazarus"
    print ""
    if len(args.files):
        print "Number of input files: %d" % len(args.files)
    else:
        raise errors.InputError("No files to calibrate!")
    if args.caldb is None:
        # Prepare to fetch caldb info from the pipeline database
        db = database.Database()
    else:
        caldb = args.caldb
    for fn in args.files:
        if args.caldb is None:
            arf = utils.ArchiveFile(fn)
            caldb = update_caldb(db, arf['name'], force=True)
        calfn = calibrate(fn, caldb)
def get_correction_string(arfn, obsinfo=None, backend='asterix',
                          receiver=None, fixcoords=False):
    """Get psredit command string that will correct the file header.

        Input:
            arfn: The name of the input archive file.
            obsinfo: A dictionary of observing log information to use.
                (Default: search observing logs for matching entry)
            backend: Override backend name with this value.
                (Default: asterix)
            receiver: Override receiver name with this value.
                (Default: Determine receiver automatically)
            fixcoords: Force fixing of coordinates.
                (Default: Don't bother if they seem to be correct)

        Output:
            corrstr: The parameter string of corrections used with psredit.
            note: A note about header correction.
    """
    note = ""
    # Load archive
    arf = utils.ArchiveFile(arfn)
    if receiver is None:
        rcvr = determine_receiver(arf)
    elif receiver in ('P217-3', 'P200-3', 'S110-1', 'S60-2', 'S36-5'):
        rcvr = receiver
    else:
        raise ValueError("Receiver provided (%s) is not recognized." %
                         receiver)
    if arf['rcvr'] != rcvr:
        note += "Receiver is wrong (%s); setting to '%s'. " % \
                (arf['rcvr'], rcvr)
    corrstr = "%s,be:name=%s" % (RCVR_INFO[rcvr], backend)
    if fixcoords or (obsinfo is not None) or arf['name'].endswith('_R') or \
            arf['ra'].startswith('00:00:00'):
        try:
            if obsinfo is None:
                # Search for observing log entry
                obsinfo = get_obslog_entry(arf, tolerant=True)
                utils.print_debug("Information from matching observing "
                                  "log line:\n%s" %
                                  pprint.pformat(obsinfo), 'correct')
            rastr, decstr = get_coordinates(arf, obsinfo)
        except errors.HeaderCorrectionError as exc:
            note += exc.get_message() + "\n(Could not correct coordinates)"
            raise
        else:
            corrstr += ",coord=%s%s" % (rastr, decstr)
    else:
        note += "No reason to correct coords."
    if obsinfo is not None:
        name = obsinfo['name']
        corrstr += ",name=%s" % obsinfo['name']
    else:
        name = arf['name']
    if name.endswith("_R"):
        # Calibration diode was fired.
        # Observation could be pol-cal scan or flux-cal scan.
        if any([name.startswith(fluxcal) for fluxcal
                in utils.read_fluxcal_names(config.fluxcal_cfg)]):
            # Flux calibrator
            if name.endswith("_S_R") or name.endswith("_N_R"):
                corrstr += ",type=FluxCal-Off"
            elif name.endswith("_O_R"):
                corrstr += ",type=FluxCal-On"
        else:
            # Polarization calibrator
            corrstr += ",type=PolnCal"
    else:
        corrstr += ",type=Pulsar"
    return corrstr, note
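
# A hedged illustration of the kind of parameter string this function
# returns. The comma-separated "param=value" form follows the construction
# above; the specific rcvr:* parameter (drawn from the RCVR_INFO lookup),
# the coordinates, and the source name are hypothetical.
_EXAMPLE_CORRSTR = ("rcvr:name=P217-3,be:name=asterix,"
                    "coord=06:13:43.9-02:00:47.2,"
                    "name=J0613-0200,type=Pulsar")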
def get_start_from_singlepulse(single):
    arf = utils.ArchiveFile(single)
    return arf.datetime
def combine_subints(subdirs, subints, parfn=None, outdir=None):
    """Combine sub-ints from various freq sub-band directories.
        The input lists are as created by 'group_subband_dirs'
        or read-in by 'read_listing'.

        Inputs:
            subdirs: List of sub-band directories containing
                sub-ints to combine.
            subints: List of subint files to be combined.
                (NOTE: These are the file name only (i.e. no path).
                    Each file listed should appear in each of the subdirs.)
            parfn: New ephemeris to install when combining subints.
                (Default: Use ephemeris in archive file's header)
            outdir: Directory to output combined file.
                (Default: Current working directory)

        Output:
            outfn: The name of the combined file.
    """
    if outdir is None:
        outdir = os.getcwd()
    subints = sorted(subints)
    tmpdir = tempfile.mkdtemp(suffix="_combine", dir=config.tmp_directory)
    devnull = open(os.devnull, 'w')
    try:
        cmbsubints = []
        # Try to normalise the archive's parfile
        try:
            if parfn is None:
                arfn = os.path.join(subdirs[0], subints[0])
                normparfn = utils.get_norm_parfile(arfn)
            else:
                normparfn = utils.normalise_parfile(parfn)
        except errors.InputError:
            # No parfile present
            parargs = []
        else:
            parargs = ['-E', normparfn]
        utils.print_info("Adding freq sub-bands for each sub-int...", 2)
        for ii, subint in enumerate(utils.show_progress(subints, width=50)):
            to_combine = [os.path.join(path, subint) for path in subdirs]
            outfn = os.path.join(tmpdir, "combined_%s" % subint)
            cmbsubints.append(outfn)
            utils.execute(['psradd', '-q', '-R', '-o', outfn] +
                          parargs + to_combine, stderr=devnull)
        arf = utils.ArchiveFile(os.path.join(tmpdir,
                                             "combined_%s" % subints[0]))
        outfn = os.path.join(outdir, "%s_%s_%s_%05d_%dsubints.cmb" %
                             (arf['name'], arf['band'], arf['yyyymmdd'],
                              arf['secs'], len(subints)))
        utils.print_info("Combining %d sub-ints..." % len(cmbsubints), 1)
        utils.execute(['psradd', '-q', '-o', outfn] + cmbsubints,
                      stderr=devnull)
    finally:
        if debug.is_on('reduce'):
            warnings.warn("Not cleaning up temporary directory (%s)" %
                          tmpdir, errors.CoastGuardWarning)
        else:
            utils.print_info("Removing temporary directory (%s)" % tmpdir, 2)
            shutil.rmtree(tmpdir)
    return outfn
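
# A hedged usage sketch of `combine_subints`. The directory and file names
# are hypothetical; the layout assumes each sub-band directory holds
# identically-named sub-int files, as the docstring above requires.
def _example_combine_subints():
    subdirs = ['band_1200MHz', 'band_1400MHz']      # hypothetical
    subints = ['subint_0000.ar', 'subint_0001.ar']  # hypothetical
    outfn = combine_subints(subdirs, subints, outdir='/tmp')
    print "Combined archive: %s" % outfn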