if args.obslog_line is not None: obsinfo = parse_obslog_line(args.obslog_line) else: obsinfo = None for fn in args.files: corrfn, corrstr, note = correct_header(fn, obsinfo=obsinfo, outfn=args.outfn, backend=args.backend_name) print " Output corrected file: %s" % corrfn print " Notes: %s" % note if __name__ == '__main__': parser = utils.DefaultArguments(description="Correct header of Asterix " \ "data files.") parser.add_argument('files', nargs='*', help="Files to correct.") parser.add_argument('--obslog-line', dest='obslog_line', type=str, \ help="Line from observing log to use. " \ "(Default: search observing logs for " \ "the appropriate line.)") parser.add_argument('-b', '--backend-name', dest='backend_name', type=str, \ help="Name of backend to use. (Default: 'asterix')", \ default='asterix') parser.add_argument('-o', '--outname', dest='outfn', type=str, \ help="The output (reduced) file's name. " \ "(Default: '%s.corr')" % \ config.outfn_template.replace("%", "%%"), \ default=config.outfn_template+".corr") args = parser.parse_args() main()
"No maser file found for MJD %d\n" % (mjd, imjd)) except NoMaserData: outfile.write("# Cannot determine clock correction for MJD %g: " \ "No maser data parsed from file for MJD %d\n" % (mjd, imjd)) except ValueError, exc: outfile.write("# Cannot determine clock correction for MJD %g: " \ "%s\n" % (mjd, str(exc))) # Write clock correction for far in future (to allow for extrapolation?) outfile.write("60000.00000 0.00000e+00\n") if args.outfn is not None: outfile.close() if __name__ == '__main__': parser = utils.DefaultArguments(description="Write a clock correction " "file for Effelsberg.") parser.add_argument("-o", dest='outfn', default=None, help="Output file. (Default: stdout)") parser.add_argument("-s", "--start-mjd", dest='start_mjd', type=int, default=55562, help="MJD for start of clock correction file. " "(Default: 55562 - i.e. Jan. 1, 2011, " "the year Asterix was installed)") parser.add_argument("-e", "--end-mjd", dest='end_mjd',
def main(): db = database.Database() with db.transaction() as conn: select = db.select([db.files]).\ where((db.files.c.snr == None) & (db.files.c.is_deleted == False) & (db.files.c.stage != 'grouped')).\ order_by(db.files.c.added.desc()) result = conn.execute(select) rows = result.fetchall() result.close() for row in utils.show_progress(rows, width=50, tot=len(rows)): fn = os.path.join(row['filepath'], row['filename']) try: snr = utils.get_archive_snr(fn) except Exception, e: sys.stderr.write("Error when computing SNR of %s." "%s" % (fn, str(e))) else: update = db.files.update().\ values(snr=snr).\ where(db.files.c.file_id == row['file_id']) result = conn.execute(update) result.close() if __name__ == '__main__': parser = utils.DefaultArguments(description="Add SNR to files where" "it is missing.") args = parser.parse_args() main()
# Remove fit-flags for 'outff2' #outff2.write(" ".join(line.split()[:2])+'\n') outff.write("\n".join(EXTRA_PARFILE_LINES)) #outff2.write("\n".join(["JUMP -sys EFF.AS.%s.CL 0 1" % rcvr for rcvr in RCVRS])) #outff2.write("\nNITS 3\n") # Create a master timfile master_timfn = "%s_all.tim" % psrname with open(master_timfn, 'w') as ff: for timfn in timfns: ff.write("INCLUDE %s\n" % timfn) utils.print_info("Wrote out master timfile: %s" % master_timfn) if __name__ == '__main__': parser = utils.DefaultArguments(description="Check timing of a pulsar.") parser.add_argument('-p', '--psr', dest='psrname', type=str, required=True, help='Name of the pulsar to fetch files for.') parser.add_argument('-E', '--parfile', dest='parfile', type=str, help="Parfile to prepare for checking timing." "(Default: use parfile from %s" % PARFILE_DIR) parser.add_argument('--nchan', dest='nchan', type=int,
conn.execute(delete) results.close() # Remove directories entries print "Removing directories rows" for row in utils.show_progress(dirrows, width=50, tot=len(dirrows)): dir_id = row['dir_id'] delete = db.directories.delete().\ where(db.directories.c.dir_id == dir_id) conn.execute(delete) results.close() if __name__ == '__main__': parser = utils.DefaultArguments(description="Delete recent files.") parser.add_argument("-n", "--dryrun", dest="dryrun", action="store_true", help="Show some information and do not delete " "files or database rows. " "(Default: delete files/rows)") parser.add_argument("-p", "--psr", dest="psr", required=True, help="Pulsar for which to find entries to delete.") args = parser.parse_args() main()
dest = os.path.join(backupdir, fn) if os.path.isfile(dest) and not os.path.isfile(src): shutil.move(dest, src) if os.path.isdir(backupdir): try: os.remove(os.path.join(backupdir, "db_entries.sql")) os.rmdir(backupdir) except: print "Could not remove back-up dir %s" % backupdir raise else: print "Successfully reseted obs ID: %d" % obs_id if __name__ == '__main__': parser = utils.DefaultArguments( description="Reset observation to be reprocessed.") parser.add_argument("--obs-id", dest='obs_id', type=int, help="ID of observation to set for reprocessing.") parser.add_argument("-n", "--dry-run", action='store_true', dest='dryrun', help="Don't actually remove database entries or " "move files. (Default: remove and move)") args = parser.parse_args() main()
fn = os.path.basename(src) dest = os.path.join(backupdir, fn) if os.path.isfile(dest) and not os.path.isfile(src): shutil.move(dest, src) if os.path.isdir(backupdir): try: os.rmdir(backupdir) except: print "Could not remove back-up dir %s" % backupdir raise else: print "Successfully scrubbed %s (ID: %d)" % (dir_toremove, dir_id) if __name__ == '__main__': parser = utils.DefaultArguments(description="Remove directory, processed " "files and database entries.") dirgroup = parser.add_mutually_exclusive_group() dirgroup.add_argument("--dir-id", dest='dir_id', type=int, help="ID of directory to remove's database entry.") dirgroup.add_argument("--dir", dest="dir", type=str, help="Raw data directory whose database entries " "should be removed. Note: the raw data " "nor its directory will _not_ be removed.") parser.add_argument("-n", "--dry-run", action='store_true', dest='dryrun', help="Don't actually remove database entries or " "move files. (Default: remove and move)") args = parser.parse_args() main()
trimpcnt=6.25) outfn = combine_subints(preppeddirs, subints, outdir=os.getcwd()) outfns.append(outfn) if args.write_listing: write_listing(usedirs, subints, "list.txt") shutil.rmtree(tmpdir) if outfns: print "Created %d combined files" % len(outfns) for outfn in outfns: print " %s" % outfn if __name__ == "__main__": parser = utils.DefaultArguments(usage="%(prog)s [OPTIONS] DIRS-TO-COMBINE", \ description="Given a list of frequency sub-band " \ "directories containing sub-ints to " \ "combine, group them and create " \ "combined archives.") parser.add_argument('subdirs', nargs='*', help="Sub-band directories " \ "containing subints to combine.") parser.add_argument( '-f', '--group-file', dest='group_file', type=str, help="Combine files/directories listed in group file. " "These files can be output by combine.py. " "(Default: Combine directories listed on command line.)") # parser.add_argument('-o', '--outname', dest='outfn', type=str, \ # help="The output (combined) file's name. " \ # "(Default: '%%(name)s_%%(yyyymmdd)s_%%(secs)05d_combined.ar')", \
if len(args.files): print "Number of input files: %d" % len(args.files) else: raise errors.InputError("No files to calibrate!") if args.caldb is None: # Prepare to fetch caldb info from the pipeline database db = database.Database() else: caldb = args.caldb for fn in args.files: if args.caldb is None: arf = utils.ArchiveFile(fn) caldb = update_caldb(db, arf['name'], force=True) calfn = calibrate(fn, caldb) #print " Output calibrated file: %s" % calfn if __name__ == '__main__': parser = utils.DefaultArguments(description="Calibrate Asterix " \ "data files.") parser.add_argument('files', nargs='*', help="Files to calibrate.") parser.add_argument('--caldb', dest='caldb', type=str, \ help="Calibrator database to use. " \ "(Default: use the database for this " \ "pulsar from the pipeline)", \ default=None) args = parser.parse_args() main()
def main(): if args.outdir is None: outdir = os.getcwd() else: outdir = args.outdir psrname = utils.get_prefname(args.psr) stdfn = make_template(outdir, psrname, args.stage, args.rcvr, args.max_span, args.min_snr) print "Made template: %s", stdfn if __name__ == '__main__': parser = utils.DefaultArguments(description="Combine multiple files close " "in MJD to create a high-SNR " "profile to generate a template " "using paas") parser.add_argument('-p', '--psr', dest='psr', type=str, required=True, help="The pulsar to create a template for.") parser.add_argument('--rcvr', dest='rcvr', type=str, required=True, help="The name of the receiver for " "which to make a template.") parser.add_argument( '-C',
#!/usr/bin/env python from coast_guard import calibrate from coast_guard import database from coast_guard import utils def main(): db = database.Database() caldbfn = calibrate.update_caldb(db, args.sourcename, force=True) print "Updated %s" % caldbfn if __name__ == '__main__': parser = utils.DefaultArguments(description="Forcefully update " \ "calibrator database for a given source.") parser.add_argument("-n", "--sourcename", dest='sourcename', type=str, \ help="Name of source for which to update calibrator database.") args = parser.parse_args() main()
"Stage: %(stage)s; " "QC passed: %(qcpassed)s" % row, 2) info['Total'] = info.get('Total', 0) + 1 info['QC Passed'] = info.get('QC Passed', 0) + int( bool(row['qcpassed'])) info['Status %s' % row['status']] = \ info.get('Status %s' % row['status'], 0)+1 info['Stage %s' % row['stage']] = \ info.get('Stage %s' % row['stage'], 0)+1 utils.print_info( "Summary:\n %s" % "\n ".join(["%s: %d" % xx for xx in info.iteritems()]), 1) if __name__ == '__main__': parser = utils.DefaultArguments(description="List files generated by " "the automated pipeline.") parser.add_argument('-p', '--psr', dest='psrnames', type=str, action='append', help="The pulsar to grab files for. " "NOTE: Multiple '-p'/'--psr' options may be given") parser.add_argument('--type', dest='type', type=str, help='Type of files to list. Options are:' '%s' % sorted(set(FILETYPE_TO_WHERE.keys()))) parser.add_argument('-F', '--file-id', action='append',
utils.print_info( "Will retry calibration of file %d" % row['file_id'], 1) if args.retry: for name in psrnameset: try: reduce_data.reattempt_calibration(db, name) calibrate.update_caldb(db, name, force=True) except: pass if __name__ == '__main__': parser = utils.DefaultArguments(description="For each matching file print " "if it can be polarization " "calibrated (i.e. sufficient " "calibration scans are registered " "in the database)") parser.add_argument('-p', '--psr', dest='psrnames', type=str, action='append', help="The pulsar to grab files for. " "NOTE: Multiple '-p'/'--psr' options may be given") parser.add_argument('--sort', dest='sortkeys', metavar='SORTKEY', \ action='append', default=['added'], \ help="DB column to sort raw data files by. Multiple " \ "--sort options can be provided. Options " \ "provided later will take precedent " \ "over previous options. (Default: Sort " \
c='k') plt.ylim(0, 30) plt.xlim(0, 20) plt.subplot(2, 2, ii + 2) plt.scatter(data['detect' + rx][0], data['detect' + rx][1], marker='o', c='k') plt.ylim(0, 30) plt.xlim(0, 20) plt.suptitle(psrname, fontsize=14) plt.show() if __name__ == '__main__': parser = utils.DefaultArguments( description="Check detections for a pulsar.") parser.add_argument('-p', '--psr', dest='psrname', type=str, required=True, help='Name of the pulsar to check.') # parser.add_argument('--file', dest='psrlist', type=str, # required=True, # help='sfsfdf') parser.add_argument('--files', dest='psrfile', type=str, help="gfger") args = parser.parse_args() main()
def main():
    """Launch the quality-control GUI and block until the window closes,
    propagating the Qt event-loop exit code to the shell.
    """
    app = qtgui.QApplication(sys.argv)
    qctrl_win = QualityControl(priorities=args.priority,
                               stage=args.stage,
                               re_eval=args.re_eval)
    qctrl_win.get_files_to_check()
    # Display the window
    qctrl_win.show()
    exitcode = app.exec_()
    sys.exit(exitcode)


if __name__ == "__main__":
    parser = utils.DefaultArguments(description="Quality control interface "
                                                "for Asterix data.")
    parser.add_argument("--prioritize", action='append',
                        default=[], dest='priority',
                        help="A rule for prioritizing observations.")
    parser.add_argument('-C', "--calibrated", dest='stage',
                        action='store_const', default='cleaned',
                        const='calibrated',
                        help="Review calibrated pulsar observations.")
    parser.add_argument('-R', "--re-eval", dest='re_eval',
                        action='store_true',
                        help="Review files with status 'new' even if they already "
                             "have a quality control assessment.")
    args = parser.parse_args()
    main()