def __insert_processing_float_diagnostic(proc_id, diag, existdb=None): """Insert processing float diagnostic. Inputs: proc_id: The ID number of the processing job for which the diagnostic describes. diag: A FloatDiagnostic object. existdb: An (optional) existing database connection object. (Default: Establish a db connection) Outputs: None """ # Connect to the database db = existdb or database.Database() db.connect() try: ins = db.proc_diagnostics.insert() values = {'process_id': proc_id, 'value': diag.diagnostic, 'type': diag.name} result = db.execute(ins, values) diag_id = result.inserted_primary_key[0] result.close() notify.print_info("Inserted process diagnostic (type: %s)." % diag.name, 2) finally: if not existdb: # Close DB connection db.close() return diag_id
def __insert_rawfile_float_diagnostic(rawfile_id, diag, existdb=None):
    """Insert a float-valued diagnostic for a raw file.

        Inputs:
            rawfile_id: The ID number of the raw file for which to
                insert the diagnostic.
            diag: A FloatDiagnostic object.
            existdb: An (optional) existing database connection object.
                (Default: Establish a db connection)

        Outputs:
            diag_id: The ID of the newly inserted diagnostic row.
    """
    # Use the provided connection, or open one of our own
    db = existdb or database.Database()
    db.connect()
    try:
        row = {'rawfile_id': rawfile_id,
               'value': diag.diagnostic,
               'type': diag.name}
        result = db.execute(db.raw_diagnostics.insert(), row)
        diag_id = result.inserted_primary_key[0]
        result.close()
        notify.print_info("Inserted rawfile diagnostic (type: %s)." %
                          diag.name, 2)
    finally:
        # Only close a connection we opened ourselves
        if not existdb:
            db.close()
    return diag_id
def main(args): if args.timfile is None: raise errors.BadInputError("An input timfile is required.") if args.pulsar_name is None: raise errors.BadInputError("The pulsar name must be provided.") if args.format not in READERS: raise errors.UnrecognizedValueError("The requested timfile format " "'%s' is not recognized. " "Available formats: '%s'." % (args.format, "', '".join(sorted(READERS.keys())))) # Pulsar must already included in DB pulsar_id = cache.get_pulsarid(args.pulsar_name) obssystem_discovery_args = {'obssystem_name': args.obssystem, 'obssystem_flags': args.obssystem_flags, 'backend_name': args.backend, 'backend_flags': args.backend_flags, 'frontend_name': args.frontend, 'frontend_flags': args.frontend_flags} if args.dry_run: # Parse input file toas = parse_timfile(args.timfile, reader=args.format, **obssystem_discovery_args) print "%d TOAs parsed" % len(toas) msg = [] for toa in toas: msg.append("TOA info: %s" % "\n ".join(["%s: %s" % xx for xx in toa.iteritems()])) notify.print_info("\n".join(msg), 3) else: load_from_timfile(args.timfile, pulsar_id=pulsar_id, reader=args.format, **obssystem_discovery_args)
def fill_process_table(version_id, rawfile_id, parfile_id, template_id,
                       manip, nchan, nsub, existdb=None):
    """Add a new row to the process table describing a processing run.

        Inputs:
            version_id: The ID of the code version used.
            rawfile_id: The ID of the raw file processed.
            parfile_id: The ID of the parfile used (may be None).
            template_id: The ID of the template used.
            manip: The manipulator used (its 'name' and 'argstr'
                attributes are recorded).
            nchan: Number of frequency chunks.
            nsub: Number of time chunks.
            existdb: An (optional) existing database connection object.
                (Default: Establish a db connection)

        Outputs:
            process_id: The ID of the newly inserted process row.
    """
    db = existdb or database.Database()
    db.connect()
    # BUGFIX: wrap in try/finally so a locally opened connection is
    # closed even if the insert fails (matches the pattern used by the
    # sibling DB-insert helpers in this codebase)
    try:
        ins = db.process.insert()
        values = {'version_id': version_id,
                  'rawfile_id': rawfile_id,
                  'parfile_id': parfile_id,
                  'template_id': template_id,
                  'manipulator': manip.name,
                  'manipulator_args': manip.argstr,
                  'nchan': nchan,
                  'nsub': nsub,
                  'toa_fitting_method': config.cfg.toa_fitting_method,
                  'user_id': cache.get_userid()}
        result = db.execute(ins, values)
        process_id = result.inserted_primary_key[0]
        result.close()
        notify.print_info("Added processing run to DB. Processing ID: %d" %
                          process_id, 1)
    finally:
        # Close DB connection
        if not existdb:
            db.close()
    return process_id
def main(args):
    """Remove a template entry from the DB, then dispose of the
    template file according to args.action ('leave', 'move' or
    'delete').  All DB changes happen inside one transaction.
    """
    # Establish a DB connection
    db = database.Database()
    db.connect()
    trans = db.begin()
    try:
        # Resolve the template's path before its DB entry disappears
        template = general.get_template_from_id(args.template_id, existdb=db)
        # First remove the template entry from the DB
        # NOTE(review): remove_template_entry is not passed 'db', so it
        # may operate outside this transaction -- confirm its signature.
        remove_template_entry(args.template_id)
        # Now deal with the template itself
        if args.action == "leave":
            # Do nothing
            pass
        elif args.action == "move":
            if not args.dest:
                raise errors.BadInputError("Destination must be provided "
                                           "when moving template.")
            notify.print_info("Moving template %s to %s" % (template,
                                                            args.dest))
            shutil.move(template, args.dest)
        elif args.action == "delete":
            notify.print_info("Deleting template %s" % template)
            os.remove(template)
        else:
            raise errors.UnrecognizedValueError("The action provided (%s) "
                                                "is not recognized." %
                                                args.action)
    except:
        # Undo the DB removal if anything (including file ops) failed
        trans.rollback()
        raise
    else:
        trans.commit()
    finally:
        db.close()
def show_timfiles(timfiles): if len(timfiles): print "--"*25 for timfile in timfiles: print colour.cstring("Timfile ID:", underline=True, bold=True) + \ colour.cstring(" %d" % timfile['timfile_id'], bold=True) print "Pulsar name: %s" % timfile['pulsar_name'] print "Master timfile? %s" % \ (((timfile['mtimid'] is not None) and "Yes") or "No") print "Last edited by: %s (%s)" % (timfile['real_name'], timfile['email_address']) print "Comments: %s" % timfile['comments'] print "Date and time timfile was last edited: %s" % \ timfile['add_time'].isoformat(' ') print "Number of TOAs: %d" % timfile['numtoas'] if timfile['any_replaced'] is not None: colour.cprint("Some TOAs are from rawfiles that been " "superseded", 'warning') # Show extra information if verbosity is >= 1 lines = ["First TOA (MJD): %s" % timfile['startmjd'], "Last TOA (MJD): %s" % timfile['endmjd'], "Number of telescopes used: %d" % timfile['numtelescopes'], "Number of observing systems used: %d" % timfile['numobsys']] notify.print_info("\n".join(lines), 1) print "--"*25 else: raise errors.ToasterError("No timfiles match parameters provided!")
def _compute(self):
    """Return the percentage of profile weights that are non-zero,
    as reported by psrstat for this file.
    """
    notify.print_info("Calling psrstat to get weights for %s" %
                      self.fn, 3)
    outstr, errstr = utils.execute(["psrstat", self.fn,
                                    "-c", "int:wt", "-Qq"])
    # psrstat emits a comma-separated list of weights
    weights = np.array([float(field)
                        for field in outstr.strip().split(',')])
    return 100.0 * np.sum(weights > 0) / weights.size
def main(args):
    """Entry point: gather TOAs, resolve conflicts, and create a new
    timfile entry (or print a summary when --dry-run is given).
    Extra arguments may be read from a file via args.from_file.
    """
    # Check to make sure user provided a comment
    if not args.dry_run and args.comments is None:
        raise errors.BadInputError("A comment describing the timfile is "
                                   "required!")
    if args.from_file is not None:
        # Re-create parser, so we can read arguments from file
        parser = utils.DefaultArguments()
        add_arguments(parser)
        if args.from_file == '-':
            argfile = sys.stdin
        else:
            if not os.path.exists(args.from_file):
                raise errors.FileError("The list of cmd line args (%s) "
                                       "does not exist." % args.from_file)
            argfile = open(args.from_file, 'r')
        try:
            for line in argfile:
                # Strip comments
                line = line.partition('#')[0].strip()
                if not line:
                    # Skip empty line
                    continue
                arglist = shlex.split(line.strip())
                args = parser.parse_args(arglist, namespace=args)
        finally:
            # BUGFIX: close the argument file (previously leaked);
            # never close stdin
            if argfile is not sys.stdin:
                argfile.close()
    # Establish a database connection
    db = database.Database()
    db.connect()
    trans = db.begin()
    try:
        cmdline = " ".join(sys.argv)
        toas = get_toas(args, db)
        if debug.is_on('TIMFILE'):
            # Check for / handle conflicts
            conflict_handler = CONFLICT_HANDLERS[args.on_conflict]
            toas = conflict_handler(toas)
            wt.write_timfile(toas, {'comments': args.comments,
                                    'user_id': cache.get_userid(),
                                    'add_time': "Not in DB!",
                                    'timfile_id': -1})
        elif args.dry_run:
            print_summary(toas, args.comments)
        else:
            conflict_handler = CONFLICT_HANDLERS[args.on_conflict]
            timfile_id = add_timfile_entry(toas, cmdline, args.comments,
                                           conflict_handler)
            notify.print_info("Created new timfile entry - timfile_id=%d (%s)" %
                              (timfile_id, utils.give_utc_now()), 1)
    except:
        db.rollback()
        db.close()
        raise
    else:
        db.commit()
        db.close()
def get_parfile_from_id(parfile_id, existdb=None, verify_md5=True):
    """Return the path to the parfile that has the given ID number.
        Optionally double check the file's MD5 sum, to make sure
        nothing strange has happened.
        (NOTE: the original docstring said "raw file"; this function
        queries the parfiles table.)

        Inputs:
            parfile_id: The ID number of the parfile to get a path for.
            existdb: A (optional) existing database connection object.
                (Default: Establish a db connection)
            verify_md5: If True, double check the file's MD5 sum.
                (Default: Perform MD5 check.)

        Output:
            fn: The full file path.

        Raises:
            errors.InconsistentDatabaseError: If the number of matching
                rows is not exactly one.
            errors.FileError: If the MD5 check fails.
    """
    # NOTE(review): log message says "raw file" but this looks up a
    # parfile -- likely copied from the rawfile counterpart; confirm.
    notify.print_info("Looking-up raw file with ID=%d" % parfile_id, 2)
    # Use the existing DB connection, or open a new one if None was provided
    db = existdb or database.Database()
    db.connect()
    select = db.select([db.parfiles.c.filename,
                        db.parfiles.c.filepath,
                        db.parfiles.c.md5sum]).where(
        db.parfiles.c.parfile_id == parfile_id
    )
    result = db.execute(select)
    rows = result.fetchall()
    result.close()
    if not existdb:
        # Close the DB connection we opened
        db.close()
    if len(rows) == 1:
        filename = rows[0]["filename"]
        filepath = rows[0]["filepath"]
        md5sum_from_db = rows[0]["md5sum"]
    else:
        # Zero or multiple rows: the DB is in an inconsistent state
        raise errors.InconsistentDatabaseError(
            "Bad number of files (%d) "
            "with parfile_id=%d" % (len(rows), parfile_id)
        )
    fullpath = os.path.join(filepath, filename)
    # Make sure the file exists
    datafile.verify_file_path(fullpath)
    if verify_md5:
        notify.print_info(
            "Confirming MD5 sum of %s matches what is "
            "stored in DB (%s)" % (fullpath, md5sum_from_db), 2
        )
        md5sum_file = datafile.get_md5sum(fullpath)
        if md5sum_from_db != md5sum_file:
            raise errors.FileError(
                "md5sum check of %s failed! MD5 from "
                "DB (%s) != MD5 from file (%s)" %
                (fullpath, md5sum_from_db, md5sum_file)
            )
    return fullpath
def verify_file_path(fn):
    """Verify that the given file exists and return its absolute
        directory and base name.

        Input:
            fn: The file to verify.

        Outputs:
            file_path: The (absolute) directory containing the file.
            file_name: The file's base name.

        Raises:
            errors.FileError: If the file does not exist.
    """
    # Verify that file exists
    notify.print_info("Verifying file: %s" % fn, 2)
    if not os.path.isfile(fn):
        # BUGFIX: removed insulting language from the user-facing
        # error message
        raise errors.FileError("File %s does not exist!" % fn)
    # Determine path (will retrieve absolute path)
    file_path, file_name = os.path.split(os.path.abspath(fn))
    notify.print_info("File %s exists!" %
                      os.path.join(file_path, file_name), 3)
    return file_path, file_name
def show_templates(templates): if len(templates): print "--"*25 for tdict in templates: print colour.cstring("Template ID:", underline=True, bold=True) + \ colour.cstring(" %d" % tdict['template_id'], bold=True) fn = os.path.join(tdict['filepath'], tdict['filename']) print "\nTemplate: %s" % fn print "Pulsar name: %s" % tdict['pulsar_name'] print "Master template? %s" % \ (((tdict['mtempid'] is not None) and "Yes") or "No") print "Number of phase bins: %d" % tdict['nbin'] print "Uploaded by: %s (%s)" % (tdict['real_name'], tdict['email_address']) print "Uploader's comments: %s" % tdict['comments'] print "Date and time template was added: %s" % \ tdict['add_time'].isoformat(' ') # Show extra information if verbosity is >= 1 lines = ["Observing System ID: %d" % tdict['obssystem_id'], "Observing System Name: %s" % tdict['obssys_name'], "Telescope: %s" % tdict['telescope_name'], "Frontend: %s" % tdict['frontend'], "Backend: %s" % tdict['backend'], "Clock: %s" % tdict['clock']] notify.print_info("\n".join(lines), 1) try: # Show the template if verbosity is >= 2 cmd = ["psrtxt", fn] psrtxtout, stderr = utils.execute(cmd) gnuplotcode = """set term dumb set format y "" set nokey set border 1 set tics out set xtics nomirror set ytics 0,1,0 set xlabel "Phase Bin" set xrange [0:%d] plot "-" using 3:4 w l %s end """ % (tdict.nbin-1, psrtxtout) plot, stderr = utils.execute(["gnuplot"], stderr=open(os.devnull), stdinstr=gnuplotcode) notify.print_info(plot, 2) except errors.SystemCallError: # gnuplot is probably not installed pass print "--"*25 else: raise errors.ToasterError("No templates match parameters provided!")
def merge_pulsar(src_pulsar_id, dest_pulsar_id, existdb=None):
    """Merge one pulsar entry into another.

        Inputs:
            src_pulsar_id: The ID of the pulsar entry that will be
                merged.
                NOTE: This entry will no longer exist following
                    the merge.
            dest_pulsar_id: The ID of the pulsar entry that will
                be merged into.
            existdb: A (optional) existing database connection object.
                (Default: Establish a db connection)

        Outputs:
            None
    """
    notify.print_info("Merging pulsar '%s' (ID: %d) into '%s' (ID: %d)" %
                      (cache.get_pulsarname(src_pulsar_id), src_pulsar_id,
                       cache.get_pulsarname(dest_pulsar_id),
                       dest_pulsar_id), 2)
    # Connect to the database
    db = existdb or database.Database()
    db.connect()
    trans = db.begin()
    try:
        # Re-point every row that refers to the source pulsar
        new_values = {'pulsar_id': dest_pulsar_id}
        for tbl in (db.pulsar_aliases, db.timfiles, db.rawfiles,
                    db.templates, db.parfiles, db.master_parfiles,
                    db.master_templates, db.toas):
            stmt = tbl.update().where(tbl.c.pulsar_id == src_pulsar_id)
            res = db.execute(stmt, new_values)
            res.close()
        # Remove the now-unused entry in the pulsars table
        stmt = db.pulsars.delete().\
            where(db.pulsars.c.pulsar_id == src_pulsar_id)
        res = db.execute(stmt)
        res.close()
    except:
        trans.rollback()
        raise
    else:
        trans.commit()
    finally:
        if existdb is None:
            db.close()
def insert_processing_diagnostics(proc_id, diags, archivedir=None, suffix="", existdb=None): """Insert processing diagnostics, carefully checking if each diagnostic is float-valued, or plot-valued. Inputs: proc_id: The ID number of the processing job for which the diagnostic describes. diags: A list of computed diagnostics. archivedir: The location where diagnostic plots should be archived. (Default: put diagnostic plots in same directory as the input file.) suffix: Add a suffix just before the extension of diagnostic plots' filenames. (Default: Do not insert a suffix) existdb: An (optional) existing database connection object. (Default: Establish a db connection) Outputs: None """ # Connect to the database db = existdb or database.Database() db.connect() try: for diag in diags: trans = db.begin() try: if isinstance(diag, diagnostics.base.FloatDiagnostic): __insert_processing_float_diagnostic(proc_id, diag, existdb=db) elif isinstance(diag, diagnostics.base.PlotDiagnostic): __insert_processing_diagnostic_plot(proc_id, diag, archivedir=archivedir, suffix=suffix, existdb=db) else: raise ValueError("Diagnostic is not a valid type (%s)!" % type(diag)) except errors.DiagnosticAlreadyExists, e: notify.print_info("Diagnostic already exists: %s. " "Skipping..." % str(e), 2) db.rollback() else: db.commit() finally: if not existdb: # Close DB connection db.close()
def show_pulsars(psrinfo): """Print pulsar info to screen in a human-readable format. Input: psrinfo: A dictionary of pulsar info dictionaries. (As returned by get_pulsarinfo(...)) Outputs: None """ print "--"*25 for psrid in sorted(psrinfo.keys()): psr = psrinfo[psrid] print colour.cstring("Pulsar ID:", underline=True, bold=True) + \ colour.cstring(" %d" % psrid, bold=True) print "Pulsar Name: %s" % psr['name'] print "Aliases:" for alias in psr['aliases']: if alias == psr['name']: continue print " %s" % alias if psr['parfile_id'] is None: print "No parfile loaded!" else: if psr['period'] > 1: print "Period: %.3f s" % psr['period'] else: print "Period: %.2f ms" % (1000.0*psr['period']) print "DM: %.2f pc/cc" % psr['dm'] print "R.A. (J2000): %s" % psr['raj'] print "Dec. (J2000): %s" % psr['decj'] print "Binary model: %s" % psr['binary'] lines = ["Number of observations: %d" % psr['numobs']] if psr['numobs'] > 0: lines.append("Telescopes used:\n " + "\n ".join(psr['telescopes'])) lines.append("Number of TOAs: %d" % psr['numtoas']) if psr['curators'] == 'Everyone': lines.append("Curators: Everyone") elif psr['curators']: lines.append("Curators:\n " + "\n ".join(psr['curators'])) else: lines.append("Curators: None") notify.print_info("\n".join(lines), 1) print "--"*25
def write_timfile(toas, timfile, sortkeys=('freq', 'mjd'), flags=(),
                  outname="-", formatter=formatters.tempo2_formatter):
    """Write TOAs to a timfile.

        Inputs:
            toas: A list of TOAs.
            timfile: Information about the timfile from the DB.
            sortkeys: A list of keys to sort TOAs by.
            flags: A single string containing flags to add to each TOA.
            outname: The output file's name. (Default: stdout)
            formatter: A formatter function.

        Outputs:
            None

        Raises:
            errors.FileError: If the output file already exists.
            errors.BadInputError: If the timfile has no comment.
    """
    if outname != '-' and os.path.exists(outname):
        # BUGFIX: fixed "sepcified" typo in the error message
        raise errors.FileError("The output timfile specified (%s) "
                               "already exists. Doing nothing..." % outname)
    if not timfile['comments']:
        raise errors.BadInputError("Timfile (ID: %d) has no comment!" %
                                   timfile['timfile_id'])
    # Sort TOAs
    utils.sort_by_keys(toas, sortkeys)
    # BUGFIX: compare strings with '==', not 'is' (identity is not
    # guaranteed for equal strings)
    if outname == '-':
        tim = sys.stdout
    else:
        tim = open(outname, 'w')
    # Write the comments as a '#'-prefixed header block
    wrapper = textwrap.TextWrapper(initial_indent="# ",
                                   subsequent_indent="# ")
    tim.write(wrapper.fill(timfile['comments'])+'\n')
    userinfo = cache.get_userinfo(timfile['user_id'])
    tim.write("# Created by: %s (%s)\n" % (userinfo['real_name'],
                                           userinfo['email_address']))
    tim.write("# at: %s\n" % timfile['add_time'])
    tim.write("# Timfile ID: %d\n" % timfile['timfile_id'])
    tim.write("# (Automatically generated by TOASTER)\n")
    lines = formatter(toas, flags)
    tim.write("\n".join(lines)+"\n")
    if outname != '-':
        # Only close real files, never stdout
        tim.close()
    notify.print_info("Successfully wrote %d TOAs to timfile (%s)" %
                      (len(toas), outname), 1)
def __insert_rawfile_diagnostic_plot(rawfile_id, diag, existdb=None): """Insert rawfile plot diagnostic. Inputs: rawfile_id: The ID number of the raw file for which to insert the diagnostic. diag: A FloatDiagnostic object. existdb: An (optional) existing database connection object. (Default: Establish a db connection) Outputs: None """ # Connect to the database db = existdb or database.Database() db.connect() diagpath = None # Initialise diagplot in case an exception is raised try: # Put diagnostic plot next to the data file diagplot = os.path.abspath(diag.diagnostic) archivedir = os.path.split(os.path.abspath(diag.fn))[0] diagpath = datafile.archive_file(diagplot, archivedir) diagdir, diagfn = os.path.split(os.path.abspath(diagpath)) ins = db.raw_diagnostic_plots.insert() values = {'rawfile_id':rawfile_id, \ 'filename':diagfn, \ 'filepath':diagdir, \ 'plot_type':diag.name} result = db.execute(ins, values) diag_id = result.inserted_primary_key[0] result.close() notify.print_info("Inserted rawfile diagnostic plot (type: %s)." % \ diag.name, 2) except: # Move the diagnostic plot back if it has already been archived. if diagpath and os.path.isfile(diagpath): shutil.move(diagpath, diagplot) raise finally: if not existdb: # Close DB connection db.close() return diag_id
def diagnose_rawfile(rawfile_id, diagnostic, value=None, existdb=None):
    """Diagnose a rawfile (specified by its ID number).

        Inputs:
            rawfile_id: The ID number of the rawfile to diagnose.
            diagnostic: The name of the diagnostic.
            value: The value of the diagnostic.
                - If None, try to compute a pre-defined diagnostic.
                - If a string, assume it is a plot-diagnostic.
                - If a floating-point number, assume it is a
                  numeric-diagnostic.
            existdb: An (optional) existing database connection object.
                (Default: Establish a db connection)

        Outputs:
            diag: The Diagnostic object.
    """
    db = existdb or database.Database()
    db.connect()
    fn = utils.get_rawfile_from_id(rawfile_id, existdb=db)
    if value is None:
        # Pre-defined diagnostic
        diag_class = diagnostics.get_diagnostic_class(diagnostic)
        # Fail early if a diagnostic with this name is already
        # registered for the rawfile provided
        check_rawfile_diagnostic_existence(rawfile_id, diag_class.name,
                                           existdb=db)
        diag = diag_class(fn)
    elif type(value) == types.FloatType:
        # Numeric diagnostic
        notify.print_info("Custom floating-point rawfile diagnostic provided", 2)
        diag = diagnostics.get_custom_float_diagnostic(fn, diagnostic,
                                                       value)
    else:
        # Plot diagnostic
        notify.print_info("Custom rawfile diagnostic plot provided", 2)
        diag = diagnostics.get_custom_diagnostic_plot(fn, diagnostic,
                                                      value)
    if existdb is None:
        db.close()
    return diag
def reduce_rawfile(args, leftover_args=None, existdb=None):
    """Run the TOA-generation pipeline for the rawfile described by
        'args', loading raw/par/template files as needed and falling
        back to master parfiles/templates.

        Inputs:
            args: An argparse namespace carrying rawfile/parfile/
                template paths or IDs and pipeline options.
            leftover_args: Extra command-line arguments to pass to the
                manipulator. (Default: no extra arguments)
            existdb: An (optional) existing database connection object.
                (Default: Establish a db connection)

        Outputs:
            None
    """
    # BUGFIX: 'leftover_args=[]' was a mutable default argument shared
    # across calls; use None as the sentinel instead
    if leftover_args is None:
        leftover_args = []
    if args.rawfile is not None:
        notify.print_info("Loading rawfile %s" % args.rawfile, 1)
        args.rawfile_id = load_rawfile.load_rawfile(args.rawfile, existdb)
    elif args.rawfile_id is None:
        # Neither a rawfile, nor a rawfile_id was provided
        raise errors.BadInputError("Either a rawfile, or a rawfile_id "
                                   "_must_ be provided!")
    if args.parfile is not None:
        notify.print_info("Loading parfile %s" % args.parfile, 1)
        args.parfile_id = load_parfile.load_parfile(args.parfile,
                                                    existdb=existdb)
    if args.template is not None:
        notify.print_info("Loading template %s" % args.template, 1)
        args.template_id = load_template.load_template(args.template,
                                                       existdb=existdb)
    rawfile_info = rawfiles_general.get_rawfile_info(args.rawfile_id,
                                                     existdb=existdb)
    if args.use_parfile:
        if args.parfile_id is None:
            # Fall back to the pulsar's master parfile
            args.parfile_id = parfiles_general.get_master_parfile(
                rawfile_info['pulsar_id'])[0]
            if args.parfile_id is None:
                raise errors.NoMasterError("A master parfile is required "
                                           "in the database if no parfile is "
                                           "provided on the command line.")
    else:
        args.parfile_id = None
    if args.template_id is None:
        # Fall back to the master template for this pulsar/obssystem
        args.template_id = templates_general.get_master_template(
            rawfile_info['pulsar_id'], rawfile_info['obssystem_id'],
            existdb=existdb)[0]
        if args.template_id is None:
            raise errors.NoMasterError("A master template is required "
                                       "in the database if no template is "
                                       "provided on the command line.")
    notify.print_info("Using the following IDs:\n"
                      " rawfile_id: %s\n"
                      " parfile_id: %s\n"
                      " template_id: %s" %
                      (args.rawfile_id, args.parfile_id,
                       args.template_id), 1)
    # Load manipulator and hand it the leftover cmd-line arguments
    manip = manipulators.load_manipulator(args.manip_name)
    manip.parse_args(leftover_args)
    # Run pipeline core
    pipeline_core(manip, args.rawfile_id, args.parfile_id,
                  args.template_id, existdb)
def load_toas(toainfo, existdb=None):
    """Upload a TOA to the database.

        Inputs:
            toainfo: A list of dictionaries, each with information
                for a TOA.
            existdb: A (optional) existing database connection object.
                (Default: Establish a db connection)

        Outputs:
            toa_ids: A list of the IDs of the inserted TOA rows.

        Raises:
            errors.BadInputError: If 'toainfo' is empty.
            errors.BadTOAFormat: If a TOA already carries a 'toa_id'.
    """
    if not toainfo:
        raise errors.BadInputError("No TOA info was provided!")
    # Use the existing DB connection, or open a new one if None was provided
    db = existdb or database.Database()
    db.connect()
    db.begin()  # Open a transaction
    # Write values to the toa table
    ins = db.toas.insert()
    toa_ids = []
    try:
        for values in toainfo:
            if 'toa_id' in values:
                raise errors.BadTOAFormat("TOA has already been loaded? "
                                          "TOA ID: %d" % values['toa_id'])
            result = db.execute(ins, values)
            toa_id = result.inserted_primary_key[0]
            result.close()
            toa_ids.append(toa_id)
            values['toa_id'] = toa_id
    except:
        # BUGFIX: on failure, roll back the open transaction and close
        # a locally opened connection instead of leaking both
        db.rollback()
        if not existdb:
            db.close()
        raise
    db.commit()
    if len(toa_ids) > 1:
        notify.print_info("Added %d TOAs to DB." % len(toa_ids), 2)
    else:
        notify.print_info("Added TOA to DB.", 2)
    if not existdb:
        # Close the DB connection we opened
        db.close()
    return toa_ids
def is_gitrepo_dirty(repodir):
    """Return True if the git repository has local changes.

        Inputs:
            repodir: The location of the git repository.

        Output:
            is_dirty: True if git repository has local changes.
                False otherwise.
    """
    notify.print_info("Checking if Git repo at '%s' is dirty..." %
                      repodir, 2)
    try:
        utils.execute(["git", "diff", "--quiet"], execdir=repodir)
    except errors.SystemCallError:
        # Non-zero exit code: the work tree has local modifications
        return True
    # Zero exit code: the work tree matches HEAD
    return False
def show_parfiles(parfiles): if len(parfiles): print "--"*25 for parfile in parfiles: print colour.cstring("Parfile ID:", underline=True, bold=True) + \ colour.cstring(" %d" % parfile['parfile_id'], bold=True) fn = os.path.join(parfile['filepath'], parfile['filename']) print "\nParfile: %s" % fn print "Pulsar name: %s" % parfile['pulsar_name'] print "Master parfile? %s" % \ (((parfile['mparid'] is not None) and "Yes") or "No") print "Date and time parfile was added: %s" % \ parfile['add_time'].isoformat(' ') msg = "Parfile contents:\n\n" for line in open(fn, 'r'): msg += "%s\n" % line.strip() notify.print_info(msg, 1) print "--"*25 else: raise errors.ToasterError("No parfiles match parameters provided!")
def is_gitrepo(repodir):
    """Return True if the given dir is a git repository.

        Input:
            repodir: The location of the git repository.

        Output:
            is_git: True if directory is part of a git repository.
                False otherwise.
    """
    notify.print_info("Checking if directory '%s' contains a Git repo..." %
                      repodir, 2)
    try:
        # 'git rev-parse' exits non-zero outside a work tree;
        # discard its stderr chatter
        utils.execute(["git", "rev-parse"], execdir=repodir,
                      stderr=open(os.devnull))
    except errors.SystemCallError:
        return False
    return True
def load_rawfile(fn, existdb=None):
    """Archive a raw data file and register it in the database.

        Inputs:
            fn: The raw file to load.
            existdb: An (optional) existing database connection object.
                (Default: Establish a db connection)

        Output:
            rawfile_id: The ID of the newly registered raw file.
    """
    # Use the given DB connection, or establish a new one
    db = existdb or database.Database()
    db.connect()
    try:
        notify.print_info("Working on %s (%s)" %
                          (fn, utils.give_utc_now()), 1)
        # Check the file and parse the header
        params = datafile.prep_file(fn)
        # Move the file into the archive
        destdir = datafile.get_archive_dir(fn, params=params)
        newfn = datafile.archive_file(fn, destdir)
        notify.print_info("%s moved to %s (%s)" %
                          (fn, newfn, utils.give_utc_now()), 1)
        # Register the file into the database
        rawfile_id = populate_rawfiles_table(db, newfn, params)
        notify.print_info("Successfully loaded %s - rawfile_id=%d (%s)" %
                          (fn, rawfile_id, utils.give_utc_now()), 1)
    finally:
        # Only close a connection we opened ourselves
        if not existdb:
            db.close()
    return rawfile_id
def sort_by_keys(tosort, keys):
    """Sort a list of dictionaries, or database rows
        by the list of keys provided. Keys provided
        later in the list take precedence over earlier
        ones. If a key ends in '_r' sorting by that key
        will happen in reverse.

        Inputs:
            tosort: The list to sort.
            keys: The keys to use for sorting.

        Outputs:
            None - sorting is done in-place.
    """
    if not tosort:
        return tosort
    notify.print_info("Sorting by keys (%s)" % " then ".join(keys), 3)
    for key in keys:
        reverse = key.endswith("_r")
        if reverse:
            key = key[:-2]  # Strip the '_r' suffix
            notify.print_info("Reverse sorting by %s..." % key, 2)
        else:
            notify.print_info("Sorting by %s..." % key, 2)
        if type(tosort[0][key]) is types.StringType:
            # Case-insensitive comparison for string-valued keys
            tosort.sort(key=lambda row: row[key].lower(), reverse=reverse)
        else:
            tosort.sort(key=lambda row: row[key], reverse=reverse)
def _compute(self):
    """Create a composite summary plot for self.fn and return the
    path to the generated PNG file.
    """
    notify.print_info("Creating composite summary plot for %s" % self.fn, 3)
    # Reserve a unique temporary PNG to plot into
    fd, tmpfn = tempfile.mkstemp(suffix=".png")
    os.close(fd)
    params = datafile.prep_file(self.fn)
    # Choose the plot layout based on the data's time/freq resolution
    if (params["nsub"] > 1) and (params["nchan"] > 1):
        self.__plot_all(tmpfn, params)
    elif (params["nsub"] > 1) and (params["nchan"] == 1):
        self.__plot_nofreq(tmpfn, params)
    elif (params["nsub"] == 1) and (params["nchan"] > 1):
        self.__plot_notime(tmpfn, params)
    elif (params["nsub"] == 1) and (params["nchan"] == 1):
        self.__plot_profonly(tmpfn, params)
    else:
        raise errors.FileError(
            "Not sure how to plot diagnostic for file. "
            "(nsub: %d; nchan: %d)" % (params["nsub"], params["nchan"])
        )
    # Rename the temporary plot after the archive it describes
    outdir = os.path.split(tmpfn)[0]
    basefn = os.path.split(self.fn)[-1]
    pngfn = os.path.join(outdir, basefn + ".composite.png")
    shutil.move(tmpfn, pngfn)
    return pngfn
def show_procjobs(procjobs): print "--" * 25 for procjob in procjobs: print colour.cstring("Process Id:", underline=True, bold=True) + colour.cstring( " %d" % procjob.process_id, bold=True ) print "\nPulsar name: %s" % cache.get_pulsarname(procjob.pulsar_id) print "Rawfile (ID=%d): %s" % (procjob.rawfile_id, procjob.rawfn) if procjob.replacement_rawfile_id is not None: colour.cprint("Rawfile has been superseded by rawfile_id=%d" % procjob.replacement_rawfile_id, "warning") print "Manipulator: %s" % procjob.manipulator print " Args: %s" % procjob.manipulator_args print "Number of freq. chunks: %d" % procjob.nchan print "Number of time chunks: %d" % procjob.nsub print "Uploaded by: %s (%s)" % (procjob.real_name, procjob.email_address) print "Date and time job completed: %s" % procjob.add_time.isoformat(" ") if config.cfg.verbosity >= 1: lines = ["Template (ID=%d): %s" % (procjob.template_id, procjob.tempfn)] if procjob.parfile_id is not None: lines.append("Parfile (ID=%d): %s" % (procjob.parfile_id, procjob.parfn)) else: lines.append("No parfile installed during processing.") notify.print_info("\n".join(lines), 1) print "--" * 25
def insert_rawfile_diagnostics(rawfile_id, diags, existdb=None): """Insert rawfile diagnostics, carefully checking if each diagnostic is float-valued, or plot-valued. Inputs: rawfile_id: The ID number of the raw file for which to insert the diagnostic. diags: A list of computed diagnostics. existdb: An (optional) existing database connection object. (Default: Establish a db connection) Outputs: None """ # Connect to the database db = existdb or database.Database() db.connect() notify.print_info("Computing raw file diagnostics for " \ "rawfile (ID: %d)" % rawfile_id, 2) try: for diag in diags: notify.print_info("Computing %s diagnostic" % diag.name, 3) trans = db.begin() try: check_rawfile_diagnostic_existence(rawfile_id, diag.name, \ existdb=db) if isinstance(diag, diagnostics.base.FloatDiagnostic): __insert_rawfile_float_diagnostic(rawfile_id, diag, \ existdb=db) elif isinstance(diag, diagnostics.base.PlotDiagnostic): __insert_rawfile_diagnostic_plot(rawfile_id, diag, \ existdb=db) else: trans.rollback() raise ValueError("Diagnostic is not a valid type (%s)!" % \ type(diag)) except errors.DiagnosticAlreadyExists, e: notify.print_info("Diagnostic already exists: %s. Skipping..." % \ str(e), 2) trans.rollback() else: trans.commit() finally: if not existdb: # Close DB connection db.close()
def load_template(fn, comments, is_master=False, existdb=None):
    """Archive a template file, register it in the DB, and possibly
        set it as the master template.

        Inputs:
            fn: The template file to load.
            comments: The uploader's comments.
            is_master: If True, set the template as master.
                (A template is also promoted to master if there is no
                master for its pulsar/obssystem yet.)
            existdb: An (optional) existing database connection object.
                (Default: Establish a db connection)

        Output:
            template_id: The ID of the newly registered template.
    """
    # Use the given DB connection, or establish a new one
    db = existdb or database.Database()
    db.connect()
    try:
        # Now load the template file into database
        notify.print_info("Working on %s (%s)" %
                          (fn, utils.give_utc_now()), 1)
        # Check the template and parse the header
        params = datafile.prep_file(fn)
        # Move the file into the archive
        destdir = datafile.get_archive_dir(fn, params=params)
        newfn = datafile.archive_file(fn, destdir)
        # Register the template into the database
        template_id = populate_templates_table(db, newfn, params,
                                               comments=comments)
        mastertemp_id, tempfn = general.get_master_template(params['pulsar_id'],
                                                            params['obssystem_id'])
        if mastertemp_id is None:
            # No master exists yet; promote this template
            is_master = True
        if is_master:
            notify.print_info("Setting %s as master template (%s)" %
                              (newfn, utils.give_utc_now()), 1)
            general.set_as_master_template(template_id, db)
        notify.print_info("Finished with %s - template_id=%d (%s)" %
                          (fn, template_id, utils.give_utc_now()), 1)
    finally:
        # Only close a connection we opened ourselves
        if not existdb:
            db.close()
    return template_id
def load_parfile(fn, is_master=False, existdb=None):
    """Archive a parfile, register it in the DB, and possibly set it
        as the master parfile for its pulsar.

        Inputs:
            fn: The parfile to load.
            is_master: If True, set the parfile as master.
                (A parfile is also promoted to master if there is no
                master for its pulsar yet.)
            existdb: An (optional) existing database connection object.
                (Default: Establish a db connection)

        Output:
            parfile_id: The ID of the newly registered parfile.
    """
    # Use the given DB connection, or establish a new one
    db = existdb or database.Database()
    db.connect()
    try:
        # Now load the parfile file into database
        notify.print_info("Working on %s (%s)" %
                          (fn, utils.give_utc_now()), 1)
        # Check the parfile and parse it
        params = general.prep_parfile(fn)
        # Archive the parfile
        destdir = os.path.join(config.cfg.data_archive_location,
                               'parfiles', params['name'])
        newfn = datafile.archive_file(fn, destdir)
        # Register the parfile into the database
        parfile_id = populate_parfiles_table(db, newfn, params)
        masterpar_id, parfn = general.get_master_parfile(params['pulsar_id'])
        if masterpar_id is None:
            # No master exists yet; promote this parfile
            is_master = True
        if is_master:
            notify.print_info("Setting %s as master parfile (%s)" %
                              (newfn, utils.give_utc_now()), 1)
            general.set_as_master_parfile(parfile_id, db)
        notify.print_info("Finished with %s - parfile_id=%d (%s)" %
                          (fn, parfile_id, utils.give_utc_now()), 1)
    finally:
        # Only close a connection we opened ourselves
        if not existdb:
            db.close()
    return parfile_id
def make_proc_diagnostics_dir(fn, proc_id): """Given an archive, create the appropriate diagnostics directory, and cross-references. Inputs: fn: The file to create a diagnostic directory for. proc_id: The processing ID number to create a diagnostic directory for. Outputs: diagdir: The diagnostic directory's name. """ diagnostics_location = os.path.join(config.cfg.data_archive_location, "diagnostics") params = datafile.prep_file(fn) basedir = datafile.get_archive_dir(fn, params=params, data_archive_location=diagnostics_location) diagdir = os.path.join(basedir, "procid_%d" % proc_id) # Make sure directory exists if not os.path.isdir(diagdir): # Create directory notify.print_info("Making diagnostic directory: %s" % diagdir, 2) os.makedirs(diagdir, 0770) crossrefdir = os.path.join(diagnostics_location, "processing") if not os.path.isdir(crossrefdir): # Create directory notify.print_info("Making diagnostic crossref diagdir: %s" % crossrefdir, 2) os.makedirs(crossrefdir, 0770) crossref = os.path.join(crossrefdir, "procid_%d" % proc_id) if not os.path.islink(crossref): # Create symlink notify.print_info("Making crossref to diagnostic diagdir: %s" % crossref, 2) os.symlink(diagdir, crossref) return diagdir