Ejemplo n.º 1
0
 def setUp(self):
     """Set up the fixture: client workflow configuration plus canned test state."""
     # Workflow/client configuration.
     self.sg = SpawnerClass()
     self.sg.config_file = "still_test_paper.cfg"
     self.wf = WorkFlow()
     process_client_config_file(self.sg, self.wf)

     # Fixture state consumed by the tests themselves.
     self.files = [1, 2, 3]
     self.still = 0
     self.task = 'UVC'  # TODO(Jon): hard-wired task name (HARDWF) -- change me
Ejemplo n.º 2
0
 def setUp(self):
     """Build a populated fake database plus a configured local task client."""
     self.ntimes = 10
     self.npols = 4

     # Database interface pre-populated with ntimes x npols test observations.
     self.dbi = PopulatedDataBaseInterface(self.ntimes, self.npols, test=True)
     self.files = self.dbi.list_observations()

     # Client-side workflow configuration.
     self.sg = SpawnerClass()
     self.sg.config_file = "still_test_paper.cfg"
     self.wf = WorkFlow()
     process_client_config_file(self.sg, self.wf)

     # A single task client talking to the local test server port.
     self.task_clients = [
         TaskClient(self.dbi, 'localhost', self.wf, port=TEST_PORT)
     ]
Ejemplo n.º 3
0
def main():
    """Command-line entry point: add MWA observations to the Workflow Manager.

    Parses arguments, loads the client config, verifies database
    connectivity, then either generates a list of unprocessed obsnums
    (``--generate``) or inserts the obsnums given with ``-o`` into the
    database with status NEW.
    """
    parser = argparse.ArgumentParser(
        description='MWA - Add observations to Workflow Manager')

    parser.add_argument(
        '--config_file',
        dest='config_file',
        required=False,
        help=
        "Specify the complete path to the config file, by default we'll use etc/still.cfg"
    )
    parser.add_argument('-o',
                        dest='obsnums',
                        required=False,
                        nargs='+',
                        help="List of obervations seperated by spaces")
    parser.add_argument(
        '--generate',
        dest='generate',
        required=False,
        help=
        "Generates a list of unprocessed obsnums, note that projID and filecount have not been checked"
    )

    parser.set_defaults(config_file="%setc/still.cfg" % basedir)

    args, unknown = parser.parse_known_args()

    sg = SpawnerClass()
    wf = WorkFlow()

    sg.config_file = args.config_file
    process_client_config_file(sg, wf)
    dbi = get_dbi_from_config(args.config_file)
    dbi.test_db(
    )  # Testing the database to make sure we made a connection, its fun..
    if args.generate:
        ingest_addtional_opsids(sg)
    else:
        # BUG FIX: the original read ``print("Obsid: %s") % obsid``, which
        # applies ``%`` to print()'s return value (None) and raises TypeError.
        # Also guard against ``-o`` not being supplied (args.obsnums is None).
        for obsid in args.obsnums or []:
            print("Obsid: %s" % obsid)
            dbi.add_observation(obsid,
                                obsid,
                                "GPS",
                                None,
                                None,
                                None,
                                outputhost=None,
                                length=None,
                                status='NEW')
Ejemplo n.º 4
0
    def setUp(self):
        """Create a fake database, workflow config, a stub Action class, and a task client."""
        self.nfiles = 10
        fake_dbi = FakeDataBaseInterface(self.nfiles)
        self.dbi = fake_dbi

        self.sg = SpawnerClass()
        self.sg.config_file = "still_test_paper.cfg"
        self.wf = WorkFlow()
        process_client_config_file(self.sg, self.wf)

        class FakeAction(sch.Action):
            """Action stub that records the task in the fake database instead of running remotely."""
            def run_remote_task(self):
                # Record this action's task against its filename (closes over fake_dbi).
                fake_dbi.files[self.filename] = self.task

        self.FakeAction = FakeAction
        self.task_clients = TaskClient(fake_dbi, 'localhost', self.wf,
                                       port=TEST_PORT)
Ejemplo n.º 5
0
def main(args):
    """Load new observation sessions from the Librarian into the RTP.

    Queries the Librarian for sessions not yet marked as RTP-ingested,
    builds observation records (filling in low/high time neighbors per
    polarization), drops records already present in the RTP database,
    adds the remainder, and finally marks each file in the Librarian as
    ingested.
    """
    o = optparse.OptionParser()
    o.set_usage('load_observations_librarian.py')
    o.set_description(__doc__)
    o.add_option(
        '--connection',
        help=
        'the name of the Librarian connection to use (as in ~/.hl_client.cfg)')
    o.add_option('--config_file',
                 help='RTP configuration file default=RTP/etc/still.cfg',
                 default='etc/still.cfg')
    o.add_option(
        '--source',
        help=
        'Only load files originating from the named "source" (default "%default")',
        default='correlator')
    opts, args = o.parse_args(args)

    # Some boilerplate to set up the database interface ...
    spawner = SpawnerClass()
    workflow = WorkFlow()
    spawner.config_file = os.path.join(basedir, opts.config_file)
    process_client_config_file(spawner, workflow)
    dbi = get_dbi_from_config(spawner.config_file)
    dbi.test_db()

    # Get the list of potentially-relevant files from the Librarian.

    lc = hera_librarian.LibrarianClient(opts.connection)
    try:
        listing = lc.describe_session_without_event(opts.source,
                                                    rtp_ingested_key)
    except hera_librarian.RPCError as e:
        # BUG FIX: Exception.message does not exist on Python 3; format the
        # exception object itself (works on both Python 2 and 3).
        print('RPC to librarian failed: %s' % e)
        sys.exit(1)

    if not listing['any_matching']:
        print('No new sessions.')
        return

    # For each record we get a dict of at least
    #
    #   date        -- the start Julian Date of the observation
    #   pol         -- the polarization of the data ("xx" or "yy")
    #   store_path  -- the path of a file instance *within* a store
    #   path_prefix -- the store's path prefix, used to construct full paths
    #   host        -- the hostname of the store
    #   length      -- the duration of the observation in days
    #
    # This is a pretty good start ... because of course the Librarian's API
    # call has been engineered to give us what we need.

    def augment_record(r):
        """Convert a Librarian session record into an RTP observation dict."""
        return {
            'obsnum': os.path.basename(
                r['store_path']),  # NOTE: this is actually free text
            'date': r['date'],
            'date_type': 'julian',
            'pol': r['pol'],
            'host': r['host'],
            'filename': os.path.join(r['path_prefix'], r['store_path']),
            'path_prefix': r['path_prefix'],
            'outputhost': '',
            'status': initial_status,
            'length': r['length'],
        }

    obsinfos = [augment_record(r) for r in listing['info']]

    # Now we need to fill in the "neighbor" information.

    pols = set(oi['pol'] for oi in obsinfos)

    for pol in pols:
        soi = sorted((oi for oi in obsinfos if oi['pol'] == pol),
                     key=lambda oi: oi['date'])

        # BUG FIX: xrange is Python 2 only, while this function already relies
        # on the Python 3 print function; range works on both.
        for i in range(len(soi)):
            oi_this = soi[i]

            if i > 0:
                oi_prev = soi[i - 1]

                # Neighbors must be adjacent in time: within 1.2x the
                # observation length (allows a little slop between files).
                if (oi_this['date'] - oi_prev['date']) < (1.2 *
                                                          oi_this['length']):
                    oi_this['neighbor_low'] = oi_prev['date']

            if i < len(soi) - 1:
                oi_next = soi[i + 1]

                if (oi_next['date'] - oi_this['date']) < (1.2 *
                                                          oi_this['length']):
                    oi_this['neighbor_high'] = oi_next['date']

    # Now that we've computed everything, avoid duplicating files that we
    # already know about. We shouldn't end up ever trying to submit
    # duplicates, but in practice ...

    from sqlalchemy.orm.exc import NoResultFound

    def not_already_seen(oi):
        """Return True if this obsnum is not yet in the RTP database."""
        try:
            obs = dbi.get_obs(oi['obsnum'])
            return False
        except NoResultFound:
            return True

    n_before = len(obsinfos)
    obsinfos = [oi for oi in obsinfos if not_already_seen(oi)]
    if len(obsinfos) != n_before:
        print('Dropping %d already-ingested records.' %
              (n_before - len(obsinfos)))

    if not len(obsinfos):
        print('Nothing to add.')
        return

    # Try ingesting into the RTP.

    try:
        print('Attempting to add %d observations to the still ...' %
              len(obsinfos))
        dbi.add_observations(obsinfos, initial_status)
    except Exception as e:
        print('addition failed! here\'s what was attempted:', file=sys.stderr)
        print('', file=sys.stderr)
        for oi in obsinfos:
            print(oi, file=sys.stderr)
        raise

    # Add events to the Librarian indicating that these files were
    # successfully ingested into the RTP.

    for oi in obsinfos:
        lc.create_file_event(os.path.basename(oi['filename']),
                             rtp_ingested_key)
Ejemplo n.º 6
0
# Command-line options for the reset-by-Julian-date script.
parser.add_argument('--config_file',
                    dest='config_file',
                    required=False,
                    help="Specify the complete path to the config file")

parser.add_argument('jds',
                    nargs='+',
                    type=str,
                    metavar='JD',
                    help="List of integer julian dates to reset.")

# Fall back to the stock config relative to the install base directory.
parser.set_defaults(config_file="%setc/still.cfg" % basedir)

args = parser.parse_args()

sg = SpawnerClass()
wf = WorkFlow()

sg.config_file = args.config_file
process_client_config_file(sg, wf)
# NOTE(review): no --status option is added in the lines visible here;
# presumably it is defined where `parser` is created -- verify before running.
if args.status == '':
    # Empty status means "reset to the first action of the configured workflow".
    args.status = wf.workflow_actions[0]

# connect to the database
dbi = StillDataBaseInterface(sg.dbhost,
                             sg.dbport,
                             sg.dbtype,
                             sg.dbname,
                             sg.dbuser,
                             sg.dbpasswd,
                             test=False)
Ejemplo n.º 7
0
def main(args):
    """Print an RTP processing-status summary for one Julian date.

    Parses command-line options, connects to the database named in the
    config file, and reports how many of that night's observations are
    complete, currently processing, or failed/killed.
    """
    # define option parsing function
    o = optparse.OptionParser()
    o.set_usage('rtp_summary.py')
    o.set_description(__doc__)
    o.add_option('--config_file', help='RTP configuration file; default=etc/rtp_hera_h1c.cfg',
                 default='etc/rtp_hera_h1c.cfg', type=str)
    o.add_option('--date', help='JD for which to generate the status report;'
                 ' defaults to the current JD, minus 1 (i.e., the previous night\'s observation)',
                 default=0, type=int)
    opts, args = o.parse_args(args)

    # create a database interface
    spawner = SpawnerClass()
    spawner.config_file = os.path.join(basedir, opts.config_file)
    dbi = get_dbi_from_config(spawner.config_file)
    dbi.test_db()

    # summarize the status of the specified JD's observation
    if opts.date == 0:
        # get today's JD, subtract one to get yesterday, and make a new time
        # object for computing the (Gregorian) calendar date
        t = Time.now()
        jd_of_interest = int(t.jd) - 1
        t = Time(val=jd_of_interest, format='jd')
    else:
        t = Time(val=opts.date, format='jd')
        jd_of_interest = opts.date
    # convert JD to unix epoch to create datetime object, for writing human dates
    date = datetime.datetime.fromtimestamp(t.unix)
    datestr = date.strftime('%a %b %d, %Y')

    # query database
    # BUG FIX: the original only closed the session on the happy path, leaking
    # it when there were no observations (or on an exception). .all()
    # materializes the rows, so the session can be closed immediately.
    s = dbi.Session()
    try:
        obsnums = s.query(Observation).filter(
            Observation.obsnum.like('zen.{:d}%'.format(jd_of_interest))).all()
    finally:
        s.close()
    nobs = len(obsnums)

    if nobs == 0:
        print("No observations for JD {0:d} ({1})\n".format(jd_of_interest, datestr))
        return
    else:
        # categorize observations
        ncomplete = 0
        nworking = 0
        nfailed = 0
        for obs in obsnums:
            if obs.status == "COMPLETE":
                ncomplete += 1
            elif (obs.current_stage_in_progress == "FAILED"
                  or obs.current_stage_in_progress == "KILLED"):
                nfailed += 1
            else:
                nworking += 1

        # make sure we didn't have any observations fall through the cracks
        if ncomplete + nfailed + nworking != nobs:
            print("Had {:d} total observations, {:d} complete, {:d} working, {:d} failed;"
                  " totals don\'t match!".format(nobs, ncomplete, nworking, nfailed))
            return

        # write out report (percentages truncated to whole numbers)
        pct_comp = ncomplete / nobs * 100
        pct_work = nworking / nobs * 100
        pct_fail = nfailed / nobs * 100
        print("RTP report for JD {0:d} ({1})\n".format(jd_of_interest, datestr))
        print("Number of observations: {:d}".format(nobs))
        print("Number finished processing: {0:d} ({1:d}%)".format(ncomplete, int(pct_comp)))
        print("Number currently processing: {0:d} ({1:d}%)".format(nworking, int(pct_work)))
        print("Number failed: {0:d} ({1:d}%)".format(nfailed, int(pct_fail)))
        return
Ejemplo n.º 8
0
 def setUp(self):
     """Wire up the client workflow configuration and a fake database interface."""
     self.sg = SpawnerClass()
     self.sg.config_file = "still_test_paper.cfg"
     self.wf = WorkFlow()
     process_client_config_file(self.sg, self.wf)
     self.dbi = FakeDataBaseInterface()