Example #1
    def run(self):
        """
        Run the script
        """
        # Switch screen logging on in debug mode
        if self._logDebug():
            self._log.cout(True)

        # Get parameters
        self._get_parameters()

        # Write observation into logger
        if self._logTerse():
            self._log('\n')
            self._log.header1(gammalib.number('Observation',len(self._obs)))
            self._log(str(self._obs))
            self._log('\n')

        # Signal if stacked analysis is requested
        stacked = self._obs.size() > 1 and self._enumbins > 0

        # If there are several observations and a binned analysis is
        # requested, prepare the stacked IRFs
        if stacked:
            self._set_stacked_irf()

        # Write header
        if self._logTerse():
            self._log('\n')
            self._log.header1('Generate pull distribution')

        # Loop over trials
        for seed in range(self._ntrials):

            # Perform a trial, offsetting the loop index by the initial seed
            result = self._trial(seed + self._seed, stacked)

            # Write out result immediately; the first trial creates the
            # output file and writes the header row, subsequent trials
            # append to the existing file
            if seed == 0:
                f      = open(self._outfile.url(), 'w')
                writer = csv.DictWriter(f, result['colnames'])
                headers = {}
                for n in result['colnames']:
                    headers[n] = n
                writer.writerow(headers)
            else:
                f      = open(self._outfile.url(), 'a')
                writer = csv.DictWriter(f, result['colnames'])
            writer.writerow(result['values'])
            f.close()

        # Return
        return
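
A note on the output format: the loop above writes one CSV row per trial, with a header row written on the first trial only. Below is a minimal sketch of reading such a file back with the standard csv module; the file name 'pull.csv' and the column name 'Pull_Crab' are illustrative, since the actual names depend on the outfile parameter and on the columns returned by _trial().

import csv

# Read the pull-distribution results back (file and column names are
# placeholders; the real ones depend on the script's parameters)
with open('pull.csv', 'r') as f:
    reader = csv.DictReader(f)
    rows   = list(reader)

# Convert one hypothetical column to floats for further analysis
pulls = [float(row['Pull_Crab']) for row in rows]
print('Number of trials:', len(pulls))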
Example #2
    def run(self):
        """
        Run the script
        """
        # Switch screen logging on in debug mode
        if self._logDebug():
            self._log.cout(True)

        # Get parameters
        self._get_parameters()

        # Write observation into logger
        self._log_observations(gammalib.NORMAL, self.obs(), 'Observation')

        # Get time boundaries
        tmin = self._tbins.tstart(0)
        tmax = self._tbins.tstop(self._tbins.size() - 1)

        # Select events
        select = ctools.ctselect(self.obs())
        select['emin'] = self['emin'].real()
        select['emax'] = self['emax'].real()
        select['tmin'] = tmin.convert(ctools.time_reference)
        select['tmax'] = tmax.convert(ctools.time_reference)
        select['rad'] = 'UNDEFINED'
        select['ra'] = 'UNDEFINED'
        select['dec'] = 'UNDEFINED'
        select.run()

        # Extract observations
        self.obs(select.obs().copy())

        # Write observation into logger
        self._log_header1(
            gammalib.TERSE,
            gammalib.number('Selected observation', len(self.obs())))

        # Adjust model parameters depending on user parameters
        self._adjust_model_pars()

        # Write header
        self._log_header1(gammalib.TERSE, 'Generate lightcurve')

        # Initialise list of result dictionaries
        results = []

        # Get names of free source model parameters
        pars = self._get_free_par_names()

        # Loop over time bins
        for i in range(self._tbins.size()):

            # Get time boundaries
            tmin = self._tbins.tstart(i)
            tmax = self._tbins.tstop(i)

            # Write time bin into header
            self._log_header2(gammalib.TERSE,
                              'MJD %f - %f ' % (tmin.mjd(), tmax.mjd()))

            # Compute time bin center and half width
            twidth = 0.5 * (tmax - tmin)  # in seconds
            tmean = tmin + twidth

            # Initialise result dictionary
            result = {
                'mjd': tmean.mjd(),
                'e_mjd': twidth / gammalib.sec_in_day,
                'ts': 0.0,
                'ul_diff': 0.0,
                'ul_flux': 0.0,
                'ul_eflux': 0.0,
                'pars': pars,
                'values': {}
            }

            # Log information
            self._log_header3(gammalib.EXPLICIT, 'Selecting events')

            # Select events
            select = ctools.ctselect(self.obs())
            select['emin'] = self['emin'].real()
            select['emax'] = self['emax'].real()
            select['tmin'] = tmin.convert(ctools.time_reference)
            select['tmax'] = tmax.convert(ctools.time_reference)
            select['rad'] = 'UNDEFINED'
            select['ra'] = 'UNDEFINED'
            select['dec'] = 'UNDEFINED'
            select.run()

            # Retrieve observation
            obs = select.obs()

            # Deal with stacked and On/Off Observations
            if self._stacked or self._onoff:

                # If a stacked analysis is requested bin the events
                # and compute the stacked response functions and setup
                # an observation container with a single stacked observation.
                if self._stacked:
                    new_obs = obsutils.get_stacked_obs(self, obs)

                # ... otherwise if On/Off analysis is requested generate
                # the On/Off observations and response
                elif self._onoff:
                    new_obs = obsutils.get_onoff_obs(self, obs)

                # Extract models
                models = new_obs.models()

                # Fix background models if required
                if self['fix_bkg'].boolean():
                    for model in models:
                        if model.classname() != 'GModelSky':
                            for par in model:
                                par.fix()

                # Put back models
                new_obs.models(models)

                # Continue with the new observation container
                obs = new_obs

            # Header
            self._log_header3(gammalib.EXPLICIT, 'Fitting the data')

            # Do maximum likelihood model fitting
            if obs.size() > 0:
                like = ctools.ctlike(obs)
                like['edisp'] = self['edisp'].boolean()
                like.run()

                # Skip bin if no events were present
                if like.obs().logL() == 0.0:

                    # Signal skipping of bin
                    self._log_value(gammalib.TERSE, 'Warning',
                                    'No event in this time bin, skip bin.')

                    # Set all results to 0
                    for par in pars:
                        result['values'][par] = 0.0
                        result['values']['e_' + par] = 0.0

                    # Append result
                    results.append(result)

                    # Continue with next time bin
                    continue

                # Retrieve model fitting results for source of interest
                source = like.obs().models()[self._srcname]

                # Extract parameter values
                for par in pars:
                    result['values'][par] = source[par].value()
                    result['values']['e_' + par] = source[par].error()

                # Calculate upper limit (-1 if not computed)
                ul_diff, ul_flux, ul_eflux = self._compute_ulimit(obs)
                if ul_diff > 0.0:
                    result['ul_diff'] = ul_diff
                    result['ul_flux'] = ul_flux
                    result['ul_eflux'] = ul_eflux

                # Extract Test Statistic value
                if self['calc_ts'].boolean():
                    result['ts'] = source.ts()

                # Append result to list of dictionaries
                results.append(result)

                # Log results for this time bin
                self._log.header3('Results')
                pars = self._get_free_par_names()
                for par in pars:
                    value = source[par].value()
                    error = source[par].error()
                    unit = source[par].unit()
                    self._log_value(
                        gammalib.NORMAL, par,
                        str(value) + ' +/- ' + str(error) + ' ' + unit)
                if ul_diff > 0.0:
                    self._log_value(gammalib.NORMAL,
                                    'Upper differential flux limit',
                                    str(result['ul_diff']) + ' ph/cm2/s/MeV')
                    self._log_value(gammalib.NORMAL, 'Upper photon flux limit',
                                    str(result['ul_flux']) + ' ph/cm2/s')
                    self._log_value(gammalib.NORMAL, 'Upper energy flux limit',
                                    str(result['ul_eflux']) + ' erg/cm2/s')
                if self['calc_ts'].boolean():
                    self._log_value(gammalib.NORMAL, 'Test Statistic',
                                    result['ts'])

            # Otherwise, if the observation container is empty, signal that
            # the bin is skipped and fill the results table with zeros
            else:
                self._log_value(
                    gammalib.TERSE, 'Warning',
                    'No observations available in this time bin, '
                    'skip bin.')

                # Set all results to 0
                for par in pars:
                    result['values'][par] = 0.0
                    result['values']['e_' + par] = 0.0

                # Append result
                results.append(result)

                # Continue with next time bin
                continue

        # Create FITS table from results
        table = self._create_fits_table(results)

        # Create FITS file and append FITS table to FITS file
        self._fits = gammalib.GFits()
        self._fits.append(table)

        # Optionally publish light curve
        if self['publish'].boolean():
            self.publish()

        # Return
        return
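
The results are converted into a FITS table and kept in an in-memory gammalib.GFits object. As a minimal sketch, assuming the light curve has been saved to a file named 'lightcurve.fits' with columns named 'MJD' and 'e_MJD' (these names are illustrative, since the actual layout is defined by _create_fits_table()), the table could be inspected with astropy:

from astropy.io import fits

# Open the light-curve FITS file and access the binary table in the
# first extension (file and column names are placeholders)
with fits.open('lightcurve.fits') as hdulist:
    table = hdulist[1].data
    mjd   = table['MJD']     # time bin centres
    e_mjd = table['e_MJD']   # half bin widths in days
    print('Number of time bins:', len(mjd))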
Example #3
    def _parse_workflow(self):
        """
        Parse the workflow XML definition
        """

        # Get workflow
        workflow = self._workflow.element('workflow')
        
        # Get number of actors
        num_actors = workflow.elements('actor')

        # Initialise actors
        self._actors = []

        # Loop over all actors
        for i in range(num_actors):
        
            # Get actor
            actor = workflow.element('actor', i)

            # Initialise parameters and input actors
            input_parameters  = []
            output_parameters = []
            input_actors      = []
            output_actors     = []

            # Get actor attributes
            name = actor.attribute('name')
            tool = actor.attribute('tool')

            # Get actor input parameters
            if actor.elements('input') > 0:
                actor_inputs = actor.element('input')
                num_inputs   = actor_inputs.elements('parameter')
                for k in range(num_inputs):
                    input_par   = actor_inputs.element('parameter', k)
                    input_name  = input_par.attribute('name')
                    input_value = input_par.attribute('value')
                    input_actor = input_par.attribute('actor')
                    parameter   = {'name': input_name,
                                   'value': input_value,
                                   'actor': input_actor}
                    input_parameters.append(parameter)
                    if input_actor != '':
                        if input_actor not in input_actors:
                            input_actors.append(input_actor)

            # Get actor output parameters
            if actor.elements('output') > 0:
                actor_output = actor.element('output')
                num_outputs  = actor_output.elements('parameter')
                for k in range(num_outputs):
                    output_par   = actor_output.element('parameter', k)
                    output_name  = output_par.attribute('name')
                    output_value = output_par.attribute('value')
                    output_actor = output_par.attribute('actor')
                    parameter    = {'name': output_name,
                                    'value': output_value,
                                    'actor': output_actor}
                    output_parameters.append(parameter)
                    if output_actor != '':
                        if output_actor not in output_actors:
                            output_actors.append(output_actor)

            # Determine number of dependencies
            num_inputs = len(input_actors)

            # Set actor status
            if num_inputs > 0:
                status = 'waiting for input'
            else:
                status = 'ready'

            # Create actor entry
            entry = {'name': name,
                     'tool': tool,
                     'input_parameters': input_parameters,
                     'input_actors': input_actors,
                     'output_parameters': output_parameters,
                     'output_actors': output_actors,
                     'status': status}

            # Append entry
            self._actors.append(entry)

            # Log information about actors
            self._log_value(gammalib.NORMAL, 'Actor "%s"' % name, tool)

            # Compute list of predecessors
            if num_inputs == 0:
                predecessors = 'none'
            else:
                predecessors = ''
                for k in range(num_inputs):
                    if k > 0:
                        predecessors += ', '
                    predecessors += '"'+input_actors[k]+'"'
 
            # Log predecessors
            self._log_value(gammalib.NORMAL,
                            gammalib.number('  Predecessor', num_inputs),
                            predecessors)

        # Return
        return
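
The parser expects a <workflow> element containing one <actor> element per workflow node, each with 'name' and 'tool' attributes and optional <input> and <output> blocks whose <parameter> elements carry 'name', 'value' and 'actor' attributes. A minimal sketch of such a document follows; the actor and tool names are purely illustrative.

# Illustrative workflow definition matching the structure parsed by
# _parse_workflow(); actor and tool names are hypothetical
workflow_xml = """
<workflow>
  <actor name="simulate" tool="ctobssim">
    <output>
      <parameter name="outevents" value="events.fits" actor=""/>
    </output>
  </actor>
  <actor name="fit" tool="ctlike">
    <input>
      <parameter name="inobs" value="events.fits" actor="simulate"/>
    </input>
  </actor>
</workflow>
"""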
Example #4
    def run(self):
        """
        Run the script
        """
        # Switch screen logging on in debug mode
        if self._logDebug():
            self._log.cout(True)

        # Get parameters
        self._get_parameters()

        # Initialise arrays to store certain values for reuse
        # TODO: consider using a Python dictionary
        self._offsets = []
        self._zeniths = []
        self._azimuths = []
        self._pnt_ra = []
        self._pnt_dec = []
        self._ebounds = gammalib.GEbounds()
        self._gti = gammalib.GGti()
        obs_names = []

        # Initialise output to be filled
        ontime = 0.0
        livetime = 0.0
        n_events = 0
        n_eventbins = 0
        n_obs_binned = 0
        n_obs_unbinned = 0

        # Write header
        if self._logTerse():
            self._log('\n')
            self._log.header1(gammalib.number('Observation',
                                              self.obs().size()))

        # Loop over observations
        for obs in self.obs():

            # Skip non-CTA observations
            if obs.classname() != 'GCTAObservation':
                self._log('Skipping ' + obs.instrument() + ' observation\n')
                continue

            # Use observed object as observation name if name is not given
            obs_name = obs.name()
            if obs_name == '':
                obs_name = obs.object()

            # Logging
            if self._logExplicit():
                obs_id = obs.id()
                if obs_id != '':
                    log_name = obs_name + ' (ID=' + obs_id + ')'
                else:
                    log_name = obs_name
                self._log.header2(log_name)

            # Store observation name
            obs_names.append(obs_name)

            # Retrieve energy boundaries
            obs_bounds = obs.events().ebounds()

            # Retrieve time interval
            obs_gti = obs.events().gti()

            # Compute dead time fraction in percent
            deadfrac = (1.0 - obs.deadc()) * 100.0

            # Retrieve pointing and store Ra,Dec
            pnt_dir = obs.pointing().dir()
            self._pnt_ra.append(pnt_dir.ra_deg())
            self._pnt_dec.append(pnt_dir.dec_deg())

            # If available, append energy boundaries
            if obs_bounds.size() > 0:
                self._ebounds.append(obs_bounds.emin(), obs_bounds.emax())

            # Append time interval
            self._gti.append(obs_gti.tstart(), obs_gti.tstop())

            # Increment global livetime and ontime
            ontime += obs.ontime()
            livetime += obs.livetime()

            # Bookkeeping
            if obs.eventtype() == 'CountsCube':
                n_eventbins += obs.events().size()
                n_obs_binned += 1
                is_binned = 'yes'
                is_what = 'Number of bins'
            else:
                n_events += obs.events().size()
                n_obs_unbinned += 1
                is_binned = 'no'
                is_what = 'Number of events'
            self._log_value(gammalib.EXPLICIT, 'Binned', is_binned)
            self._log_value(gammalib.EXPLICIT, is_what, obs.events().size())

            # Retrieve zenith and azimuth and store for later use
            zenith = obs.pointing().zenith()
            azimuth = obs.pointing().azimuth()
            self._zeniths.append(zenith)
            self._azimuths.append(azimuth)

            # Optionally compute offset with respect to target direction
            if self._compute_offset:
                offset = pnt_dir.dist_deg(self._obj_dir)
                self._offsets.append(offset)

            # Optionally log details
            if self._logExplicit():

                # Log the observation energy range (if available)
                self._log.parformat('Energy range')
                if obs_bounds.size() == 0:
                    self._log('undefined')
                else:
                    self._log(str(obs_bounds.emin()))
                    self._log(' - ')
                    self._log(str(obs_bounds.emax()))
                self._log('\n')

                # Log observation time interval
                self._log.parformat('Time range (MJD)')
                if obs_gti.size() == 0:
                    self._log('undefined')
                else:
                    self._log(str(obs_gti.tstart().mjd()))
                    self._log(' - ')
                    self._log(str(obs_gti.tstop().mjd()))
                self._log('\n')

            # Log observation information
            self._log_value(gammalib.EXPLICIT, 'Ontime',
                            '%.3f s' % obs.ontime())
            self._log_value(gammalib.EXPLICIT, 'Livetime',
                            '%.3f s' % obs.livetime())
            self._log_value(gammalib.EXPLICIT, 'Deadtime fraction',
                            '%.3f %%' % deadfrac)
            self._log_value(gammalib.EXPLICIT, 'Pointing', pnt_dir)

            # Optionally log offset with respect to target direction
            if self._compute_offset:
                self._log_value(gammalib.EXPLICIT, 'Offset from target',
                                '%.2f deg' % offset)

            # Log Zenith and Azimuth angles
            self._log_value(gammalib.EXPLICIT, 'Zenith angle',
                            '%.2f deg' % zenith)
            self._log_value(gammalib.EXPLICIT, 'Azimuth angle',
                            '%.2f deg' % azimuth)

        # Write summary header
        self._log_header1(gammalib.NORMAL, 'Summary')

        # Log general summary
        self._log_header3(gammalib.NORMAL, 'Observations')
        self._log_value(gammalib.NORMAL, 'Unbinned observations',
                        n_obs_unbinned)
        self._log_value(gammalib.NORMAL, 'Binned observations', n_obs_binned)
        self._log_header3(gammalib.NORMAL, 'Events')
        self._log_value(gammalib.NORMAL, 'Number of events', n_events)
        self._log_value(gammalib.NORMAL, 'Number of bins', n_eventbins)

        # Compute mean offset, azimuth and zenith angle
        if len(self._offsets) > 0:
            mean_offset = '%.2f deg' % (sum(self._offsets) /
                                        len(self._offsets))
        else:
            mean_offset = 'Unknown'
        if len(self._zeniths) > 0:
            mean_zenith = '%.2f deg' % (sum(self._zeniths) /
                                        len(self._zeniths))
        else:
            mean_zenith = 'Unknown'
        if len(self._azimuths) > 0:
            mean_azimuth = '%.2f deg' % (sum(self._azimuths) /
                                         len(self._azimuths))
        else:
            mean_azimuth = 'Unknown'

        # Log mean offset, azimuth and zenith angle
        self._log_header3(gammalib.NORMAL, 'Pointings')
        self._log_value(gammalib.NORMAL, 'Mean offset angle', mean_offset)
        self._log_value(gammalib.NORMAL, 'Mean zenith angle', mean_zenith)
        self._log_value(gammalib.NORMAL, 'Mean azimuth angle', mean_azimuth)

        # Optionally log names of observations. Note that the set class is
        # used to extract all different observation names from the list of
        # observation names, and the set class is only available from
        # Python 2.4 on.
        if sys.version_info >= (2, 4):
            obs_set = set(obs_names)
            for name in obs_set:
                self._log_value(gammalib.EXPLICIT, '"' + name + '"',
                                obs_names.count(name))

        # Get energy boundary information
        if self._ebounds.size() == 0:
            min_value = 'undefined'
            max_value = 'undefined'
        else:
            min_value = str(self._ebounds.emin())
            max_value = str(self._ebounds.emax())

        # Log energy range
        self._log_header3(gammalib.NORMAL, 'Energy range')
        self._log_value(gammalib.NORMAL, 'Minimum energy', min_value)
        self._log_value(gammalib.NORMAL, 'Maximum energy', max_value)

        # Log time range
        mjd = '%.3f - %.3f' % (self._gti.tstart().mjd(),
                               self._gti.tstop().mjd())
        utc = '%s - %s' % (self._gti.tstart().utc(), self._gti.tstop().utc())
        self._log_header3(gammalib.NORMAL, 'Time range')
        self._log_value(gammalib.NORMAL, 'MJD (days)', mjd)
        self._log_value(gammalib.NORMAL, 'UTC', utc)

        # Log ontime and livetime in different units
        on_time   = '%.2f s = %.2f min = %.2f h' % \
                    (ontime, ontime/60., ontime/3600.)
        live_time = '%.2f s = %.2f min = %.2f h' % \
                    (livetime, livetime/60., livetime/3600.)
        self._log_value(gammalib.NORMAL, 'Total ontime', on_time)
        self._log_value(gammalib.NORMAL, 'Total livetime', live_time)

        # Return
        return
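
Most of the bookkeeping above only needs methods of the observation container and its observations (classname(), ontime(), livetime()). As a minimal sketch, assuming an observation definition XML file named 'obs.xml' (the file name is illustrative), the ontime and livetime totals could be reproduced outside the script class as follows:

import gammalib

# Load an observation container from an observation definition XML file
observations = gammalib.GObservations('obs.xml')

# Accumulate ontime and livetime over all CTA observations, mirroring
# the loop in the run() method above
ontime   = 0.0
livetime = 0.0
for obs in observations:
    if obs.classname() != 'GCTAObservation':
        continue
    ontime   += obs.ontime()
    livetime += obs.livetime()

print('Total ontime  : %.2f s' % ontime)
print('Total livetime: %.2f s' % livetime)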
Example #5
    def run(self):
        """
        Run the script
        """
        # Switch screen logging on in debug mode
        if self._logDebug():
            self._log.cout(True)

        # Get parameters
        self._get_parameters()

        # Initialise arrays to store certain values for reuse
        # TODO: consider using a Python dictionary
        self._offsets = []
        self._zeniths = []
        self._azimuths = []
        self._pnt_ra = []
        self._pnt_dec = []
        self._ebounds = gammalib.GEbounds()
        self._gti = gammalib.GGti()
        obs_names = []

        # Initialise output to be filled
        ontime = 0.0
        livetime = 0.0
        n_events = 0
        n_eventbins = 0
        n_obs_binned = 0
        n_obs_unbinned = 0

        # Write header
        if self._logTerse():
            self._log('\n')
            self._log.header1(gammalib.number('Observation', self._obs.size()))

        # Loop over observations
        for obs in self._obs:

            # Skip non-CTA observations
            if obs.classname() != 'GCTAObservation':
                self._log('Skipping ' + obs.instrument() + ' observation\n')
                continue

            # Use observed object as observation name if name is not given
            obs_name = obs.name()
            if obs_name == '':
                obs_name = obs.object()

            # Logging
            if self._logTerse():
                self._log.header2(obs_name)

            # Store observation name
            obs_names.append(obs_name)

            # Retrieve energy boundaries
            obs_bounds = obs.events().ebounds()

            # Retrieve time interval
            obs_gti = obs.events().gti()

            # Compute dead time fraction in percent
            deadfrac = (1.0 - obs.deadc()) * 100.0

            # Retrieve pointing and store Ra,Dec
            pnt_dir = obs.pointing().dir()
            self._pnt_ra.append(pnt_dir.ra_deg())
            self._pnt_dec.append(pnt_dir.dec_deg())

            # If available, append energy boundaries
            if obs_bounds.size() > 0:
                self._ebounds.append(obs_bounds.emin(), obs_bounds.emax())

            # Append time interval
            self._gti.append(obs_gti.tstart(), obs_gti.tstop())

            # Increment global livetime and ontime
            ontime += obs.ontime()
            livetime += obs.livetime()

            # Bookkeeping
            if obs.eventtype() == 'CountsCube':
                n_eventbins += obs.events().size()
                n_obs_binned += 1
                if self._logTerse():
                    self._log.parformat('Binned')
                    self._log('yes\n')
                    self._log.parformat('Number of bins')
                    self._log(str(obs.events().size()))
                    self._log('\n')
            else:
                n_events += obs.events().size()
                n_obs_unbinned += 1
                if self._logTerse():
                    self._log.parformat('Binned')
                    self._log('no\n')
                    self._log.parformat('Number of events')
                    self._log(str(obs.events().size()))
                    self._log('\n')

            # Retrieve zenith and azimuth and store for later use
            zenith = obs.pointing().zenith()
            azimuth = obs.pointing().azimuth()
            self._zeniths.append(zenith)
            self._azimuths.append(azimuth)

            # Optionally compute offset with respect to target direction
            if self._compute_offset:
                offset = pnt_dir.dist_deg(self._obj_dir)
                self._offsets.append(offset)
            else:
                self._offsets.append(-1.0)

            # Optionally log details
            if self._logTerse():

                # Log the observation energy range (if available)
                self._log.parformat('Energy range')
                if obs_bounds.size() == 0:
                    self._log('undefined')
                else:
                    self._log(str(obs_bounds.emin()))
                    self._log(' - ')
                    self._log(str(obs_bounds.emax()))
                self._log('\n')

                # Log observation time interval
                self._log.parformat('Time range (MJD)')
                if obs_gti.size() == 0:
                    self._log('undefined')
                else:
                    self._log(str(obs_gti.tstart().mjd()))
                    self._log(' - ')
                    self._log(str(obs_gti.tstop().mjd()))
                self._log('\n')

                # Log ontime
                self._log.parformat('Ontime')
                self._log(str(obs.ontime()))
                self._log(' s\n')

                # Log livetime
                self._log.parformat('Livetime')
                self._log(str(obs.livetime()))
                self._log(' s\n')

                # Log dead time fraction
                self._log.parformat('Deadtime fraction (%)')
                self._log('%.3f' % (deadfrac))
                self._log('\n')

                # Log pointing direction
                self._log.parformat('Pointing')
                self._log(str(pnt_dir))
                self._log('\n')

                # Optionally log offset with respect to target direction
                if self._compute_offset:
                    self._log.parformat('Offset from target')
                    self._log('%.2f' % (offset))
                    self._log(' deg\n')

                # Log Zenith and Azimuth if required
                self._log.parformat('Zenith angle')
                self._log('%.2f' % (zenith))
                self._log(' deg\n')
                self._log.parformat('Azimuth angle')
                self._log('%.2f' % (azimuth))
                self._log(' deg\n')

        # Log summary
        if self._logTerse():

            # Write header
            self._log('\n')
            self._log.header1('Summary')

            # Log general summary
            self._log.header3('Observations')
            self._log.parformat('Unbinned observations')
            self._log(str(n_obs_unbinned))
            self._log('\n')
            self._log.parformat('Binned observations')
            self._log(str(n_obs_binned))
            self._log('\n')
            self._log.header3('Events')
            self._log.parformat('Number of events')
            self._log(str(n_events))
            self._log('\n')
            self._log.parformat('Number of bins')
            self._log(str(n_eventbins))
            self._log('\n')

            # Log pointing summary
            self._log.header3('Pointings')

            # Log mean offset if possible
            if self._compute_offset:
                self._log.parformat('Mean offset angle')
                self._log('%.2f' % (sum(self._offsets) / len(self._offsets)))
                self._log(' deg\n')

            # Log mean azimuth and zenith angle
            self._log.parformat('Mean zenith angle')
            self._log('%.2f' % (sum(self._zeniths) / len(self._zeniths)))
            self._log(' deg\n')
            self._log.parformat('Mean azimuth angle')
            self._log('%.2f' % (sum(self._azimuths) / len(self._azimuths)))
            self._log(' deg\n')

            # Optionally log names of observations. Note that the set class
            # is used to extract all different observation names from the
            # list of observation names, and the set class is only available
            # from Python 2.4 on.
            if self._logExplicit() and sys.version_info >= (2, 4):
                obs_set = set(obs_names)
                for name in obs_set:
                    self._log.parformat('"' + name + '"')
                    self._log(str(obs_names.count(name)))
                    self._log('\n')
                self._log('\n')

            # Log energy range
            self._log.header3('Energy range')
            self._log.parformat('Minimum energy')
            if self._ebounds.size() == 0:
                self._log('undefined')
            else:
                self._log(str(self._ebounds.emin()))
            self._log('\n')
            self._log.parformat('Maximum energy')
            if self._ebounds.size() == 0:
                self._log('undefined')
            else:
                self._log(str(self._ebounds.emax()))
            self._log('\n')

            # Log time range
            self._log.header3('Time range')
            self._log.parformat('Start (MJD)')
            self._log(str(self._gti.tstart().mjd()))
            self._log('\n')
            self._log.parformat('Stop (MJD)')
            self._log(str(self._gti.tstop().mjd()))
            self._log('\n')

            # Log ontime and livetime in different units
            self._log.parformat('Total ontime')
            self._log('%.2f s = %.2f min = %.2f h' %
                      (ontime, ontime / 60., ontime / 3600.))
            self._log('\n')
            self._log.parformat('Total livetime')
            self._log('%.2f s = %.2f min = %.2f h' %
                      (livetime, livetime / 60.0, livetime / 3600.))
            self._log('\n')

        # Return
        return
Example #6
    def run(self):
        """
        Run the script
        """
        # Switch screen logging on in debug mode
        if self._logDebug():
            self._log.cout(True)

        # Get parameters
        self._get_parameters()

        # Write observation into logger
        self._log_observations(gammalib.NORMAL, self.obs(), 'Observation')

        # Get time boundaries
        tmin = self._tbins.tstart(0)
        tmax = self._tbins.tstop(self._tbins.size()-1)

        # Select events
        select = ctools.ctselect(self.obs())
        select['emin'] = self['emin'].real()
        select['emax'] = self['emax'].real()
        select['tmin'] = tmin.convert(ctools.time_reference)
        select['tmax'] = tmax.convert(ctools.time_reference)
        select['rad']  = 'UNDEFINED'
        select['ra']   = 'UNDEFINED'
        select['dec']  = 'UNDEFINED'
        select.run()

        # Extract observations
        self.obs(select.obs().copy())

        # Write observation into logger
        self._log_header1(gammalib.TERSE,
                          gammalib.number('Selected observation',
                                          len(self.obs())))

        # Adjust model parameters depending on user parameters
        self._adjust_model_pars()

        # Write header
        self._log_header1(gammalib.TERSE, 'Generate lightcurve')

        # Initialise list of result dictionaries
        results = []

        # Get names of free source model parameters
        pars = self._get_free_par_names()

        # Loop over time bins
        for i in range(self._tbins.size()):

            # Get time boundaries
            tmin = self._tbins.tstart(i)
            tmax = self._tbins.tstop(i)

            # Write time bin into header
            self._log_header2(gammalib.TERSE, 'MJD %f - %f ' %
                              (tmin.mjd(), tmax.mjd()))

            # Compute time bin center and half width
            twidth = 0.5 * (tmax - tmin) # in seconds
            tmean  = tmin + twidth

            # Initialise result dictionary
            result = {'mjd': tmean.mjd(),
                      'e_mjd': twidth / gammalib.sec_in_day,
                      'ts': 0.0,
                      'ul_diff': 0.0,
                      'ul_flux': 0.0,
                      'ul_eflux': 0.0,
                      'pars': pars,
                      'values': {}}

            # Log information
            self._log_header3(gammalib.EXPLICIT, 'Selecting events')

            # Select events
            select = ctools.ctselect(self.obs())
            select['emin'] = self['emin'].real()
            select['emax'] = self['emax'].real()
            select['tmin'] = tmin.convert(ctools.time_reference)
            select['tmax'] = tmax.convert(ctools.time_reference)
            select['rad']  = 'UNDEFINED'
            select['ra']   = 'UNDEFINED'
            select['dec']  = 'UNDEFINED'
            select.run()

            # Retrieve observation
            obs = select.obs()

            # Deal with stacked and On/Off Observations
            if self._stacked or self._onoff:

                # If a stacked analysis is requested bin the events
                # and compute the stacked response functions and setup
                # an observation container with a single stacked observation.
                if self._stacked:
                    new_obs = obsutils.get_stacked_obs(self, obs)

                # ... otherwise if On/Off analysis is requested generate
                # the On/Off observations and response
                elif self._onoff:
                    new_obs = obsutils.get_onoff_obs(self, obs)

                # Extract models
                models = new_obs.models()

                # Fix background models if required
                if self['fix_bkg'].boolean():
                    for model in models:
                        if model.classname() != 'GModelSky':
                            for par in model:
                                par.fix()

                # Put back models
                new_obs.models(models)

                # Continue with the new observation container
                obs = new_obs

            # Header
            self._log_header3(gammalib.EXPLICIT, 'Fitting the data')

            # Do maximum likelihood model fitting
            if obs.size() > 0:
                like = ctools.ctlike(obs)
                like['edisp'] = self['edisp'].boolean()
                like.run()

                # Skip bin if no events were present
                if like.obs().logL() == 0.0:

                    # Signal skipping of bin
                    self._log_value(gammalib.TERSE, 'Warning',
                                    'No event in this time bin, skip bin.')

                    # Set all results to 0
                    for par in pars:
                        result['values'][par]      = 0.0
                        result['values']['e_'+par] = 0.0

                    # Append result
                    results.append(result)

                    # Continue with next time bin
                    continue

                # Retrieve model fitting results for source of interest
                source = like.obs().models()[self._srcname]

                # Extract parameter values
                for par in pars:
                    result['values'][par]      = source[par].value()
                    result['values']['e_'+par] = source[par].error()

                # Calculate upper limit (-1 if not computed)
                ul_diff, ul_flux, ul_eflux = self._compute_ulimit(obs)
                if ul_diff > 0.0:
                    result['ul_diff']  = ul_diff
                    result['ul_flux']  = ul_flux
                    result['ul_eflux'] = ul_eflux

                # Extract Test Statistic value
                if self['calc_ts'].boolean():
                    result['ts'] = source.ts()

                # Append result to list of dictionaries
                results.append(result)

                # Log results for this time bin
                self._log.header3('Results')
                pars = self._get_free_par_names()
                for par in pars:
                    value = source[par].value()
                    error = source[par].error()
                    unit  = source[par].unit()
                    self._log_value(gammalib.NORMAL, par,
                                    str(value)+' +/- '+str(error)+' '+unit)
                if ul_diff > 0.0:
                    self._log_value(gammalib.NORMAL,
                                    'Upper differential flux limit',
                                    str(result['ul_diff'])+' ph/cm2/s/MeV')
                    self._log_value(gammalib.NORMAL, 'Upper photon flux limit',
                                    str(result['ul_flux'])+' ph/cm2/s')
                    self._log_value(gammalib.NORMAL, 'Upper energy flux limit',
                                    str(result['ul_eflux'])+' erg/cm2/s')
                if self['calc_ts'].boolean():
                    self._log_value(gammalib.NORMAL, 'Test Statistic', result['ts'])

            # Otherwise, if the observation container is empty, signal that
            # the bin is skipped and fill the results table with zeros
            else:
                self._log_value(gammalib.TERSE, 'Warning',
                                'No observations available in this time bin, '
                                'skip bin.')

                # Set all results to 0
                for par in pars:
                    result['values'][par]        = 0.0
                    result['values']['e_' + par] = 0.0

                # Append result
                results.append(result)

                # Continue with next time bin
                continue

        # Create FITS table from results
        table = self._create_fits_table(results)

        # Create FITS file and append FITS table to FITS file
        self._fits = gammalib.GFits()
        self._fits.append(table)

        # Optionally publish light curve
        if self['publish'].boolean():
            self.publish()

        # Return
        return
Example #7
    def _parse_workflow(self):
        """
        Parse the workflow XML definition
        """

        # Get workflow
        workflow = self._workflow.element('workflow')

        # Get number of actors
        num_actors = workflow.elements('actor')

        # Initialise actors
        self._actors = []

        # Loop over all actors
        for i in range(num_actors):

            # Get actor
            actor = workflow.element('actor', i)

            # Initialise parameters and input actors
            input_parameters = []
            output_parameters = []
            input_actors = []
            output_actors = []

            # Get actor attributes
            name = actor.attribute('name')
            tool = actor.attribute('tool')

            # Get actor input parameters
            if actor.elements('input') > 0:
                actor_inputs = actor.element('input')
                num_inputs = actor_inputs.elements('parameter')
                for k in range(num_inputs):
                    input_par = actor_inputs.element('parameter', k)
                    input_name = input_par.attribute('name')
                    input_value = input_par.attribute('value')
                    input_actor = input_par.attribute('actor')
                    parameter   = {'name': input_name,
                                   'value': input_value,
                                   'actor': input_actor}
                    input_parameters.append(parameter)
                    if input_actor != '':
                        if input_actor not in input_actors:
                            input_actors.append(input_actor)

            # Get actor output parameters
            if actor.elements('output') > 0:
                actor_output = actor.element('output')
                num_outputs = actor_output.elements('parameter')
                for k in range(num_outputs):
                    output_par = actor_output.element('parameter', k)
                    output_name = output_par.attribute('name')
                    output_value = output_par.attribute('value')
                    output_actor = output_par.attribute('actor')
                    parameter    = {'name': output_name,
                                    'value': output_value,
                                    'actor': output_actor}
                    output_parameters.append(parameter)
                    if output_actor != '':
                        if output_actor not in output_actors:
                            output_actors.append(output_actor)

            # Determine number of dependencies
            num_inputs = len(input_actors)

            # Set actor status
            if num_inputs > 0:
                status = 'waiting for input'
            else:
                status = 'ready'

            # Create actor entry
            entry = {
                'name': name,
                'tool': tool,
                'input_parameters': input_parameters,
                'input_actors': input_actors,
                'output_parameters': output_parameters,
                'output_actors': output_actors,
                'status': status
            }

            # Append entry
            self._actors.append(entry)

            # Log information about actors
            self._log_value(gammalib.NORMAL, 'Actor "%s"' % name, tool)

            # Compute list of predecessors
            if num_inputs == 0:
                predecessors = 'none'
            else:
                predecessors = ''
                for k in range(num_inputs):
                    if k > 0:
                        predecessors += ', '
                    predecessors += '"' + input_actors[k] + '"'

            # Log predecessors
            self._log_value(gammalib.NORMAL,
                            gammalib.number('  Predecessor', num_inputs),
                            predecessors)

        # Return
        return