Example #1
0
def run(hist, options, h_type='ADC', prev_fit_result=None):
    """
    Fill the adcs histogram out of darkrun/baseline runs
    :param h_type: type of histogram to produce: ADC for all samples adcs or SPE for only peaks
    :param hist: the histogram to fill
    :param options: see analyse_spe.py
    :param prev_fit_result: fit result of a previous step, required when h_type == 'SPE'
    :return:
    """
    # Counters: events read, events per batch, current batch index, event cap
    n_evt, n_batch, batch_num, max_evt = 0, options.n_evt_per_batch, 0, options.evt_max
    batch = None
    # BUGFIX: the original progress condition was `... % n_batch/100 == 0`,
    # which by operator precedence is `(... % n_batch) / 100 == 0` and only
    # fired at batch boundaries. Report every 1% of a batch instead
    # (at least every event for tiny batches).
    progress_step = max(n_batch // 100, 1)

    print('--|> Treating the batch #%d of %d events' % (batch_num, n_batch))
    for file in options.file_list:
        # Open the file
        _url = options.directory + options.file_basename % file
        inputfile_reader = zfits.zfits_event_source(
            url=_url
            , data_type='r1', max_events=options.evt_max)

        if options.verbose:
            print('--|> Moving to file %s' % _url)
        # Loop over event in this file
        for event in inputfile_reader:
            n_evt += 1
            if n_evt > max_evt:
                break
            if (n_evt - n_batch * batch_num) % progress_step == 0:
                print("Progress {:2.1%}".format(float(n_evt - batch_num * n_batch) / n_batch), end="\r")
            for telid in event.r1.tels_with_data:
                if n_evt % n_batch == 0:
                    print('--|> Treating the batch #%d of %d events' % (batch_num, n_batch))
                    # Update adc histo
                    if h_type == 'ADC':
                        # Flatten (pixel, event, sample) -> (pixel, event*sample)
                        hist.fill_with_batch(batch.reshape(batch.shape[0], batch.shape[1] * batch.shape[2]))
                    elif h_type == 'SPE':
                        hist.fill_with_batch(
                            spe_peaks_in_event_list(batch, prev_fit_result[:, 1, 0], prev_fit_result[:, 2, 0]))
                    # Reset the batch
                    batch = None
                    batch_num += 1
                    print('--|> Reading  the batch #%d of %d events' % (batch_num, n_batch))
                # Get the data
                data = np.array(list(event.r1.tel[telid].adc_samples.values()))
                # Append the data to the batch. NOTE: np.append re-allocates on
                # every call; acceptable since batches are bounded by n_evt_per_batch.
                if batch is None:
                    batch = data.reshape(data.shape[0], 1, data.shape[1])
                else:
                    batch = np.append(batch, data.reshape(data.shape[0], 1, data.shape[1]), axis=1)

    # NOTE(review): a trailing partial batch (events collected since the last
    # full batch) is never filled into `hist` before saving -- confirm intent.
    if options.verbose:
        print('--|> Save the data in %s' % (options.output_directory + options.histo_filename))
    np.savez_compressed(options.output_directory + options.histo_filename,
                        adcs=hist.data, adcs_bin_centers=hist.bin_centers)
def run(hist, options, min_evt=5000. * 3, max_evt=5000 * 10):
    """
    Fill `hist` with the per-pixel sample index of the maximum ADC value
    (peak position) for events between `min_evt` and `max_evt`.

    :param hist: the histogram to fill (one entry per pixel per event)
    :param options: see analyse_spe.py
    :param min_evt: number of events to skip before filling
    :param max_evt: stop after this many events
    :return:
    """
    # Few counters
    evt_num, first_evt, first_evt_num = 0, True, 0
    for file in options.file_list:
        if evt_num > max_evt:
            break
        # read the file
        _url = options.directory + options.file_basename % file

        if not options.toy_test:
            inputfile_reader = zfits.zfits_event_source(url=_url,
                                                        data_type='r1',
                                                        max_events=100000)
        else:
            inputfile_reader = ToyReader(filename=_url,
                                         id_list=[0],
                                         max_events=5000,
                                         n_pixel=options.n_pixels)

        if options.verbose:
            print('--|> Moving to file %s' % _url)
        # Loop over event in this file
        for event in inputfile_reader:
            if evt_num < min_evt:
                evt_num += 1
                continue
            if evt_num > max_evt:
                break
            for telid in event.r1.tels_with_data:
                evt_num += 1
                if evt_num > max_evt:
                    break
                if options.verbose and event.r1.event_id % 1000 == 0:
                    print("Progress {:2.1%}".format(evt_num / max_evt),
                          end="\r")
                # get the data
                data = np.array(list(event.r1.tel[telid].adc_samples.values()))
                # NOTE(review): the original had a no-op `data = data` under a
                # "subtract the pedestals" comment -- no pedestal subtraction
                # is actually performed; confirm whether it should be.
                # Fill with the sample index of each pixel trace's maximum
                hist.fill(np.argmax(data, axis=1))

    # Update the errors
    # noinspection PyProtectedMember
    hist._compute_errors()
    # Save the MPE histos in a file

    if options.verbose:
        print('--|> Save the data in %s' %
              (options.output_directory + options.peak_histo_filename))
    np.savez_compressed(options.output_directory + options.peak_histo_filename,
                        peaks=hist.data,
                        peaks_bin_centers=hist.bin_centers)
Example #3
0
        neigh += geom.neighbors[i]
        mask[i][np.array(neigh)] = True
    return mask


def sum_cluster(data, masks):
    """Combine per-pixel values with boolean exclusion masks.

    Tiles `data` into an (n, n) matrix (one row per cluster, each row a copy
    of `data`), masks entries where `masks` is True (numpy masked-array
    convention: True = excluded), and sums down the columns.

    Generalized: the original hard-coded the camera size 1296; the size is
    now taken from `data` itself, so any 1-D input works (backward-compatible
    for 1296-pixel data).

    :param data: 1-D array-like of per-pixel values, length n
    :param masks: (n, n) boolean array; masks[i][j] True excludes row i's
                  copy of pixel j from the column sum
    :return: masked array of length n with the per-column sums
    """
    values = np.asarray(data)
    n = values.size
    tiled = np.repeat(values.reshape(1, n), n, axis=0)
    return np.sum(np.ma.masked_array(tiled, mask=masks, fill_value=0),
                  axis=0)


# Open the dark-run zfits file as an R1 event stream.
# NOTE(review): the file path and max_events are hard-coded -- presumably a
# one-off analysis script; confirm before reuse.
inputfile_reader = zfits.zfits_event_source(
    url=
    "/data/datasets/CTA/[email protected]_0_000.36.fits.fz",
    data_type='r1',
    max_events=2130)
# Optionally prepare a single camera display for the integrated,
# pedestal-subtracted ADC image (`plotting`, `geom` are defined elsewhere
# in this file).
if plotting:
    plt.figure(0)
    displayType = []
    plt.subplot(1, 1, 1)
    displayType.append(
        visualization.CameraDisplay(
            geom,
            title='Integrated ADC over 200ns, pedestal subtracted',
            norm='lin',
            cmap='coolwarm'))

# Precompute boolean neighbour masks over the camera geometry
# (helper functions defined earlier in the file).
neigh_mask = generate_neighbourg_masks()
hollow_neigh_mask = generate_hollow_neighbourg_masks()
def run(hist, options, h_type='ADC', prev_fit_result=None):
    """
    Fill the adcs Histogram out of darkrun/baseline runs
    :param h_type: type of Histogram to produce: ADC for all samples adcs or SPE for only peaks
    :param hist: the Histogram to fill
    :param options: see analyse_spe.py
    :param prev_fit_result: fit result of a previous step needed for the calculations
    :return:
    """
    logger = logging.getLogger(sys.modules['__main__'].__name__ + '.' +
                               __name__)
    # Counters: events read, events per batch, current batch index, event cap
    n_evt, n_batch, batch_num, max_evt = 0, options.n_evt_per_batch, 0, options.evt_max
    batch = None
    # BUGFIX: the original progress condition `... % n_batch / 100 == 0`
    # parses as `(... % n_batch) / 100 == 0` (operator precedence) and so
    # only fired at batch boundaries. Report every 1% of a batch instead.
    progress_step = max(n_batch // 100, 1)

    if not options.mc:
        # (removed a leftover debug `print(logger)` here)
        logger.info('Running on DigiCam data')
    else:
        logger.info('Running on MC data')

    logger.debug('Treating the batch #%d of %d events' % (batch_num, n_batch))
    for file in options.file_list:
        # Open the file
        _url = options.directory + options.file_basename % file
        if not options.mc:
            inputfile_reader = zfits.zfits_event_source(url=_url,
                                                        data_type='r1',
                                                        max_events=100000)
        else:
            inputfile_reader = ToyReader(filename=_url,
                                         id_list=[0],
                                         max_events=options.evt_max,
                                         n_pixel=options.n_pixels)

        logger.debug('--|> Moving to file %s' % _url)
        # Loop over event in this file
        for event in inputfile_reader:
            n_evt += 1
            if n_evt > max_evt:
                break
            if (n_evt - n_batch * batch_num) % progress_step == 0:
                # (removed a leftover debug print of the raw fraction)
                print("Progress {:2.1%}".format(
                    float(n_evt - batch_num * n_batch) / n_batch),
                      end="\r")
            for telid in event.r1.tels_with_data:
                if n_evt % n_batch == 0:
                    logger.debug('Treating the batch #%d of %d events' %
                                 (batch_num, n_batch))
                    # Update adc histo
                    if h_type == 'ADC':
                        # Flatten (pixel, event, sample) -> (pixel, event*sample)
                        hist.fill_with_batch(
                            batch.reshape(batch.shape[0],
                                          batch.shape[1] * batch.shape[2]))
                    elif h_type == 'SPE':
                        hist.fill_with_batch(
                            spe_peaks_in_event_list(batch,
                                                    prev_fit_result[:, 1, 0],
                                                    prev_fit_result[:, 2, 0]))
                    # Reset the batch
                    batch = None
                    batch_num += 1
                    logger.debug('Reading  the batch #%d of %d events' %
                                 (batch_num, n_batch))
                # Get the data
                data = np.array(list(event.r1.tel[telid].adc_samples.values()))
                # Append the data to the batch (np.append re-allocates each
                # call; acceptable since batches are bounded by n_evt_per_batch)
                if batch is None:
                    batch = data.reshape(data.shape[0], 1, data.shape[1])
                else:
                    batch = np.append(batch,
                                      data.reshape(data.shape[0], 1,
                                                   data.shape[1]),
                                      axis=1)

    return
Example #5
0
def run(hists, options, peak_positions=None, data_type='r1'):
    """
    Fill the MPE histograms, one index per DAC scan level, with the ADC
    value at the peak sample of each pixel trace.

    :param hists: list of histograms; only hists[0] is filled here, but
                  errors are recomputed for all of them
    :param options: see analyse_spe.py (scan_level, events_per_level, ...)
    :param peak_positions: optional per-pixel array; when given, the peak
                           sample is taken from its argmax instead of each
                           event's own maximum
    :param data_type: kept for interface compatibility; the reader is
                      always opened with data_type='r1'
    :return:
    """
    # Few counters
    level, evt_num, first_evt, first_evt_num = 0, 0, True, 0
    for file in options.file_list:
        if level > len(options.scan_level) - 1:
            break
        # Get the file
        _url = options.directory + options.file_basename % file
        if not options.toy_test:
            inputfile_reader = zfits.zfits_event_source(url=_url,
                                                        data_type='r1',
                                                        max_events=100000)
        else:
            inputfile_reader = ToyReader(filename=_url,
                                         id_list=[0],
                                         max_events=5000,
                                         n_pixel=options.n_pixels)

        if options.verbose:
            print('--|> Moving to file %s' % _url)
        # Loop over event in this file
        for event in inputfile_reader:
            if level > len(options.scan_level) - 1:
                break
            for telid in event.r1.tels_with_data:
                # Event numbers are offset by the first event number seen
                if first_evt:
                    first_evt_num = event.r1.tel[telid].eventNumber
                    first_evt = False
                evt_num = event.r1.tel[telid].eventNumber - first_evt_num
                # Advance to the next DAC level every events_per_level events
                if evt_num % options.events_per_level == 0:
                    level = int(evt_num / options.events_per_level)
                    if level > len(options.scan_level) - 1:
                        break
                    if options.verbose:
                        print('--|> Moving to DAC Level %d' %
                              (options.scan_level[level]))
                if options.verbose and event.r1.event_id % 100 == 0:
                    print("Progress {:2.1%}".format(
                        (evt_num - level * options.events_per_level) /
                        options.events_per_level),
                          end="\r")
                # get the data
                data = np.array(list(event.r1.tel[telid].adc_samples.values()))
                # NOTE(review): the original contained a no-op `data = data`
                # under a "subtract the pedestals" comment -- no pedestal
                # subtraction is actually performed; confirm intent.
                # put in proper format: (1, n_pixels, n_samples)
                data = data.reshape((1, ) + data.shape)
                # Peak sample per pixel: fixed from peak_positions when
                # provided, otherwise each trace's own maximum.
                # (The original computed the per-event argmax unconditionally
                # and then overwrote it; an unused integration-parameter dict
                # and a dead commented-out block were removed.)
                if isinstance(peak_positions, np.ndarray):
                    peak = np.argmax(peak_positions, axis=1)
                else:
                    peak = np.argmax(data[0], axis=1)

                # Fancy index selecting each pixel's peak sample
                index_max = (
                    np.arange(0, data[0].shape[0]),
                    peak,
                )
                hists[0].fill(data[0][index_max], indices=(level, ))

    # Update the errors
    # noinspection PyProtectedMember
    for hist in hists:
        hist._compute_errors()
    # Save the MPE histos in a file
    if options.verbose:
        print('--|> Save the data in %s' %
              (options.output_directory + options.histo_filename))
    np.savez_compressed(options.output_directory + options.histo_filename,
                        mpes=hists[0].data,
                        mpes_bin_centers=hists[0].bin_centers)