Example #1
import tables as tb


def create_fixture(file_in, file_out, n_readouts, nodes):
    with tb.open_file(file_in, 'r') as in_file:
        with tb.open_file(file_out, 'w') as out_file:
            in_file.copy_node('/configuration', out_file.root, recursive=True)
            start, stop = None, None
            if 'meta_data' in nodes:
                node = in_file.get_node('/meta_data')
                meta_data = node[:n_readouts]
                try:
                    start, stop = meta_data['index_start'][0], meta_data[
                        'index_stop'][-1]
                except KeyError:  # file uses hit_start/hit_stop field names instead
                    start, stop = meta_data['hit_start'][0], meta_data[
                        'hit_stop'][-1]
                t = out_file.create_table(out_file.root,
                                          name=node.name,
                                          description=node.description,
                                          filters=node.filters)
                t.append(meta_data)
            for n in nodes:
                if n == 'meta_data':
                    continue
                node = in_file.get_node('/' + n)
                data = node[start:stop]
                if isinstance(node, tb.EArray):
                    earray = out_file.create_earray(out_file.root,
                                                    name=node.name,
                                                    atom=tb.UIntAtom(),
                                                    shape=(0, ),
                                                    title=node.title,
                                                    filters=node.filters)
                    earray.append(data)
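
For reference, a minimal usage sketch of create_fixture; the file names, readout count, and node list below are placeholders, assuming a raw data file that contains /meta_data and /raw_data nodes:

# Hypothetical call: copy the first 100 readouts of a raw data file into a
# small fixture file (file and node names are placeholders).
create_fixture(file_in='scan_data.h5',
               file_out='fixture.h5',
               n_readouts=100,
               nodes=['meta_data', 'raw_data'])
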
Example #2
    def open(self, filename, mode='w', title=''):
        if os.path.splitext(filename)[1].strip().lower() != '.h5':
            filename = os.path.splitext(filename)[0] + '.h5'
        if os.path.isfile(filename) and mode in ('r+', 'a'):
            logging.info('Opening existing raw data file: %s', filename)
        else:
            logging.info('Opening new raw data file: %s', filename)
        if self.socket:
            send_meta_data(self.socket, None,
                           name='Reset')  # send reset to indicate a new scan
            send_meta_data(self.socket,
                           os.path.basename(filename),
                           name='Filename')

        filter_raw_data = tb.Filters(complib='blosc',
                                     complevel=5,
                                     fletcher32=False)
        filter_tables = tb.Filters(complib='zlib',
                                   complevel=5,
                                   fletcher32=False)
        self.h5_file = tb.open_file(filename,
                                    mode=mode,
                                    title=title if title else filename)
        try:
            self.raw_data_earray = self.h5_file.create_earray(
                self.h5_file.root,
                name='raw_data',
                atom=tb.UIntAtom(),
                shape=(0, ),
                title='raw_data',
                filters=filter_raw_data)  # expectedrows = ???
        except tb.exceptions.NodeError:
            self.raw_data_earray = self.h5_file.get_node(self.h5_file.root,
                                                         name='raw_data')
        try:
            self.meta_data_table = self.h5_file.create_table(
                self.h5_file.root,
                name='meta_data',
                description=MetaTable,
                title='meta_data',
                filters=filter_tables)
        except tb.exceptions.NodeError:
            self.meta_data_table = self.h5_file.get_node(self.h5_file.root,
                                                         name='meta_data')
        if self.scan_parameters:
            try:
                scan_param_descr = generate_scan_parameter_description(
                    self.scan_parameters)
                self.scan_param_table = self.h5_file.create_table(
                    self.h5_file.root,
                    name='scan_parameters',
                    description=scan_param_descr,
                    title='scan_parameters',
                    filters=filter_tables)
            except tb.exceptions.NodeError:
                self.scan_param_table = self.h5_file.get_node(
                    self.h5_file.root, name='scan_parameters')
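
Several of these examples reference a MetaTable row description that is not shown. A minimal sketch of what such a description could look like, assuming one bookkeeping row per readout block (the exact columns in the original code base may differ):

import tables as tb


# Assumed sketch of a MetaTable description (not taken from the examples above):
# one bookkeeping row per block of words appended to raw_data.
class MetaTable(tb.IsDescription):
    index_start = tb.UInt32Col(pos=0)       # first raw_data word of the block
    index_stop = tb.UInt32Col(pos=1)        # one past the last word of the block
    data_length = tb.UInt32Col(pos=2)       # number of words in the block
    timestamp_start = tb.Float64Col(pos=3)  # readout start time (epoch seconds)
    timestamp_stop = tb.Float64Col(pos=4)   # readout stop time (epoch seconds)
    error = tb.UInt32Col(pos=5)             # readout error / status code
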
Example #3
    def setup_files(self, iteration=None):
        '''
            Set up the HDF5 file by creating the earrays and tables for raw_data and meta_data.
            If a scan has multiple iterations, individual earrays and tables can be created for
            each iteration.
        '''

        filter_raw_data = tb.Filters(complib='blosc',
                                     complevel=5,
                                     fletcher32=False)
        self.filter_tables = tb.Filters(complib='zlib',
                                        complevel=5,
                                        fletcher32=False)

        # Scans without multiple iterations
        if iteration is None:
            self.raw_data_earray = self.h5_file.create_earray(
                self.h5_file.root,
                name='raw_data',
                atom=tb.UIntAtom(),
                shape=(0, ),
                title='raw_data',
                filters=filter_raw_data)
            self.meta_data_table = self.h5_file.create_table(
                self.h5_file.root,
                name='meta_data',
                description=MetaTable,
                title='meta_data',
                filters=self.filter_tables)
        # Scans with multiple iterations
        else:
            self.raw_data_earray = self.h5_file.create_earray(
                self.h5_file.root,
                name='raw_data_' + str(iteration),
                atom=tb.UIntAtom(),
                shape=(0, ),
                title='raw_data_' + str(iteration),
                filters=filter_raw_data)
            self.meta_data_table = self.h5_file.create_table(
                self.h5_file.root,
                name='meta_data_' + str(iteration),
                description=MetaTable,
                title='meta_data_' + str(iteration),
                filters=self.filter_tables)
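
Once the earray and table exist, a readout block is typically stored by appending the raw words to raw_data and recording the block boundaries in meta_data. A standalone sketch of that pattern, assuming the MetaTable layout sketched above (file name and data are placeholders):

import time

import numpy as np
import tables as tb


# Sketch: append one readout block and its bookkeeping row.
with tb.open_file('example.h5', mode='w') as h5_file:
    raw_data_earray = h5_file.create_earray(h5_file.root, name='raw_data',
                                            atom=tb.UIntAtom(), shape=(0,),
                                            title='raw_data')
    meta_data_table = h5_file.create_table(h5_file.root, name='meta_data',
                                           description=MetaTable,
                                           title='meta_data')

    data = np.arange(1024, dtype=np.uint32)  # placeholder raw data words
    index_start = raw_data_earray.nrows
    raw_data_earray.append(data)

    row = meta_data_table.row
    row['index_start'] = index_start
    row['index_stop'] = raw_data_earray.nrows
    row['data_length'] = len(data)
    row['timestamp_start'] = time.time()
    row['timestamp_stop'] = time.time()
    row['error'] = 0
    row.append()
    meta_data_table.flush()
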
Example #4
    def start(self, **kwargs):

        fh = logging.FileHandler(self.output_filename + '.log')
        fh.setLevel(logging.DEBUG)
        logger = logging.getLogger()
        logger.addHandler(fh)

        self._first_read = False
        self.scan_param_id = 0

        filename = self.output_filename + '.h5'
        filter_raw_data = tb.Filters(complib='blosc', complevel=5, fletcher32=False)
        self.filter_tables = tb.Filters(complib='zlib', complevel=5, fletcher32=False)
        self.h5_file = tb.open_file(filename, mode='w', title=self.scan_id)
        self.raw_data_earray = self.h5_file.create_earray(self.h5_file.root, name='raw_data', atom=tb.UIntAtom(),
                                                          shape=(0,), title='raw_data', filters=filter_raw_data)
        self.meta_data_table = self.h5_file.create_table(self.h5_file.root, name='meta_data', description=MetaTable,
                                                         title='meta_data', filters=self.filter_tables)

        self.meta_data_table.attrs.kwargs = yaml.dump(kwargs)

        self.dut['control']['RESET'] = 0b00
        self.dut['control'].write()
        self.dut.power_up()
        time.sleep(0.1)

        self.fifo_readout = FifoReadout(self.dut)

        # default config
        # TODO: load from file
        self.dut['global_conf']['PrmpVbpDac'] = 80
        self.dut['global_conf']['vthin1Dac'] = 255
        self.dut['global_conf']['vthin2Dac'] = 0
        self.dut['global_conf']['vffDac'] = 42
        self.dut['global_conf']['PrmpVbnFolDac'] = 51
        self.dut['global_conf']['vbnLccDac'] = 1
        self.dut['global_conf']['compVbnDac'] = 25
        self.dut['global_conf']['preCompVbnDac'] = 50

        self.dut['global_conf']['Latency'] = 400
        # chip['global_conf']['ColEn'][0] = 1
        self.dut['global_conf']['ColEn'].setall(True)
        self.dut['global_conf']['ColSrEn'].setall(True)  # enable programming of all columns
        self.dut['global_conf']['ColSrOut'] = 15

        self.dut['global_conf']['OneSr'] = 0  # all multi columns in parallel
        self.dut.write_global()

        self.dut['control']['RESET'] = 0b10
        self.dut['control'].write()

        logging.info('Power Status: %s', str(self.dut.power_status()))

        self.scan(**kwargs)

        self.fifo_readout.print_readout_status()

        self.meta_data_table.attrs.power_status = yaml.dump(self.dut.power_status())
        self.meta_data_table.attrs.dac_status = yaml.dump(self.dut.dac_status())

        self.h5_file.close()
        logging.info('Data Output Filename: %s', self.output_filename + '.h5')

        logger.removeHandler(fh)
        self.dut.power_down()
Example #5
	def scan(self, mask_steps=4, repeat_command=100, columns=[True] * 16, **kwargs):
		'''Scan loop

		Parameters
		----------
		mask_steps : int
			Number of mask steps.
		repeat_command : int
			Number of injections.
		'''

		# bitfiles
		self.clock_name = 'DATA clock'
		path = "/home/carlo/fe65_p2/firmware/ise/DATA_bits/"
#		self.bitfiles = OrderedDict([(20,"fe65p2_mio_CMD20.bit"), (30,"fe65p2_mio_CMD30.bit"),(40,"fe65p2_mio_CMD40.bit"),(50,"fe65p2_mio_CMD50.bit"), (60,"fe65p2_mio_CMD60.bit"),(70,"fe65p2_mio_CMD70.bit"),(80,"fe65p2_mio_CMD80.bit"),(90,"fe65p2_mio_CMD90.bit"), (100,"fe65p2_mio_CMD100.bit"), (110,"fe65p2_mio_CMD110.bit"), (120,"fe65p2_mio_CMD120.bit"), (130,"fe65p2_mio_CMD130.bit"), (140,"fe65p2_mio_CMD140.bit"), (150,"fe65p2_mio_CMD150.bit"),  (160,"fe65p2_mio_CMD160.bit")])
		self.bitfiles = OrderedDict([(40, "fe65p2_mio_DATA40.bit"),(60, "fe65p2_mio_DATA60.bit"), (80, "fe65p2_mio_DATA80.bit"), (100, "fe65p2_mio_DATA100.bit"),(120, "fe65p2_mio_DATA120.bit"),(160, "fe65p2_mio_DATA160.bit")])


		self.voltages = [1.5, 1.4, 1.3, 1.2, 1.1, 1.0, 0.95, 0.9]
#		self.voltages = [1.3, 1.2, 1.1, 1.0]

		self.not_fired = []
		for freq in self.bitfiles:
			logging.info("Loading %s", self.bitfiles[freq])  # loading bitfile
			self.dut['intf']._sidev.DownloadXilinx(path + self.bitfiles[freq])

			for volt in self.voltages:
				# to change the supply voltage
				self.dut['VDDA'].set_current_limit(200, unit='mA')
				self.dut['VDDA'].set_voltage(volt, unit='V')
				self.dut['VDDA'].set_enable(True)
				self.dut['VDDD'].set_voltage(volt, unit='V')
				self.dut['VDDD'].set_enable(True)
				self.dut['VAUX'].set_voltage(volt, unit='V')
				self.dut['VAUX'].set_enable(True)
				logging.info(self.dut.power_status())  # print power supply status
				self.run_name = time.strftime("%Y%m%d_%H%M%S_") + "_" + str(freq) + "MHz_" + str(volt) +"V"
				self.output_filename = os.path.join(self.working_dir, self.run_name)
				self._first_read = False
				self.scan_param_id = 0

				# .h5 output management
				filename = self.output_filename + '.h5'
				filter_raw_data = tb.Filters(complib='blosc', complevel=5, fletcher32=False)
				self.filter_tables = tb.Filters(complib='zlib', complevel=5, fletcher32=False)
				self.h5_file = tb.open_file(filename, mode='w', title=self.scan_id)
				self.raw_data_earray = self.h5_file.create_earray(
					self.h5_file.root, name='raw_data', atom=tb.UIntAtom(),
					shape=(0,), title='raw_data', filters=filter_raw_data)
				self.meta_data_table = self.h5_file.create_table(
					self.h5_file.root, name='meta_data', description=MetaTable,
					title='meta_data', filters=self.filter_tables)

				self.meta_data_table.attrs.kwargs = yaml.dump(kwargs)
				self.dut['control']['RESET'] = 0b00
				self.dut['control'].write()
				time.sleep(0.1)

				self.fifo_readout = FifoReadout(self.dut)

				# write InjEnLd & PixConfLd to '1
				self.dut['pixel_conf'].setall(True)
				self.dut.write_pixel_col()
				self.dut['global_conf']['SignLd'] = 1
				self.dut['global_conf']['InjEnLd'] = 1
				self.dut['global_conf']['TDacLd'] = 0b1111
				self.dut['global_conf']['PixConfLd'] = 0b11
				self.dut.write_global()
				# write SignLd & TDacLd to '0
				self.dut['pixel_conf'].setall(False)
				self.dut.write_pixel_col()
				self.dut['global_conf']['SignLd'] = 0
				self.dut['global_conf']['InjEnLd'] = 0
				self.dut['global_conf']['TDacLd'] = 0b0000
				self.dut['global_conf']['PixConfLd'] = 0b00
				self.dut.write_global()
				# test hit
				self.dut['global_conf']['TestHit'] = 1
				self.dut['global_conf']['SignLd'] = 0
				self.dut['global_conf']['InjEnLd'] = 0
				self.dut['global_conf']['TDacLd'] = 0
				self.dut['global_conf']['PixConfLd'] = 0
				self.dut['global_conf']['OneSr'] = 0  # all multi columns in parallel
				self.dut['global_conf']['ColEn'][:] = bitarray.bitarray(columns)
				self.dut.write_global()
				self.dut['control']['RESET'] = 0b01
				self.dut['control']['DISABLE_LD'] = 1
				self.dut['control'].write()
				self.dut['control']['CLK_OUT_GATE'] = 1
				self.dut['control']['CLK_BX_GATE'] = 1
				self.dut['control'].write()
				time.sleep(0.1)

				self.dut['control']['RESET'] = 0b11
				self.dut['control'].write()

				# enable testhit pulse and trigger
				wait_for_read = (16 + columns.count(True) * (4 * 64 // mask_steps) * 2) * (20 // 2) + 100
				self.dut['testhit'].set_delay(wait_for_read * 4)  # this should be based on mask and enabled columns
				self.dut['testhit'].set_width(3)
				self.dut['testhit'].set_repeat(repeat_command)
				self.dut['testhit'].set_en(False)

				self.dut['trigger'].set_delay(400 - 4)
				self.dut['trigger'].set_width(8)
				self.dut['trigger'].set_repeat(1)
				self.dut['trigger'].set_en(True)

				lmask = [1] + ([0] * (mask_steps - 1))
				lmask = lmask * ((64 * 64) // mask_steps + 1)
				lmask = lmask[:64 * 64]
				bv_mask = bitarray.bitarray(lmask)

				with self.readout():

					for i in range(mask_steps):

						self.dut['pixel_conf'][:] = bv_mask
						bv_mask[1:] = bv_mask[0:-1]
						bv_mask[0] = 0

						self.dut.write_pixel_col()
						time.sleep(0.1)

						self.dut['testhit'].start()

						if os.environ.get('TRAVIS'):
							logging.debug('.')

						while not self.dut['testhit'].is_done():
							pass

						while not self.dut['trigger'].is_done():
							pass

						# just some time for last read
					self.dut['trigger'].set_en(False)

					self.fifo_readout.print_readout_status()
					self.meta_data_table.attrs.power_status = yaml.dump(self.dut.power_status())
					self.meta_data_table.attrs.dac_status = yaml.dump(self.dut.dac_status())
					self.h5_file.close()
					logging.info('Data Output Filename: %s', self.output_filename + '.h5')
					self.analyze()
		self.shmoo_plotting()
Example #6
    scan_time = (timestamp_start * 5 - timestamp_start[0]).astype('uint')

    read_indices = np.array([0])
    read_indices = np.append(read_indices, np.where(np.diff(scan_time) != 0)[0])
    stop_read_indices = read_indices[:-1] + np.diff(read_indices)

    with tb.open_file(output_file, mode="w") as out_file_h5:
        filter_raw_data = tb.Filters(complib='blosc',
                                     complevel=5,
                                     fletcher32=False)
        filter_tables = tb.Filters(complib='zlib',
                                   complevel=5,
                                   fletcher32=False)
        raw_data_earray = out_file_h5.create_earray(out_file_h5.root,
                                                    name='raw_data',
                                                    atom=tb.UIntAtom(),
                                                    shape=(0, ),
                                                    title='raw_data',
                                                    filters=filter_raw_data)
        meta_data_table = out_file_h5.create_table(out_file_h5.root,
                                                   name='meta_data',
                                                   description=MetaTable,
                                                   title='meta_data',
                                                   filters=filter_tables)
        for index in range(0, len(read_indices) - 1):
            print('time', meta_data['timestamp_start'][0] - timestamp_start[0])
            if read_indices[index] != stop_read_indices[index]:
                meta_data = in_file_h5.root.meta_data.read(
                    read_indices[index], stop_read_indices[index])
                raw_data = in_file_h5.root.raw_data.read(
                    meta_data['index_start'][0], meta_data['index_stop'][-1])