Example #1
    def update_descriptors(self):

        logger.debug(
            "Updating %s descriptors based on input descriptor %s",
            self.name, self.sink.descriptor)
        self.stream = self.sink.input_streams[0]
        self.descriptor = self.sink.descriptor
        try:
            self.time_pts = self.descriptor.axes[self.descriptor.axis_num(
                "time")].points
            self.record_length = len(self.time_pts)
        except ValueError:
            raise ValueError(
                "Single shot filter sink does not appear to have a time axis!")
        self.num_segments = len(self.sink.descriptor.axes[
            self.descriptor.axis_num("segment")].points)
        self.ground_data = np.zeros(
            (self.record_length, self.num_segments // 2), dtype=np.complex128)
        self.excited_data = np.zeros(
            (self.record_length, self.num_segments // 2), dtype=np.complex128)

        output_descriptor = DataStreamDescriptor()
        output_descriptor.axes = [
            _ for _ in self.descriptor.axes if type(_) is SweepAxis
        ]
        output_descriptor._exp_src = self.sink.descriptor._exp_src
        output_descriptor.dtype = np.complex128
        for os in self.fidelity.output_streams:
            os.set_descriptor(output_descriptor)
            os.end_connector.update_descriptors()
Example #2
    def get_descriptor(self, source_instr_settings, channel_settings):
        channel = AlazarChannel(channel_settings)

        # Add the time axis
        samp_time = 1.0/source_instr_settings['sampling_rate']
        descrip = DataStreamDescriptor()
        descrip.add_axis(DataAxis("time", samp_time*np.arange(source_instr_settings['record_length'])))
        return channel, descrip
Example #3
    def init_streams(self):
        descrip = DataStreamDescriptor()
        descrip.data_name = 'current_input'
        descrip.add_axis(DataAxis("time", np.arange(int(self.sample_rate*self.num_bursts/self.frequency))/self.sample_rate))
        self.current_input.set_descriptor(descrip)

        descrip = DataStreamDescriptor()
        descrip.data_name = 'voltage_sample'
        descrip.add_axis(DataAxis("time", np.arange(int(self.sample_rate*self.num_bursts/self.frequency))/self.sample_rate))
        self.voltage_sample.set_descriptor(descrip)
Example #4
    def init_streams(self):
        # Baked in data axes
        descrip = DataStreamDescriptor()
        descrip.add_axis(DataAxis("time", 1e-9 * np.arange(self.samples)))
        if len(self.gate_durs) > 1:
            descrip.add_axis(DataAxis("gate_pulse_duration", self.gate_durs))
        descrip.add_axis(DataAxis("gate_pulse_amplitude", self.gate_amps))
        descrip.add_axis(DataAxis("attempt", range(self.attempts)))

        self.voltage.set_descriptor(descrip)
Example #5
 def init_streams(self):
     descrip = DataStreamDescriptor()
     descrip.data_name = 'voltage'
     descrip.add_axis(DataAxis("sample", range(self.samps_per_trig)))
     descrip.add_axis(DataAxis("state", range(2)))
     descrip.add_axis(DataAxis("attempt", range(self.attempts)))
     self.voltage.set_descriptor(descrip)
Example #6
 def get_descriptor(self, stream_selector, receiver_channel):
     """Get the axis descriptor corresponding to this stream selector. For the Alazar cards this
     is always just a time axis."""
     samp_time = 1.0 / receiver_channel.receiver.sampling_rate
     descrip = DataStreamDescriptor()
     descrip.add_axis(
         DataAxis(
             "time",
             samp_time *
             np.arange(receiver_channel.receiver.record_length)))
     return descrip
Example #7
 def init_streams(self):
     # Baked in data axes
     descrip = DataStreamDescriptor()
     descrip.data_name = 'voltage'
     descrip.add_axis(DataAxis("sample", range(self.samps_per_trig)))
     descrip.add_axis(DataAxis("amplitude", self.amplitudes))
     descrip.add_axis(DataAxis("repeat", range(self.repeats)))
     self.voltage.set_descriptor(descrip)
Example #8
    def update_descriptors(self):
        if not self.simple_kernel and self.kernel.value is None:
            raise ValueError("Integrator was passed kernel None")

        logger.debug(
            'Updating KernelIntegrator "%s" descriptors based on input descriptor: %s.',
            self.name, self.sink.descriptor)

        record_length = self.sink.descriptor.axes[-1].num_points()
        if self.simple_kernel.value:
            time_pts = self.sink.descriptor.axes[-1].points
            time_step = time_pts[1] - time_pts[0]
            kernel = np.zeros(record_length, dtype=np.complex128)
            sample_start = int(self.box_car_start.value / time_step)
            sample_stop = int(self.box_car_stop.value / time_step) + 1
            kernel[sample_start:sample_stop] = 1.0
            # add modulation
            kernel *= np.exp(2j * np.pi * self.frequency.value * time_step *
                             time_pts)
        elif os.path.exists(
                os.path.join(config.KernelDir, self.kernel.value + '.txt')):
            kernel = np.loadtxt(
                os.path.join(config.KernelDir, self.kernel.value + '.txt'),
                dtype=complex,
                converters={
                    0: lambda s: complex(s.decode().replace('+-', '-'))
                })
        else:
            try:
                kernel = eval(self.kernel.value.encode('unicode_escape'))
            except Exception:
                raise ValueError(
                    'Kernel invalid. Provide a file name or an expression to evaluate'
                )
        # pad or truncate the kernel to match the record length
        if kernel.size < record_length:
            self.aligned_kernel = np.append(
                kernel,
                np.zeros(record_length - kernel.size, dtype=np.complex128))
        else:
            self.aligned_kernel = np.resize(kernel, record_length)

        # Integrator reduces and removes axis on output stream
        # update output descriptors
        output_descriptor = DataStreamDescriptor()
        # TODO: handle reduction to single point
        output_descriptor.axes = self.sink.descriptor.axes[:-1]
        output_descriptor._exp_src = self.sink.descriptor._exp_src
        output_descriptor.dtype = np.complex128
        for ost in self.source.output_streams:
            ost.set_descriptor(output_descriptor)
            ost.end_connector.update_descriptors()
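A minimal sketch (not from the source) of how an aligned kernel like the one built above might be applied to a single raw record, assuming the integrator collapses the time axis with an inner product; the record, kernel, and length here are placeholders:

    import numpy as np

    record_length = 1024
    record = np.random.randn(record_length)                       # placeholder raw record
    aligned_kernel = np.ones(record_length, dtype=np.complex128)  # placeholder boxcar kernel

    # Kernel integration reduces the time axis to one complex point per record.
    integrated_point = np.inner(record, aligned_kernel)
    print(integrated_point)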
Example #9
    def update_descriptors(self):
        if not self.kernel_type:
            raise ValueError("WindowIntegrator requires a valid kernel_type")

        logger.debug(
            'Updating WindowIntegrator "%s" descriptors based on input descriptor: %s.',
            self.name, self.sink.descriptor)

        record_length = self.sink.descriptor.axes[-1].num_points()

        time_pts = self.sink.descriptor.axes[-1].points
        time_step = time_pts[1] - time_pts[0]
        kernel = np.zeros(record_length, dtype=np.complex128)
        sample_start = int(self.box_car_start.value / time_step)
        sample_stop = int(self.box_car_stop.value / time_step) + 1
        if self.kernel_type == 'boxcar':
            kernel[sample_start:sample_stop] = 1.0
        elif self.kernel_type == 'chebwin':
            # create a Dolph-Chebyshev window with 100 dB attenuation
            kernel[sample_start:sample_stop] = \
                chebwin(sample_stop - sample_start, at=100)
        elif self.kernel_type == 'blackman':
            kernel[sample_start:sample_stop] = \
                blackman(sample_stop - sample_start)
        elif self.kernel_type == 'slepian':
            # create a Slepian window with 0.2 bandwidth
            kernel[sample_start:sample_stop] = \
                slepian(sample_stop - sample_start, width=0.2)

        # add modulation
        kernel *= np.exp(2j * np.pi * self.frequency.value * time_step *
                         time_pts)

        # pad or truncate the kernel to match the record length
        if kernel.size < record_length:
            self.aligned_kernel = np.append(
                kernel,
                np.zeros(record_length - kernel.size, dtype=np.complex128))
        else:
            self.aligned_kernel = np.resize(kernel, record_length)

        # Integrator reduces and removes axis on output stream
        # update output descriptors
        output_descriptor = DataStreamDescriptor()
        # TODO: handle reduction to single point
        output_descriptor.axes = self.sink.descriptor.axes[:-1]
        output_descriptor._exp_src = self.sink.descriptor._exp_src
        output_descriptor.dtype = np.complex128
        for os in self.source.output_streams:
            os.set_descriptor(output_descriptor)
            os.end_connector.update_descriptors()
Example #10
 def init_streams(self):
     # Baked in data axes
     descrip = DataStreamDescriptor()
     descrip.add_axis(DataAxis("sample", range(self.samps_per_trig)))
     descrip.add_axis(DataAxis("state", range(2)))
     descrip.add_axis(DataAxis("attempt", range(self.attempts.value)))
     self.voltage.set_descriptor(descrip)
Example #11
 def init_streams(self):
     descrip = DataStreamDescriptor()
     descrip.add_axis(
         DataAxis("samples", 2e-9 * np.arange(self.num_samples)))
     descrip.add_axis(DataAxis("delay", self.delays))
     descrip.add_axis(DataAxis("round_robins",
                               np.arange(self.round_robins)))
     self.voltage.set_descriptor(descrip)
Example #12
    def init_streams(self):
        # Baked in data axes
        descrip = DataStreamDescriptor()
        descrip.data_name = 'voltage'
        descrip.add_axis(DataAxis("sample", range(int(self.integration_time*self.ai_clock))))
        descrip.add_axis(DataAxis("attempt", range(self.attempts)))
        self.voltage_chan.set_descriptor(descrip)

        # descrip = DataStreamDescriptor()
        # descrip.data_name='voltage'
        # descrip.add_axis(DataAxis("sample", range(int(self.integration_time*self.ai_clock))))
        # descrip.add_axis(DataAxis("attempt", range(self.attempts)))
        # self.voltage_MTJ.set_descriptor(descrip)

        descrip = DataStreamDescriptor()
        descrip.data_name = 'resistance'
        self.resistance_MTJ.set_descriptor(descrip)
Example #13
def load_from_HDF5_legacy(filename_or_fileobject):
    data = {}
    if isinstance(filename_or_fileobject, h5py.File):
        f = filename_or_fileobject
    else:
        f = h5py.File(filename_or_fileobject, 'r')
    for groupname in f:
        # Reconstruct the descriptor
        descriptor = DataStreamDescriptor()
        g = f[groupname]
        axis_refs = g['descriptor']
        for ref in reversed(axis_refs):
            ax = g[ref]
            if 'unit' not in ax.attrs:
                # Unstructured

                names = [k for k in ax.dtype.fields.keys()]
                units = [ax.attrs['unit_'+name] for name in names]
                points = ax[:]
                points = points.view(np.float32).reshape(points.shape + (-1,))
                descriptor.add_axis(DataAxis(names, points=points, unit=units))
            else:
                # Structured
                name = ax.attrs['NAME'].decode('UTF-8')
                unit = ax.attrs['unit']
                points = ax[:]
                descriptor.add_axis(DataAxis(name, points=points, unit=unit))

        for attr_name in axis_refs.attrs.keys():
            descriptor.metadata[attr_name] = axis_refs.attrs[attr_name]

        data[groupname] = g['data'][:]

    f.close()
    return data, descriptor
Example #14
def load_from_HDF5(filename_or_fileobject,
                   reshape=True,
                   return_structured_array=True):
    data = {}
    descriptors = {}
    if isinstance(filename_or_fileobject, h5py.File):
        f = filename_or_fileobject
    else:
        f = h5py.File(filename_or_fileobject, 'r')
    for groupname in f:
        # Reconstruct the descriptor
        descriptor = DataStreamDescriptor()
        g = f[groupname]
        axis_refs = g['descriptor']
        for ref in reversed(axis_refs):
            ax = g[ref]
            if ax.attrs['unstructured']:
                # The entry for the main unstructured axis contains
                # references to the constituent axes.

                # The DataAxis expects points as tuples coordinates
                # in the form [(x1, y1), (x2, y2), ...].
                points = np.vstack([g[e] for e in ax[:]]).T
                names = [g[e].attrs["name"] for e in ax[:]]
                units = [g[e].attrs["unit"] for e in ax[:]]
                descriptor.add_axis(DataAxis(names, points=points, unit=units))
            else:
                name = ax.attrs['name']
                unit = ax.attrs['unit']
                points = ax[:]
                descriptor.add_axis(DataAxis(name, points=points, unit=unit))

        for attr_name in axis_refs.attrs.keys():
            descriptor.metadata[attr_name] = axis_refs.attrs[attr_name]

        col_names = list(g['data'].keys())
        if return_structured_array:
            dtype = [(g['data'][n].attrs['name'], g['data'][n].dtype.char)
                     for n in col_names]
            length = g['data'][col_names[0]].shape[0]
            group_data = np.empty((length, ), dtype=dtype)
            for cn in col_names:
                group_data[cn] = g['data'][cn]
        else:
            group_data = {n: g['data'][n][:] for n in col_names}

        if reshape:
            group_data = group_data.reshape(descriptor.dims())

        data[groupname] = group_data
        descriptors[groupname] = descriptor
    if not isinstance(filename_or_fileobject, h5py.File):
        f.close()
    return data, descriptors
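Hypothetical usage of load_from_HDF5; the file name "results.h5" is a placeholder, and the attributes used below (descriptor .axes, axis .name) appear elsewhere in these examples:

    data, descriptors = load_from_HDF5("results.h5")
    for group, desc in descriptors.items():
        print(group, [ax.name for ax in desc.axes], data[group].shape)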
Example #15
    def update_descriptors(self):
        logger.debug(
            'Updating Channelizer "%s" descriptors based on input descriptor: %s.',
            self.name, self.sink.descriptor)

        # extract record time sampling
        self.time_pts = self.sink.descriptor.axes[-1].points
        self.record_length = len(self.time_pts)
        self.time_step = self.time_pts[1] - self.time_pts[0]
        logger.debug("Channelizer time_step = {}".format(self.time_step))

        # We will be decimating along a time axis, which is always
        # going to be the last axis given the way we usually take data.
        # TODO: perform this function along a named axis rather than a numbered axis
        # in case something about this changes.

        # update output descriptors
        decimated_descriptor = DataStreamDescriptor()
        decimated_descriptor.axes = self.sink.descriptor.axes[:]
        decimated_descriptor.axes[-1] = deepcopy(self.sink.descriptor.axes[-1])
        decimated_descriptor.axes[-1].points = self.sink.descriptor.axes[
            -1].points[self.decimation_factor.value -
                       1::self.decimation_factor.value]
        decimated_descriptor.axes[
            -1].original_points = decimated_descriptor.axes[-1].points
        decimated_descriptor._exp_src = self.sink.descriptor._exp_src
        decimated_descriptor.dtype = np.complex64
        self.output_descriptor = decimated_descriptor
        for os in self.source.output_streams:
            os.set_descriptor(decimated_descriptor)
            if os.end_connector is not None:
                os.end_connector.update_descriptors()
Example #16
    def update_descriptors(self):
        if not self.simple_kernel and self.kernel.value is None:
            raise ValueError("Integrator was passed kernel None")

        logger.debug(
            'Updating KernelIntegrator "%s" descriptors based on input descriptor: %s.',
            self.name, self.sink.descriptor)

        record_length = self.sink.descriptor.axes[-1].num_points()
        if self.simple_kernel.value:
            time_pts = self.sink.descriptor.axes[-1].points
            time_step = time_pts[1] - time_pts[0]
            kernel = np.zeros(record_length, dtype=np.complex128)
            sample_start = int(self.box_car_start.value / time_step)
            sample_stop = int(self.box_car_stop.value / time_step) + 1
            kernel[sample_start:sample_stop] = 1.0
            # add modulation
            kernel *= np.exp(2j * np.pi * self.frequency.value * time_step *
                             time_pts)
        else:
            kernel = eval(self.kernel.value.encode('unicode_escape'))
        # pad or truncate the kernel to match the record length
        if kernel.size < record_length:
            self.aligned_kernel = np.append(
                kernel,
                np.zeros(record_length - kernel.size, dtype=np.complex128))
        else:
            self.aligned_kernel = np.resize(kernel, record_length)

        # Integrator reduces and removes axis on output stream
        # update output descriptors
        output_descriptor = DataStreamDescriptor()
        # TODO: handle reduction to single point
        output_descriptor.axes = self.sink.descriptor.axes[:-1]
        output_descriptor._exp_src = self.sink.descriptor._exp_src
        output_descriptor.dtype = np.complex128
        for os in self.source.output_streams:
            os.set_descriptor(output_descriptor)
            os.end_connector.update_descriptors()
Example #17
 def init_streams(self):
     # Add a "base" data axis: say we are averaging 5 samples per trigger
     descrip = DataStreamDescriptor()
     descrip.data_name = 'voltage'
     if self.is_complex:
         descrip.dtype = np.complex128
     descrip.add_axis(DataAxis("samples", list(range(self.samples))))
     self.voltage.set_descriptor(descrip)
Example #18
    def get_descriptor(self, source_instr_settings, channel_settings):
        # Create a channel
        channel = X6Channel(channel_settings)

        descrip = DataStreamDescriptor()
        # If it's an integrated stream, then the time axis has already been eliminated.
        # Otherwise, add the time axis.
        if channel_settings['stream_type'] == 'Raw':
            samp_time = 4.0e-9
            descrip.add_axis(DataAxis("time", samp_time*np.arange(source_instr_settings['record_length']//4)))
            descrip.dtype = np.float64
        elif channel_settings['stream_type'] == 'Demodulated':
            samp_time = 32.0e-9
            descrip.add_axis(DataAxis("time", samp_time*np.arange(source_instr_settings['record_length']//32)))
            descrip.dtype = np.complex128
        else: # Integrated
            descrip.dtype = np.complex128

        return channel, descrip
Example #19
    def refine_func(sweep_axis):
        points, mean = sw.load_switching_data(wr.filename)
        new_points = refine.refine_scalar_field(points,
                                                mean,
                                                all_points=False,
                                                criterion="integral",
                                                threshold="one_sigma")
        if len(points) + len(new_points) > max_points:
            print("Reached maximum points ({}).".format(max_points))
            return False
        print("Reached {} points.".format(len(points) + len(new_points)))
        sweep_axis.add_points(new_points)

        # Plot previous mesh
        x = [list(el) for el in points[mesh.simplices, 0]]
        y = [list(el) for el in points[mesh.simplices, 1]]
        val = [np.mean(vals) for vals in mean[mesh.simplices]]

        desc = DataStreamDescriptor()
        desc.add_axis(sweep_axis)
        exp.push_to_plot(fig1, desc, points)

        time.sleep(1)
        return True
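The mesh referenced in refine_func is presumably a Delaunay triangulation of the previously measured points; a minimal sketch of constructing one with scipy, using placeholder coordinates:

    import numpy as np
    from scipy.spatial import Delaunay

    points = np.random.rand(20, 2)   # placeholder (x, y) sample coordinates
    mesh = Delaunay(points)
    print(mesh.simplices.shape)      # triangle vertex indices, as indexed above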
Example #20
 def test_copy_descriptor(self):
     dsd = DataStreamDescriptor()
     dsd.add_axis(DataAxis("One", [1, 2, 3, 4]))
     dsd.add_axis(DataAxis("Two", [1, 2, 3, 4, 5]))
     self.assertTrue(len(dsd.axes) == 2)
     self.assertTrue("One" in [a.name for a in dsd.axes])
     dsdc = copy(dsd)
     self.assertTrue(dsd.axes == dsdc.axes)
     ax = dsdc.pop_axis("One")
     self.assertTrue(ax.name == "One")
     self.assertTrue(len(dsdc.axes) == 1)
     self.assertTrue(dsdc.axes[0].name == "Two")
Example #21
 def get_descriptor(self, stream_selector, receiver_channel):
     """Get the axis descriptor corresponding to this stream selector. If it's an integrated stream,
     then the time axis has already been eliminated. Otherwise, add the time axis."""
     descrip = DataStreamDescriptor()
     if stream_selector.stream_type == 'raw':
         samp_time = 4.0e-9
         descrip.add_axis(
             DataAxis(
                 "time",
                 samp_time *
                 np.arange(receiver_channel.receiver.record_length // 4)))
         descrip.dtype = np.float64
     elif stream_selector.stream_type == 'demodulated':
         samp_time = 32.0e-9
         descrip.add_axis(
             DataAxis(
                 "time",
                 samp_time *
                 np.arange(receiver_channel.receiver.record_length // 32)))
         descrip.dtype = np.complex128
     else:  # Integrated
         descrip.dtype = np.complex128
     return descrip
Example #22
    def update_descriptors(self):
        logger.debug(
            'Updating Channelizer "%s" descriptors based on input descriptor: %s.',
            self.name, self.sink.descriptor)

        # extract record time sampling
        time_pts = self.sink.descriptor.axes[-1].points
        self.record_length = len(time_pts)
        self.time_step = time_pts[1] - time_pts[0]
        logger.debug("Channelizer time_step = {}".format(self.time_step))

        # convert bandwidth normalized to Nyquist interval
        n_bandwidth = self.bandwidth.value * self.time_step * 2
        n_frequency = self.frequency.value * self.time_step * 2

        # arbitrarily decide on three stage filter pipeline
        # 1. first stage decimating filter on real data
        # 2. second stage decimating filter on mixed product to boost n_bandwidth
        # 3. final channel selecting filter at n_bandwidth/2

        # anecdotally don't decimate more than a factor of eight for stability

        self.decim_factors = [1] * 3
        self.filters = [None] * 3

        # first stage decimating filter
        # maximize first stage decimation:
        #     * minimize subsequent stages time taken
        #     * filter and decimate while signal is still real
        #     * first stage decimation cannot be too large or then 2omega signal from mixing will alias
        d1 = 1
        while (d1 < 8) and (2 * n_frequency <=
                            0.8 / d1) and (d1 < self.decimation_factor.value):
            d1 *= 2
            n_bandwidth *= 2
            n_frequency *= 2

        if d1 > 1:
            # create an anti-aliasing filter
            # pass-band to 0.8 * decimation factor; anecdotally single precision needs order <= 4 for stability
            b, a = scipy.signal.cheby1(4, 3, 0.8 / d1)
            b = np.float32(b)
            a = np.float32(a)
            self.decim_factors[0] = d1
            self.filters[0] = (b, a)

        # store decimated reference for mix down
        ref = np.exp(2j * np.pi * self.frequency.value * time_pts[::d1],
                     dtype=np.complex64)
        self.reference_r = np.real(ref)
        self.reference_i = np.imag(ref)

        # second stage filter to bring n_bandwidth/2 up
        # decimation cannot be too large or will impinge on channel bandwidth (keep n_bandwidth/2 <= 0.8)
        d2 = 1
        while (d2 < 8) and ((d1 * d2) < self.decimation_factor.value) and (
                n_bandwidth / 2 <= 0.8):
            d2 *= 2
            n_bandwidth *= 2
            n_frequency *= 2

        if d2 > 1:
            # create an anti-aliasing filter
            # pass-band to 0.8 * decimation factor; anecdotally single precision needs order <= 4 for stability
            b, a = scipy.signal.cheby1(4, 3, 0.8 / d2)
            b = np.float32(b)
            a = np.float32(a)
            self.decim_factors[1] = d2
            self.filters[1] = (b, a)

        # final channel selection filter
        if n_bandwidth < 0.1:
            raise ValueError(
                "Insufficient decimation to achieve stable filter")

        b, a = scipy.signal.cheby1(4, 3, n_bandwidth / 2)
        b = np.float32(b)
        a = np.float32(a)
        self.decim_factors[2] = self.decimation_factor.value // (d1 * d2)
        self.filters[2] = (b, a)

        # update output descriptors
        decimated_descriptor = DataStreamDescriptor()
        decimated_descriptor.axes = self.sink.descriptor.axes[:]
        decimated_descriptor.axes[-1] = deepcopy(self.sink.descriptor.axes[-1])
        decimated_descriptor.axes[-1].points = self.sink.descriptor.axes[
            -1].points[self.decimation_factor.value -
                       1::self.decimation_factor.value]
        decimated_descriptor.axes[
            -1].original_points = decimated_descriptor.axes[-1].points
        decimated_descriptor.exp_src = self.sink.descriptor.exp_src
        decimated_descriptor.dtype = np.complex64
        for os in self.source.output_streams:
            os.set_descriptor(decimated_descriptor)
            if os.end_connector is not None:
                os.end_connector.update_descriptors()
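A minimal sketch, not the library's run-time code, of how the filters, decim_factors, and decimated reference stored above might be applied to one real-valued record, assuming each stage is scipy.signal.lfilter followed by simple stride decimation with the mix-down after the first stage:

    import numpy as np
    import scipy.signal

    def apply_channelizer(record, filters, decim_factors, reference_r, reference_i):
        # Stage 1: anti-alias filter and decimate while the signal is still real
        if filters[0] is not None:
            record = scipy.signal.lfilter(*filters[0], record)
        record = record[::decim_factors[0]]
        # Mix down with the decimated complex reference
        signal = record * (reference_r + 1j * reference_i)
        # Stages 2 and 3: filter and decimate the complex product
        for filt, d in zip(filters[1:], decim_factors[1:]):
            if filt is not None:
                signal = scipy.signal.lfilter(*filt, signal)
            signal = signal[::d]
        return signal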
Example #23
    def init_streams(self):
        descrip = DataStreamDescriptor()
        descrip.data_name = 'voltage_input'
        descrip.add_axis(DataAxis("index", np.arange(self.num_points + 2)))
        descrip.add_axis(DataAxis("repeat", np.arange(self.repeat)))
        self.voltage_input.set_descriptor(descrip)

        descrip = DataStreamDescriptor()
        descrip.data_name = 'voltage_sample'
        descrip.add_axis(DataAxis("index", np.arange(self.num_points + 2)))
        descrip.add_axis(DataAxis("repeat", np.arange(self.repeat)))
        self.voltage_sample.set_descriptor(descrip)
Example #24
 def init_streams(self):
     descrip = DataStreamDescriptor()
     descrip.data_name = 'voltage'
     descrip.add_axis(DataAxis("sample", range(int(self.meas_duration*self.ai_clock))))
     descrip.add_axis(DataAxis("attempt", range(self.attempts)))
     self.voltage.set_descriptor(descrip)