Example 1
    def actionRestore(self):
        """Restore dataset translated by values"""
        for uc, d in self.trcum.iteritems():
            obj = self.document.resolveFullWidgetPath(uc)
            logging.debug('restoring curve', uc, d, obj)
            yds_name = obj.settings.yData
            yds = self.document.data[yds_name]
            new = yds.data + d
            # Create a copy of the dataset
            ydsn = copy.copy(yds)
            # Change copy's values
            ydsn.data = new
            # Set original dataset to copied one
            op = document.OperationDatasetSet(yds_name, ydsn)
            self.ops.append(op)
        logging.debug('axmap', self.axmap)
        for uc, (newax, oldax, (minVal, maxVal)) in self.axmap.iteritems():
            obj = self.document.resolveFullWidgetPath(uc)
            self.toset(obj, 'yAxis', oldax)
            ox = self.parent.getChild(oldax)
            self.toset(ox, 'min', minVal)
            self.toset(ox, 'max', maxVal)

            if newax != oldax:
                n = self.parent.getChild(newax)
                op = document.OperationWidgetDelete(n)
                self.ops.append(op)

        self.trcum = {}
        self.axmap = {}
        self.apply_ops('SynAxis: Restore')

    def add_datasets(self, start_index, start_value):
        """Add standard and fitted datasets for further evaluations (plotting, etc.)"""
        # Adding plot data
        fields = self.fld
        name = fields['std'].replace(' ', '_')
        p = fields['d'] + '/' + name
        Tds = self.doc.data[fields['T']]
        T = Tds.data
        old_unit = getattr(self.ds, 'old_unit', 'percent')
        # Fitting
        # f = np.poly1d((self.slope, 0))
        f = np.poly1d((self.quad, self.slope, 0))
        df = f(T)
        df += start_value - df[start_index]
        # TODO: define new derived datasets for these
        dsf = copy(Tds)
        dsf.attr = dict({}, **Tds.attr)
        dsf.tags = set([])
        dsf.data = plugins.numpyCopyOrNone(df)
        dsf.m_var = name + '_fit'
        dsf.m_pos = 2
        dsf.m_name = dsf.m_var
        dsf.m_col = dsf.m_var
        dsf.old_unit = old_unit
        dsf.unit = 'percent'
        dsf.m_initialDimension = self.inidim
        dsf.m_label = _('Calibration Fitting')
        self.ops.append(document.OperationDatasetSet(p + '_fit', dsf))

        # Evaluate std fit over regular T
        d = self.f(T)
        # Translate zero so it matches the fit
        d -= d[start_index] - df[start_index]
        dsd = copy(Tds)
        dsd.attr = dict({}, **Tds.attr)
        dsd.tags = set([])
        dsd.data = plugins.numpyCopyOrNone(d)
        dsd.m_var = name
        dsd.m_pos = 1
        dsd.m_name = name
        dsd.m_col = name
        dsd.unit = 'percent'
        dsd.old_unit = old_unit
        dsd.m_initialDimension = self.inidim
        dsd.m_label = _(name)
        self.ops.append(document.OperationDatasetSet(p, dsd))

    def translate_values(self, dataset, dataset_name, delta, doc):
        translated_data = dataset.data - delta
        translated_dataset = copy.copy(dataset)
        translated_dataset.data = translated_data
        op = document.OperationDatasetSet(dataset_name, translated_dataset)
        self.ops.append(op)

        self.apply_ops()
        return True
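
All three methods above share one pattern: shallow-copy a dataset out of doc.data, overwrite the copy's data array, wrap the replacement in document.OperationDatasetSet and queue it until apply_ops (or applyOperation) commits the batch. Below is a minimal sketch of that pattern; the `from veusz import document` import line is an assumption about how the module is exposed, since the plugins above receive it already imported.

import copy

from veusz import document


def shift_dataset(doc, name, delta):
    """Replace dataset `name` with a copy whose values are shifted by `delta`."""
    ds = doc.data[name]             # doc.data maps dataset names to dataset objects
    new_ds = copy.copy(ds)          # shallow copy keeps metadata attributes
    new_ds.data = ds.data + delta   # only the data array changes
    # Applying the operation through the document keeps the change undoable
    doc.applyOperation(document.OperationDatasetSet(name, new_ds))
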
Example 4
    def newDataset(self, ds):
        """Add new dataset to document."""
        # get a name for dataset
        name = _('new dataset')
        if name in self.document.data:
            count = 1
            while name in self.document.data:
                name = _('new dataset %i') % count
                count += 1

        # add new dataset
        self.document.applyOperation(document.OperationDatasetSet(name, ds))

        self.dsbrowser.selectDataset(name)
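
The collision loop above is a generic "find a free name" pattern. Factored into a standalone helper it might look as follows; the helper name is hypothetical and the _() translation wrapper is dropped for brevity.

def unique_dataset_name(existing, base='new dataset'):
    """Return `base`, or `base` plus a counter, that is not already in `existing`."""
    if base not in existing:
        return base
    count = 1
    while '%s %i' % (base, count) in existing:
        count += 1
    return '%s %i' % (base, count)

# e.g. unique_dataset_name(self.document.data) -> 'new dataset', 'new dataset 1', ...
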
Example 5
    def removeGaps(self):
        gap_range = self.settings.setdict['remove_gaps_range'].val
        gaps_threshold = self.settings.setdict['remove_gaps_thershold'].val
        start_index = int(round(self.point_index - gap_range / 2.0))
        end_index = start_index + gap_range
        data = self.parent.settings.get('yData').getFloatArray(self.document)
        dataset_name = self.parent.settings.get('yData').val
        dataset = copy(self.document.data[dataset_name])
        data_without_gap = remove_gaps_from(data, gaps_threshold, start_index,
                                            end_index)

        dataset.data = data_without_gap
        operation = document.OperationDatasetSet(dataset_name, dataset)

        self.ops.append(operation)
        self.up_coord(yData=data_without_gap)

        self.apply_ops('Remove Gap')
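
remove_gaps_from itself is not part of this listing. Purely as an illustration (not the library's actual implementation), a helper with this signature could subtract any step larger than the threshold from all samples that follow it, which is what "removing a gap" amounts to here:

import numpy as np


def remove_gaps_from(data, threshold, start_index, end_index):
    """Illustrative sketch: close steps larger than `threshold` in the window."""
    out = np.array(data, dtype=float)
    lo = max(start_index, 1)
    hi = min(end_index, len(out) - 1)
    for i in range(lo, hi + 1):
        jump = out[i] - out[i - 1]
        if abs(jump) > threshold:
            # Shift everything from this point onwards back by the jump
            out[i:] -= jump
    return out
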
Example 6
def new_dataset_operation(original_dataset,
                          data,
                          name,
                          label,
                          path,
                          unit='volt',
                          opt=False,
                          error=None):
    """Create a new dataset by copying `original_dataset` and overwriting with `data`.
    Returns an operation to be executed by the document."""
    old_unit = unit
    if opt:
        if unit is False:
            unit = opt.get('csunit', False)
        old_unit = opt.get('unit', unit)
    old_unit = getattr(original_dataset, 'old_unit', old_unit)
    if not opt:
        opt = original_dataset.m_opt
    new_dataset = copy(original_dataset)
    new_dataset.attr = dict({}, **original_dataset.attr)
    new_dataset.tags = set([])
    new_dataset.data = plugins.numpyCopyOrNone(data)
    new_dataset.m_opt = opt
    new_dataset.m_var = name
    new_dataset.m_pos = 2
    new_dataset.m_col = new_dataset.m_var
    new_dataset.old_unit = old_unit
    new_dataset.unit = unit
    new_dataset.m_label = _(label)

    prefix = original_dataset.linked.prefix
    if not path.startswith(prefix):
        path = prefix + path.lstrip('/')
    new_dataset.m_name = path
    if error is not None:
        new_dataset.serr = error
    return document.OperationDatasetSet(path, new_dataset)
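
A hedged usage sketch for new_dataset_operation, written as it might appear inside a plugin that keeps the self.ops / self.apply_ops queue used by the other examples. The dataset paths, the t array and the quadratic fit are made up purely for illustration.

        # Usage sketch (hypothetical dataset names and fit)
        source = self.doc.data['0:sample/pressure']
        t = self.doc.data['0:sample/t'].data
        fitted = np.polyval(np.polyfit(t, source.data, 2), t)
        self.ops.append(
            new_dataset_operation(source, fitted,
                                  name='pressure_fit',
                                  label='Pressure fit',
                                  path='0:sample/pressure_fit'))
        self.apply_ops('Pressure: fit')
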
Example 7
    def apply(self, interface, fields):
        """Do the work of the plugin.
        interface: veusz command line interface object (exporting commands)
        fields: dict mapping field names to values
        """
        self.ops = []
        self.doc = interface.document
        # raise DatasetPluginException if there are errors
        ds = interface.document.data.get(fields['ds'], False)
        if not ds:
            raise plugins.DatasetPluginException('Dataset not found: ' +
                                                 fields['ds'])
        if isinstance(ds, document.datasets.Dataset1DPlugin):
            logging.error(
                'Cannot convert to percent a derived dataset. Please convert the source.'
            )
            return False

        action = units.percent_action(ds, fields['action'])
        ds1 = units.percent_conversion(ds, action, fields['auto'])
        ds = ds1
        self.ops.append(document.OperationDatasetSet(fields['ds'], ds))
        #self.ops.append(document.OperationDatasetSetVal(fields['ds'], 'data',slice(None,None),ds1.data[:]))

        self.apply_ops()
        logging.debug('Converted %s %s using initial dimension %.2f.' %
                      (fields['ds'], fields['action'], ds.m_initialDimension))
        # 		QtGui.QMessageBox.information(None,'Percentage output',
        # 				'Converted %s %s using initial dimension %.2f.' % (fields['ds'], msg, ds.m_initialDimension))

        # updating all dependent datapoints
        convert_func = units.percent_func(ds, action, fields['auto'])
        utils.convert_datapoint_units(convert_func, fields['ds'], self.doc)

        if not fields['propagate']:
            return
        # Find all datasets plotted with the same Y axis
        cvt = []
        tree = get_plotted_tree(self.doc.basewidget)
        upax = []
        for axp, dslist in tree['axis'].iteritems():
            if not fields['ds'] in dslist:
                continue
            logging.debug('Propagating to', cvt)
            cvt += dslist
            upax.append(axp)
        cvt = list(set(cvt))
        if fields['ds'] in cvt:
            cvt.remove(fields['ds'])
        act = 'To Percent' if ds.m_percent else 'To Absolute'
        # Create a non-propagating percentage operation for each dataset found
        for nds in cvt:
            ncur = getattr(self.doc.data[nds], 'm_percent', None)
            if ncur == ds.m_percent:
                continue
            logging.debug('Really propagating percentage to', nds)
            nfields = {
                'ds': nds,
                'propagate': False,
                'action': act,
                'auto': True
            }
            self.ops.append(
                document.OperationToolsPlugin(PercentPlugin(), nfields))
        # Update axis labels
        old = units.symbols.get(ds.old_unit, False)
        new = units.symbols.get(ds.unit, False)
        if old and new:
            for ax in upax:
                ax = self.doc.resolveFullWidgetPath(ax)
                lbl = ax.settings.label.replace(old, new)
                self.toset(ax, 'label', lbl)
        # Apply everything
        self.apply_ops('Percentage: Propagate')
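
Example 7 delegates the actual math to units.percent_action / units.percent_conversion. The underlying conversion, as it appears explicitly in Example 8 below (out * ds.m_initialDimension / 100. and 100. * out / ini), reduces to two one-liners; a sketch:

def to_absolute(percent_values, initial_dimension):
    """Percent of the initial dimension -> absolute values."""
    return percent_values * initial_dimension / 100.


def to_percent(values, initial_dimension):
    """Absolute values -> percent of the initial dimension."""
    return 100. * values / initial_dimension
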
Example 8
    def actionUp(self):
        logging.debug('SYNC LINE UP')
        self.ops = []
        doc = self.document
        ref = None  # Reference curve is the first child
        xref = []
        yref = []
        # Fractional position on x axis
        apos = self.settings.otherPosition
        # Translation mode
        trmode = self.settings.trans
        # Maximum translations
        up_ext = 0
        down_ext = 0
        axmap = {}
        objmap = {}
        # Search for all curves in parent graph
        for i in range(self.ncurves):
            u = 'curve' + str(i)
            uc = getattr(self.settings, u)
            obj = self.parent.getChild(uc)
            if obj is None:
                if i == 0:
                    logging.debug('No reference curve defined')
                    return
                break
            xax = self.parent.getChild(obj.settings.xAxis)
            # Get the y axis
            yax = self.parent.getChild(obj.settings.yAxis)
            if None in [xax, yax]:
                continue
            # Obtain real position relative to xax
            pos = apos * \
                (xax.plottedrange[1] - xax.plottedrange[0]) + \
                xax.plottedrange[0]
            # Set reference values
            if ref is None:
                ref = obj
                xref = doc.data[ref.settings.xData].data
                yref = doc.data[ref.settings.yData].data
                yaxref = yax
                # Reference ranges
                ymin_ref, ymax_ref = yaxref.plottedrange
                # Search the nearest X value on ref X-array
                dst = np.abs(xref - pos)
                idx = np.where(dst == dst.min())[0][0]
                # Get the corresponding Y value on the ref Y-array
                yval_ref = yref[idx]
                axmap[yax.name] = obj
                objmap[obj] = (yax, 0)
                continue

            # Getting curve data
            xtr = doc.data[obj.settings.xData].data
            yds_name = obj.settings.yData
            yds = doc.data[yds_name]
            ytr = yds.data
            # Search the nearest X value on trans X-array
            dst = np.abs(xtr - pos)
            idx = np.where(dst == dst.min())[0][0]
            # Delta
            d = ytr[idx] - yval_ref
            objmap[obj] = (yax, d)
            # Create axes - only for axis translation
            if trmode == 'Values':
                new = yds.data - d
                # Create a copy of the dataset
                ydsn = copy.copy(yds)
                # Change copy's values
                ydsn.data = new
                # Set original dataset to copied one
                op = document.OperationDatasetSet(yds_name, ydsn)
                self.ops.append(op)
                # Remember translation
                if obj.path not in self.trcum:
                    self.trcum[obj.path] = 0
                self.trcum[obj.path] += d
                continue

            # Remember for future translation
            if ytr.min() < ymin_ref:
                ymin_ref = ytr.min()
            if ytr.max() > ymax_ref:
                ymax_ref = ytr.max()

            # Each Y axis MUST be unique.
            # Create new one if current obj happens to share its axis with a
            # previous one
            if yax.name in axmap:
                axname = 'ax_%s_%s' % (obj.name, self.name)
                axset = {
                    'name': axname,
                    'direction': 'vertical',
                    'label': 'Trans:' + yax.settings.label,
                    'hide': False
                }  # will be True! don't want to see all that axes
                self.ops.append(
                    document.OperationWidgetAdd(self.parent, 'axis', **axset))
                self.toset(obj, 'yAxis', axname)
                axmap[axname] = obj
                self.axmap[obj.path] = (axname, yax.name, (yax.settings.min,
                                                           yax.settings.max))


            # else:
            #     self.axmap[obj.path] = (yax.name, yax.name,
            #                             (yax.settings.min, yax.settings.max))

        if trmode == 'Values':
            # Remove dissociated objects
            ucs = set([obj.path for obj in objmap.keys()])
            for uc in set(self.trcum.keys()) - ucs:
                del self.trcum[uc]
            self.apply_ops('SynAxis: Translation')
            return

        # Remove unused restore info
        ucs = set([obj.path for obj in objmap.keys()])
        for uc in set(self.axmap.keys()) - ucs:
            del self.axmap[uc]

        self.trcum = {}
        # Apply axis creation
        self.apply_ops('SynAxis: Create')
        # Extend axes and apply translations
        self.toset(yaxref, 'max', float(ymax_ref))
        self.toset(yaxref, 'min', float(ymin_ref))
        for obj, (yax, d) in objmap.iteritems():
            self.toset(yax, 'max', float(ymax_ref + d))
            self.toset(yax, 'min', float(ymin_ref + d))

        self.apply_ops('SynAxis: Update')
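
The nearest-point lookup used twice above, i = np.where(dst == dst.min())[0][0], returns the first index of the minimum distance; numpy.argmin expresses the same thing directly. A small equivalent sketch:

import numpy as np


def nearest_index(x, value):
    """Index of the element of `x` closest to `value`."""
    return int(np.argmin(np.abs(np.asarray(x) - value)))

# e.g. nearest_index([0.0, 0.5, 1.0, 2.0], 0.8) -> 2
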
    def apply(self, interface, fields):
        """Do the work of the plugin.
        interface: veusz command line interface object (exporting commands)
        fields: dict mapping field names to values
        """
        self.ops = []
        self.doc = interface.document
        # raise DatasetPluginException if there are errors
        ds = self.doc.data.get(fields['ds'], False)
        if not ds:
            raise plugins.DatasetPluginException('Dataset not found: ' +
                                                 fields['ds'])
        out = numpy.array(ds.data)
        # If data was converted to percentage, convert back to real numbers
        percent = getattr(ds, 'm_percent', False)
        if percent:
            out = out * ds.m_initialDimension / 100.
        # Calculate automatic initial value
        ini = fields['ini']
        n = fields['num']
        start = fields['start']
        if fields['auto']:
            if n > len(out) / 2:
                raise plugins.DatasetPluginException(
                    'Too many points used for calculation: %i/%i' %
                    (n, len(out)))
            x = interface.document.data.get(fields['ds_x'], False)
            if x is not False:
                x = numpy.array(x.data)
            i = 0
            # Cut from start T
            if start != -1 and x is not False:
                diff = abs(x - start)
                i = numpy.where(diff == min(diff))[0][0]
                x = x[i:]
            ini = out[i:i + n]
            if fields['method'] == 'mean':
                ini = ini.mean()
            elif fields['method'] == 'linear-regression':
                if x is False:
                    raise plugins.DatasetPluginException('Dataset not found: ' +
                                                         fields['ds_x'])

                (slope, const) = scipy.polyfit(x[:n], ini, 1)
                ini = x[0] * slope + const

        # Convert back to percent if needed
        ds1 = copy(ds)
        if percent:
            out = 100. * out / ini
            ds1.data = plugins.numpyCopyOrNone(out)
        orig = getattr(ds, 'm_initialDimension', False)
        if orig and orig != ini and not fields['suppress_messageboxes']:
            repl = QtGui.QMessageBox.warning(
                None,
                'Initial dimension',
                'Changing initial dimension from %.2f to %.2f. Confirm?' %
                (orig, ini),
                QtGui.QMessageBox.Ok | QtGui.QMessageBox.Cancel,
                defaultButton=QtGui.QMessageBox.Ok)
            if repl != QtGui.QMessageBox.Ok:
                QtGui.QMessageBox.information(None, 'Initial dimension',
                                              'Change cancelled')
                return
        ds1.m_initialDimension = ini
        self.ops.append(document.OperationDatasetSet(fields['ds'], ds1))
        self.apply_ops()
        if not fields['suppress_messageboxes']:
            QtGui.QMessageBox.information(
                None, 'Initial dimension output',
                'Initial dimension configured to %.2f' % ini)
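
scipy.polyfit was only an alias of numpy.polyfit and has been deprecated in newer SciPy releases, so the 'linear-regression' branch above maps directly onto numpy. A sketch of the same baseline estimate, with illustrative data:

import numpy as np


def initial_value(x, y, n):
    """Fit y ~ slope * x + const over the first n points, evaluate at x[0]."""
    slope, const = np.polyfit(x[:n], y[:n], 1)
    return x[0] * slope + const

x = np.linspace(0., 1., 50)
y = 3.0 * x + 10.0
print(initial_value(x, y, 10))   # ~10.0, the extrapolated value at x[0]
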
Example 10
    def apply(self, interface, fields):
        """Do the work of the plugin.
        interface: veusz command line interface object (exporting commands)
        fields: dict mapping field names to values
        """
        self.ops = []
        self.doc = interface.document

        ds = interface.document.data.get(fields['ds'], False)
        if not ds:
            raise plugins.DatasetPluginException('Dataset not found: ' +
                                                 fields['ds'])

        ds1 = units.convert(ds, fields['convert'])
        self.ops.append(document.OperationDatasetSet(fields['ds'], ds1))
        self.apply_ops()

        # Update DataPoints
        convert_func = units.convert_func(ds, fields['convert'])
        utils.convert_datapoint_units(convert_func, fields['ds'], self.doc)

        # Update file-wise time unit if ds is the time dataset
        if ds.linked and fields['ds'] == ds.linked.prefix + 't':
            ds.linked.params.time_unit = ds1.unit

        ####
        # PROPAGATION
        if not fields['propagate']:
            return
        # Find all datasets plotted with the same Y axis
        cvt = []
        tree = get_plotted_tree(self.doc.basewidget)
        upax = []
        for axp, dslist in tree['axis'].iteritems():
            if not fields['ds'] in dslist:
                continue
            logging.debug('Propagating to', cvt)
            cvt += dslist
            upax.append(axp)
        # If time dataset, propagate to all time datasets
        if ds.m_var == 't':
            for k, nds in self.doc.data.iteritems():
                if k == fields['ds']:
                    continue
                if getattr(nds, 'm_var', False) != 't':
                    continue
                cvt.append(k)
        cvt = list(set(cvt))
        # Create a non-propagating unit conversion operation for each dataset
        # found
        for nds in cvt:
            if nds == fields['ds']:
                continue
            ncur = getattr(self.doc.data[nds], 'unit', False)
            if not ncur:
                continue
            logging.debug('Really propagating unit conversion to', nds)
            # Use a separate dict so the loop does not clobber the caller's
            # `fields` (fields['ds'] must keep naming the original dataset)
            nfields = {
                'ds': nds,
                'propagate': False,
                'convert': fields['convert']
            }
            self.ops.append(
                document.OperationToolsPlugin(UnitsConverterTool(), nfields))
        # Update axis labels
        old = units.symbols.get(ds.unit, False)
        new = units.symbols.get(fields['convert'], False)
        if old and new:
            for ax in upax:
                ax = self.doc.resolveFullWidgetPath(ax)
                lbl = ax.settings.label.replace(old, new)
                self.toset(ax, 'label', lbl)

        # Apply everything
        self.apply_ops('UnitsConverterTool: Propagate')
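
Both propagation blocks (Examples 7 and 10) finish by swapping unit symbols inside the axis labels. A minimal sketch of that step, with a made-up mapping standing in for units.symbols:

symbols = {'volt': 'V', 'percent': '%'}   # illustrative stand-in for units.symbols


def relabel(label, old_unit, new_unit):
    """Replace the old unit symbol with the new one inside an axis label."""
    old, new = symbols.get(old_unit), symbols.get(new_unit)
    if old and new:
        return label.replace(old, new)
    return label

# e.g. relabel('Signal (V)', 'volt', 'percent') -> 'Signal (%)'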