Example no. 1
        def __init__(self, *args, **kwargs):
            my_kwargs = self.pop_kwargs(
                kwargs, *[
                    prop.label
                    for prop in self.Meta.get_local_persistent_properties()
                ])
            super(_BaseR0Model, self).__init__(R=0, *args, **kwargs)

            with self.data_changed.hold():
                if self.G > 1 and "W1" in my_kwargs:  # old-style model
                    for i in range(self.G - 1):
                        name = "W%d" % (i + 1)
                        self.mW[i] = not_none(my_kwargs.get(name, None), 0.8)
                        name = "F%d" % (i + 1)
                        setattr(
                            self, name,
                            self.mW[i] / (np.sum(np.diag(self._W)[i:]) or 1.0))
                else:
                    for i in range(self.G - 1):
                        name = "inherit_F%d" % (i + 1)
                        setattr(self, name, my_kwargs.get(name, False))
                        name = "F%d" % (i + 1)
                        setattr(self, name,
                                not_none(my_kwargs.get(name, None), 0.8))

                self.update()
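Every example in this collection leans on the same small `not_none` helper to fall back to a default whenever `None` is passed. The snippet below is a minimal sketch of the behaviour these call sites assume, not necessarily PyXRD's exact implementation:

    def not_none(value, default):
        # Return `value` unless it is None, in which case fall back to `default`.
        return value if value is not None else default

    # e.g. not_none(kwargs.get("W1", None), 0.8) yields 0.8 when "W1" is absent
    # or is explicitly passed as None.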
Example no. 2
 def do_async_evaluation(self, iter_func, eval_func=None, data_func=None, result_func=None):
     assert self.refiner is not None, "RefineAsyncHelper can only work when a refiner is set!"
     eval_func = not_none(eval_func, self.refiner.residual_callback)
     data_func = not_none(data_func, self.refiner.get_data_object)
     result_func = not_none(result_func, self.refiner.update)
     return super(RefineAsyncHelper, self).do_async_evaluation(
         iter_func, eval_func, data_func, result_func
     )
Example no. 3
    def __init__(self, W1=0.85, P1111_or_P2112=0.75, *args, **kwargs):
        super(R3G2Model, self).__init__(R=3, *args, **kwargs)

        with self.data_changed.hold():
            self.W1 = not_none(W1, 0.85)
            self.P1111_or_P2112 = not_none(P1111_or_P2112, 0.75)

            self.update()
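Several of these constructors wrap their assignments in `with self.data_changed.hold():` so that observers receive a single notification after a batch of changes. The sketch below illustrates that hold/release pattern in general terms; it is not PyXRD's actual `data_changed` implementation, just a minimal stand-in built on `contextlib`:

    from contextlib import contextmanager

    class Signal(object):
        """Toy signal that batches emissions while held."""

        def __init__(self):
            self._held = False
            self._pending = False
            self.callbacks = []

        @contextmanager
        def hold(self):
            # Suspend emission; fire once on exit if emit() was called meanwhile.
            self._held = True
            try:
                yield
            finally:
                self._held = False
                if self._pending:
                    self._pending = False
                    self.emit()

        def emit(self):
            if self._held:
                self._pending = True
                return
            for callback in self.callbacks:
                callback()

    # Usage: observers are notified only once for a batch of changes.
    changes = []
    data_changed = Signal()
    data_changed.callbacks.append(lambda: changes.append("notified"))
    with data_changed.hold():
        data_changed.emit()
        data_changed.emit()
    assert changes == ["notified"]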
Example no. 4
 def label(self):
     if self.display_stats_in_lbl and (self.project is not None and self.project.layout_mode == "FULL"):
         label = self.sample_name
         label += "\nRp = %.1f%%" % not_none(self.statistics.Rp, 0.0)
         label += "\nRwp = %.1f%%" % not_none(self.statistics.Rwp, 0.0)
         return label
     else:
         return self.sample_name
Example no. 5
 def get_label(self):
     label = self.sample_name
     if (self.project is not None and self.project.layout_mode == "FULL"):
         if self.display_stats_in_lbl:
             label += "\nR$_p$ = %.1f%%" % not_none(self.statistics.Rp, 0.0)
             label += "\nR$_{wp}$ = %.1f%%" % not_none(self.statistics.Rwp, 0.0)
         if self.display_residual_scale != 1.0:
             label += "\n\nResidual x%0.1f " % not_none(self.display_residual_scale, 1.0)
     return label
Example no. 6
 def label(self):
     if self.display_stats_in_lbl and (self.project is not None and
                                       self.project.layout_mode == "FULL"):
         label = self.sample_name
         label += "\nRp = %.1f%%" % not_none(self.statistics.Rp, 0.0)
         label += "\nRwp = %.1f%%" % not_none(self.statistics.Rwp, 0.0)
         return label
     else:
         return self.sample_name
Example no. 7
 def __init__(self,
              treemodel_property_name=None,
              treemodel_class_type=None,
              *args,
              **kwargs):
     super(TreeModelMixin, self).__init__(*args, **kwargs)
     self.treemodel_property_name = not_none(treemodel_property_name,
                                             self.treemodel_property_name)
     self.treemodel_class_type = not_none(treemodel_class_type,
                                          self.treemodel_class_type)
Example no. 8
 def setup(self,
           W1=0.75,
           P11_or_P22=0.5,
           inherit_W1=False,
           inherit_P11_or_P22=False,
           **kwargs):
     _AbstractProbability.setup(self, R=1)
     self.W1 = not_none(W1, 0.75)
     self.inherit_W1 = inherit_W1
     self.P11_or_P22 = not_none(P11_or_P22, 0.5)
     self.inherit_P11_or_P22 = inherit_P11_or_P22
Example no. 9
 def __init__(self, minimum, maximum, refine, *args, **kwargs):
     """
         Valid *positional* arguments for a RefinementInfo are:
             refine: whether or not the linked parameter is selected for refinement
             minimum: the minimum allowable value for the linked parameter
             maximum: the maximum allowable value for the linked parameter   
     """
     super(RefinementInfo, self).__init__()
     self.refine = refine
     self.minimum = not_none(minimum, 0.0)
     self.maximum = not_none(maximum, 1.0)
Example no. 10
 def get_label(self):
     label = self.sample_name
     if (self.project is not None and self.project.layout_mode == "FULL"):
         if self.display_stats_in_lbl:
             label += "\nR$_p$ = %.1f%%" % not_none(self.statistics.Rp, 0.0)
             label += "\nR$_{wp}$ = %.1f%%" % not_none(
                 self.statistics.Rwp, 0.0)
         if self.display_residual_scale != 1.0:
             label += "\n\nResidual x%0.1f " % not_none(
                 self.display_residual_scale, 1.0)
     return label
Example no. 11
 def setup(self, W1=0.75, P112_or_P211=0.75, P21=0.75, P122_or_P221=0.75,
         inherit_W1=False, inherit_P112_or_P211=False,
         inherit_P21=False, inherit_P122_or_P221=False, **kwargs):
     _AbstractProbability.setup(self, R=2)
     with self.data_changed.hold():
         self.W1 = not_none(W1, 0.75)
         self.inherit_W1 = inherit_W1
         self.P112_or_P211 = not_none(P112_or_P211, 0.75)
         self.inherit_P112_or_P211 = inherit_P112_or_P211
         self.P21 = not_none(P21, 0.75)
         self.inherit_P21 = inherit_P21
         self.P122_or_P221 = not_none(P122_or_P221, 0.75)
         self.inherit_P122_or_P221 = inherit_P122_or_P221
Example no. 12
    def create_gon_file(self):

        output = """        {
            "type": "Goniometer", 
            "properties": {
                "radius": %(radius)f, 
                "divergence": %(divergence)f, 
                "soller1": %(soller1)f, 
                "soller2": %(soller2)f, 
                "min_2theta": %(twotheta_min)f, 
                "max_2theta": %(twotheta_max)f, 
                "steps": %(twotheta_count)f, 
                "wavelength": %(alpha_average)f, 
                "has_ads": false, 
                "ads_fact": 1.0, 
                "ads_phase_fact": 1.0, 
                "ads_phase_shift": 0.0, 
                "ads_const": 0.0
            }
        }""" % dict(
            radius=float(not_none(self.radius, 25)),
            divergence=float(not_none(self.divergence, 0.5)),
            soller1=float(not_none(self.soller1, 2.5)),
            soller2=float(not_none(self.soller2, 2.5)),
            twotheta_min=float(not_none(self.twotheta_min, 3.0)),
            twotheta_max=float(not_none(self.twotheta_max, 45.0)),
            twotheta_count=float(not_none(self.twotheta_count, 2500)),
            alpha_average=float(not_none(self.alpha_average, 0.154056)),
        )
        f = StringIO(output)
        f.flush()
        return f
Example no. 13
    def create_gon_file(self):

        output = """        {
            "type": "Goniometer", 
            "properties": {
                "radius": %(radius)f, 
                "divergence": %(divergence)f, 
                "soller1": %(soller1)f, 
                "soller2": %(soller2)f, 
                "min_2theta": %(twotheta_min)f, 
                "max_2theta": %(twotheta_max)f, 
                "steps": %(twotheta_count)f, 
                "wavelength": %(alpha_average)f, 
                "has_ads": false, 
                "ads_fact": 1.0, 
                "ads_phase_fact": 1.0, 
                "ads_phase_shift": 0.0, 
                "ads_const": 0.0
            }
        }""" % dict(
            radius=float(not_none(self.radius, 25)),
            divergence=float(not_none(self.divergence, 0.5)),
            soller1=float(not_none(self.soller1, 2.5)),
            soller2=float(not_none(self.soller2, 2.5)),
            twotheta_min=float(not_none(self.twotheta_min, 3.0)),
            twotheta_max=float(not_none(self.twotheta_max, 45.0)),
            twotheta_count=float(not_none(self.twotheta_count, 2500)),
            alpha_average=float(not_none(self.alpha_average, 0.154056)),
        )

        return output
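Unlike Example no. 12, which wraps this same template in a `StringIO` object, this variant returns the raw string. Since the template is plain JSON, the result can be sanity-checked with the standard library; the `gonio` goniometer instance below is assumed purely for illustration:

    import json

    # `gonio` is a hypothetical goniometer object exposing create_gon_file()
    # as defined above; only the JSON structure comes from the example.
    props = json.loads(gonio.create_gon_file())["properties"]
    assert props["has_ads"] is False
    print(props["radius"], props["wavelength"])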
Example no. 14
        def setup(self, **kwargs):
            _AbstractProbability.setup(self, R=0)

            if self.G > 1 and "W1" in kwargs: # old-style model
                for i in range(self.G - 1):
                    name = "W%d" % (i + 1)
                    self.mW[i] = not_none(kwargs.get(name, None), 0.8)
                    name = "F%d" % (i + 1)
                    setattr(self, name, self.mW[i] / (np.sum(np.diag(self._W)[i:]) or 1.0))
            else:
                for i in range(self.G - 1):
                    name = "inherit_F%d" % (i + 1)
                    setattr(self, name, kwargs.get(name, False))
                    name = "F%d" % (i + 1)
                    setattr(self, name, not_none(kwargs.get(name, None), 0.8))
Example no. 15
    def __init__(self, W1=0.75, P112_or_P211=0.75, P21=0.75, P122_or_P221=0.75,
            inherit_W1=False, inherit_P112_or_P211=False,
            inherit_P21=False, inherit_P122_or_P221=False, *args, **kwargs):
        super(R2G2Model, self).__init__(R=2, *args, **kwargs)

        with self.data_changed.hold():
            self.W1 = not_none(W1, 0.75)
            self.inherit_W1 = inherit_W1
            self.P112_or_P211 = not_none(P112_or_P211, 0.75)
            self.inherit_P112_or_P211 = inherit_P112_or_P211
            self.P21 = not_none(P21, 0.75)
            self.inherit_P21 = inherit_P21
            self.P122_or_P221 = not_none(P122_or_P221, 0.75)
            self.inherit_P122_or_P221 = inherit_P122_or_P221

            self.update()
Example no. 16
        def setup(self, **kwargs):
            _AbstractProbability.setup(self, R=0)

            if self.G > 1 and "W1" in kwargs:  # old-style model
                for i in range(self.G - 1):
                    name = "W%d" % (i + 1)
                    self.mW[i] = not_none(kwargs.get(name, None), 0.8)
                    name = "F%d" % (i + 1)
                    setattr(self, name,
                            self.mW[i] / (np.sum(np.diag(self._W)[i:]) or 1.0))
            else:
                for i in range(self.G - 1):
                    name = "inherit_F%d" % (i + 1)
                    setattr(self, name, kwargs.get(name, False))
                    name = "F%d" % (i + 1)
                    setattr(self, name, not_none(kwargs.get(name, None), 0.8))
Example no. 17
    def __init__(self,
                 W1=0.75,
                 P11_or_P22=0.5,
                 inherit_W1=False,
                 inherit_P11_or_P22=False,
                 *args,
                 **kwargs):
        super(R1G2Model, self).__init__(R=1, *args, **kwargs)

        with self.data_changed.hold():
            self.W1 = not_none(W1, 0.75)
            self.inherit_W1 = inherit_W1
            self.P11_or_P22 = not_none(P11_or_P22, 0.5)
            self.inherit_P11_or_P22 = inherit_P11_or_P22

            self.update()
Example no. 18
 def update_refinement_info(self,
                            current_rp=None,
                            message=None,
                            server_status=None):
     if current_rp is not None and not isnan(current_rp):  # guard the None default
         self["current_residual"].set_text("%.2f" % current_rp)
     self["message"].set_text(not_none(message, ""))
     self.update_server_status(server_status)
Example no. 19
 def toggle_renderer(column, cell, model, itr, data=None):
     try:
         col = column.get_col_attr("active")
         value = model.get_value(itr, col)
         cell.set_property('active', not_none(value, False))
     except TypeError:
         if settings.DEBUG: raise
         pass
Example no. 20
 def toggle_renderer(column, cell, model, itr, data=None):
     try:
         col = column.get_col_attr("active")
         value = model.get_value(itr, col)
         cell.set_property('active', not_none(value, False))
     except TypeError:
         if settings.DEBUG: raise
         pass
Example no. 21
 def on_accept(phase_type, G, R):
     index = int(not_none(self.get_selected_index(), -1)) + 1
     if phase_type == "empty":
         self.add_object(Phase(G=int(G), R=int(R)))
     elif phase_type == "raw":
         self.add_object(RawPatternPhase())
     else:
         filename = phase_type
         self.model.load_phases(filename, insert_index=index)
Example no. 22
        def __init__(self, *args, **kwargs):
            my_kwargs = self.pop_kwargs(kwargs, *[prop.label for prop in self.Meta.get_local_persistent_properties()])
            super(_BaseR0Model, self).__init__(R=0, *args, **kwargs)

            with self.data_changed.hold():
                if self.G > 1 and "W1" in my_kwargs: # old-style model
                    for i in range(self.G - 1):
                        name = "W%d" % (i + 1)
                        self.mW[i] = not_none(my_kwargs.get(name, None), 0.8)
                        name = "F%d" % (i + 1)
                        setattr(self, name, self.mW[i] / (np.sum(np.diag(self._W)[i:]) or 1.0))
                else:
                    for i in range(self.G - 1):
                        name = "inherit_F%d" % (i + 1)
                        setattr(self, name, my_kwargs.get(name, False))
                        name = "F%d" % (i + 1)
                        setattr(self, name, not_none(my_kwargs.get(name, None), 0.8))

                self.update()
Example no. 23
 def load_phases(self, filename, insert_index=None):
     """
     Loads all :class:`~pyxrd.phase.models.Phase` objects from the file
     'filename'. An optional index can be given at which the phases should
     be inserted.
     """
     insert_index = not_none(insert_index, 0)
     for phase in Phase.load_phases(filename, parent=self):
         self.phases.insert(insert_index, phase)
         insert_index += 1
Example no. 24
 def on_accept(phase_type, G, R):
     index = int(not_none(self.get_selected_index(), -1)) + 1
     if phase_type == "empty":
         self.add_object(Phase(G=int(G), R=int(R)))
     elif phase_type == "raw":
         self.add_object(RawPatternPhase())
     else:
         filename = phase_type
         if filename is not None:
             self.model.load_phases(filename,
                                    parser=JSONParser,
                                    insert_index=index)
Example no. 25
 def setup(self,
           W1=0.6,
           P11_or_P22=0.3,
           G1=0.5,
           G2=0.4,
           G3=0.5,
           G4=0.2,
           inherit_W1=False,
           inherit_P11_or_P22=False,
           inherit_G1=False,
           inherit_G2=False,
           inherit_G3=False,
           inherit_G4=False,
           **kwargs):
     _AbstractProbability.setup(self, R=1)
     self.W1 = not_none(W1, 0.8)
     self.inherit_W1 = bool(inherit_W1)
     self.P11_or_P22 = not_none(P11_or_P22, 0.7)
     self.inherit_P11_or_P22 = bool(inherit_P11_or_P22)
     self.G1 = not_none(G1, 0.7)
     self.inherit_G1 = bool(inherit_G1)
     self.G2 = not_none(G2, 0.7)
     self.inherit_G2 = bool(inherit_G2)
     self.G3 = not_none(G3, 0.7)
     self.inherit_G3 = bool(inherit_G3)
     self.G4 = not_none(G4, 0.7)
     self.inherit_G4 = bool(inherit_G4)
Example no. 26
    def __init__(self,
                 W1=0.8,
                 P11_or_P22=0.7,
                 G1=0.7,
                 G2=0.7,
                 G3=0.7,
                 G4=0.7,
                 inherit_W1=False,
                 inherit_P11_or_P22=False,
                 inherit_G1=False,
                 inherit_G2=False,
                 inherit_G3=False,
                 inherit_G4=False,
                 *args,
                 **kwargs):
        super(R1G3Model, self).__init__(R=1, *args, **kwargs)

        with self.data_changed.hold():
            self.W1 = not_none(W1, 0.8)
            self.inherit_W1 = bool(inherit_W1)
            self.P11_or_P22 = not_none(P11_or_P22, 0.7)
            self.inherit_P11_or_P22 = bool(inherit_P11_or_P22)
            self.G1 = not_none(G1, 0.7)
            self.inherit_G1 = bool(inherit_G1)
            self.G2 = not_none(G2, 0.7)
            self.inherit_G2 = bool(inherit_G2)
            self.G3 = not_none(G3, 0.7)
            self.inherit_G3 = bool(inherit_G3)
            self.G4 = not_none(G4, 0.7)
            self.inherit_G4 = bool(inherit_G4)

            self.update()
Example no. 27
    def __init__(self,
                 W1=0.8,
                 P111_or_P212=0.9,
                 G1=0.9,
                 G2=0.9,
                 G3=0.9,
                 G4=0.9,
                 inherit_W1=False,
                 inherit_P111_or_P212=False,
                 inherit_G1=False,
                 inherit_G2=False,
                 inherit_G3=False,
                 inherit_G4=False,
                 *args,
                 **kwargs):
        super(R2G3Model, self).__init__(R=2, *args, **kwargs)

        with self.data_changed.hold():
            self.W1 = not_none(W1, 0.8)
            self.inherit_W1 = inherit_W1
            self.P111_or_P212 = not_none(P111_or_P212, 0.9)
            self.inherit_P111_or_P212 = inherit_P111_or_P212
            self.G1 = not_none(G1, 0.9)
            self.inherit_G1 = inherit_G1
            self.G2 = not_none(G2, 0.9)
            self.inherit_G2 = inherit_G2
            self.G3 = not_none(G3, 0.9)
            self.inherit_G3 = inherit_G3
            self.G4 = not_none(G4, 0.9)
            self.inherit_G4 = inherit_G4

            self.update()
Example no. 28
 def load_phases(self, filename, parser, insert_index=0):
     """
     Loads all :class:`~pyxrd.phase.models.Phase` objects from the file
     'filename'. An optional index can be given at which the phases should
     be inserted.
     """
     # make sure we have no duplicate UUID's
     insert_index = not_none(insert_index, 0)
     type(Project).object_pool.change_all_uuids()
     for phase in parser.parse(filename):
         phase.parent = self
         self.phases.insert(insert_index, phase)
         insert_index += 1
Example no. 29
 def load_phases(self, filename, parser, insert_index=0):
     """
     Loads all :class:`~pyxrd.phase.models.Phase` objects from the file
     'filename'. An optional index can be given at which the phases should
     be inserted.
     """
     # make sure we have no duplicate UUID's
     insert_index = not_none(insert_index, 0)
     type(Project).object_pool.change_all_uuids()
     for phase in parser.parse(filename):
         phase.parent = self
         self.phases.insert(insert_index, phase)
         insert_index += 1
Example no. 30
    def __init__(self,
                 W1=0.75,
                 P112_or_P211=0.75,
                 P21=0.75,
                 P122_or_P221=0.75,
                 inherit_W1=False,
                 inherit_P112_or_P211=False,
                 inherit_P21=False,
                 inherit_P122_or_P221=False,
                 *args,
                 **kwargs):
        super(R2G2Model, self).__init__(R=2, *args, **kwargs)

        with self.data_changed.hold():
            self.W1 = not_none(W1, 0.75)
            self.inherit_W1 = inherit_W1
            self.P112_or_P211 = not_none(P112_or_P211, 0.75)
            self.inherit_P112_or_P211 = inherit_P112_or_P211
            self.P21 = not_none(P21, 0.75)
            self.inherit_P21 = inherit_P21
            self.P122_or_P221 = not_none(P122_or_P221, 0.75)
            self.inherit_P122_or_P221 = inherit_P122_or_P221

            self.update()
Example no. 31
    def from_experimental_data(filename, parent, parser=None):
        """
            Returns a list of new :class:`~.specimen.models.Specimen`'s loaded
            from `filename`, setting their parent to `parent` using the given
            parser, or :class:`~.generic.io.xrd_parsers.XRDParser` if none is passed.
        """
        specimens = list()
        parser = not_none(parser, XRDParser)
        xrdfiles = parser.parse(filename)
        for xrdfile in xrdfiles:
            name, sample, generator = xrdfile.filename, xrdfile.name, xrdfile.data
            specimen = Specimen(parent=parent, name=name, sample_name=sample)
            specimen.experimental_pattern.load_data_from_generator(generator, clear=True)
            specimen.goniometer.reset_from_file(None, data=xrdfile.create_gon_file())
            specimens.append(specimen)

        return specimens
Example no. 32
    def get_kwarg(self, fun_kwargs, default, *keywords):
        """
        Convenience function to get a certain keyword 'kw' value from the passed
        keyword arguments 'fun_kwargs'. If the key 'kw' is not in 'fun_kwargs'
        a list of deprecated keywords to be searched for can be passed as an
        optional argument list 'depr_kws'. If one of these is found, its value
        is returned and a deprecation warning is emitted. 
        If neither the 'kw' nor any of the 'depr_kws' are found the 'default'
        value is returned. 
        """
        if len(keywords) < 1:
            raise AttributeError("get_kwarg() requires at least one keyword (%d given)" % len(keywords))

        value = default
        for i, key in enumerate(keywords[::-1]):
            if key in fun_kwargs:
                value = not_none(fun_kwargs[key], default)
                if i != 0:
                    warn("The use of the keyword '%s' is deprecated for %s!" %
                        (key, type(self)), DeprecationWarning)
        return value
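To see the lookup in isolation, here is a small self-contained rework of the same search loop as a free function (with `not_none` inlined); the keyword names `color` and `line_colour` are invented for the illustration and do not come from the source:

    from warnings import warn

    def get_kwarg(fun_kwargs, default, *keywords):
        # Same search loop as the method above: keywords are walked back to
        # front, so a value found for an earlier keyword overrides one found
        # for a later keyword.
        value = default
        for i, key in enumerate(keywords[::-1]):
            if key in fun_kwargs:
                found = fun_kwargs[key]
                value = found if found is not None else default
                if i != 0:
                    warn("The use of the keyword '%s' is deprecated!" % key,
                         DeprecationWarning)
        return value

    # Only 'line_colour' is present, so its value is used:
    print(get_kwarg(dict(line_colour="red"), "black", "color", "line_colour"))  # red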
Example no. 33
 def setup(self, W1=0.6, P11_or_P22=0.3, G1=0.5, G2=0.4, G3=0.5, G4=0.2,
         inherit_W1=False, inherit_P11_or_P22=False, inherit_G1=False,
         inherit_G2=False, inherit_G3=False, inherit_G4=False, **kwargs):
     _AbstractProbability.setup(self, R=1)
     self.W1 = not_none(W1, 0.8)
     self.inherit_W1 = bool(inherit_W1)
     self.P11_or_P22 = not_none(P11_or_P22, 0.7)
     self.inherit_P11_or_P22 = bool(inherit_P11_or_P22)
     self.G1 = not_none(G1, 0.7)
     self.inherit_G1 = bool(inherit_G1)
     self.G2 = not_none(G2, 0.7)
     self.inherit_G2 = bool(inherit_G2)
     self.G3 = not_none(G3, 0.7)
     self.inherit_G3 = bool(inherit_G3)
     self.G4 = not_none(G4, 0.7)
     self.inherit_G4 = bool(inherit_G4)
Example no. 34
 def setup(self, W1=0.8, P111_or_P212=0.9, G1=0.9, G2=0.9, G3=0.9, G4=0.9,
     inherit_W1=False, inherit_P111_or_P212=False, inherit_G1=False,
     inherit_G2=False, inherit_G3=False, inherit_G4=False, **kwargs):
     _AbstractProbability.setup(self, R=2)
     self.W1 = not_none(W1, 0.8)
     self.inherit_W1 = inherit_W1
     self.P111_or_P212 = not_none(P111_or_P212, 0.9)
     self.inherit_P111_or_P212 = inherit_P111_or_P212
     self.G1 = not_none(G1, 0.9)
     self.inherit_G1 = inherit_G1
     self.G2 = not_none(G2, 0.9)
     self.inherit_G2 = inherit_G2
     self.G3 = not_none(G3, 0.9)
     self.inherit_G3 = inherit_G3
     self.G4 = not_none(G4, 0.9)
     self.inherit_G4 = inherit_G4
Example no. 35
    def get_kwarg(self, fun_kwargs, default, *keywords):
        """
        Convenience function to get a certain keyword 'kw' value from the passed
        keyword arguments 'fun_kwargs'. If the key 'kw' is not in 'fun_kwargs'
        a list of deprecated keywords to be searched for can be passed as an
        optional argument list 'depr_kws'. If one of these is found, its value
        is returned and a deprecation warning is emitted. 
        If neither the 'kw' nor any of the 'depr_kws' are found the 'default'
        value is returned. 
        """
        if len(keywords) < 1:
            raise AttributeError(
                "get_kwarg() requires at least one keyword (%d given)" %
                len(keywords))

        value = default
        for i, key in enumerate(keywords[::-1]):
            if key in fun_kwargs:
                value = not_none(fun_kwargs[key], default)
                if i != 0:
                    warn(
                        "The use of the keyword '%s' is deprecated for %s!" %
                        (key, type(self)), DeprecationWarning)
        return value
Example no. 36
    def __init__(self, W1=0.8, P111_or_P212=0.9, G1=0.9, G2=0.9, G3=0.9, G4=0.9,
        inherit_W1=False, inherit_P111_or_P212=False, inherit_G1=False,
        inherit_G2=False, inherit_G3=False, inherit_G4=False, *args, **kwargs):
        super(R2G3Model, self).__init__(R=2, *args, **kwargs)

        with self.data_changed.hold():
            self.W1 = not_none(W1, 0.8)
            self.inherit_W1 = inherit_W1
            self.P111_or_P212 = not_none(P111_or_P212, 0.9)
            self.inherit_P111_or_P212 = inherit_P111_or_P212
            self.G1 = not_none(G1, 0.9)
            self.inherit_G1 = inherit_G1
            self.G2 = not_none(G2, 0.9)
            self.inherit_G2 = inherit_G2
            self.G3 = not_none(G3, 0.9)
            self.inherit_G3 = inherit_G3
            self.G4 = not_none(G4, 0.9)
            self.inherit_G4 = inherit_G4

            self.update()
Example no. 37
    def parse(cls, fp, data_objects=None, close=False):
        filename, fp, close = cls._get_file(fp, close=close)

        try:
            basename = os.path.basename(filename)
        except AttributeError:
            basename = None

        num_samples = 0

        zipinfos = fp.infolist()

        processed_folders = []

        data_objects = not_none(data_objects, [])

        for zipinfo in zipinfos:
            if zipinfo.filename.count('/') == 1 and "DataContainer.xml" in zipinfo.filename:

                folder = os.path.dirname(zipinfo.filename)
                if folder not in processed_folders:

                    processed_folders.append(folder)

                    header_d = cls._get_header_dict(fp, folder)
                    raw_data_files, sample_name = cls._get_raw_data_files(fp, folder)

                    for raw_data_filename in raw_data_files:
                        contf = fp.open(raw_data_filename)

                        _, root = cls.get_xml_for_file(contf)

                        for route in root.findall("./DataRoutes/DataRoute"):

                            # Adapt XRDFile list & get last addition:
                            data_objects = cls._adapt_data_object_list(
                                data_objects,
                                num_samples=(num_samples + 1),
                                only_extend=True
                            )
                            data_object = data_objects[num_samples]

                            # Get the Datum tags:
                            datums = route.findall("Datum")
                            data = []

                            # Parse the RawDataView tags to find out what index in
                            # the datum is used for what type of data:
                            enabled_datum_index = None
                            twotheta_datum_index = None
                            intensity_datum_index = None
                            steptime_datum_index = None
                            for dataview in route.findall("./DataViews/RawDataView"):
                                index = int(dataview.get("Start", 0))
                                name = dataview.get("LogicName", default="Undefined")
                                xsi_type = dataview.get("{http://www.w3.org/2001/XMLSchema-instance}type", default="Undefined")
                                if name == "MeasuredTime":
                                    steptime_datum_index = index
                                elif name == "AbsorptionFactor":
                                    enabled_datum_index = index
                                elif name == "Undefined" and xsi_type == "VaryingRawDataView":
                                    for i, definition in enumerate(dataview.findall("./Varying/FieldDefinitions")):
                                        if definition.get("TwoTheta"):
                                            index += i
                                            break
                                    twotheta_datum_index = index
                                elif name == "Undefined" and xsi_type == "RecordedRawDataView":
                                    intensity_datum_index = index

                            # Parse the SubScanInfo list (usually only one), and
                            # then parse the datums accordingly
                            twotheta_min = None
                            twotheta_max = None
                            twotheta_count = 0
                            for subscan in route.findall("./SubScans/SubScanInfo"):
                                # Get the steps, where to start and the planned
                                # time per step (measuredTimePerStep deviates
                                # if the recording was interrupted):
                                steps = int(subscan.get("MeasuredSteps"))
                                start = int(subscan.get("StartStepNo"))
                                steptime = float(subscan.get("PlannedTimePerStep"))

                                for datum in datums[start:start + steps]:
                                    values = datum.text.split(",")
                                    if values[enabled_datum_index] == "1":
                                        # Fetch values from the list:
                                        datum_steptime = float(values[steptime_datum_index])
                                        intensity = float(values[intensity_datum_index])
                                        intensity /= float(steptime * datum_steptime)
                                        twotheta = float(values[twotheta_datum_index])

                                        # Keep track of min 2theta:
                                        if twotheta_min is None:
                                            twotheta_min = twotheta
                                        else:
                                            twotheta_min = min(twotheta_min, twotheta)

                                        # Keep track of max 2theta:
                                        if twotheta_max is None:
                                            twotheta_max = twotheta
                                        else:
                                            twotheta_max = max(twotheta_max, twotheta)

                                        # Append point and increase count:
                                        data.append([twotheta, intensity])
                                        twotheta_count += 1

                            #Update header:
                            data_object.update(
                                filename=basename,
                                name=sample_name,
                                time_step=1, # we converted to CPS
                                twotheta_min=twotheta_min,
                                twotheta_max=twotheta_max,
                                twotheta_count=twotheta_count,
                                **header_d
                            )

                            data_object.data = data

                            num_samples += 1

                        #end for
                        contf.close()
                        
                    #end for
                #end if
            #end if
        #end for

        if close: fp.close()
        return data_objects
Example no. 38
 def setup(self,
           W1=0.6,
           P11_or_P22=0.25,
           R1=0.5,
           R2=0.5,
           G1=0.5,
           G2=0.4,
           G11=0.5,
           G12=0.2,
           G21=0.8,
           G22=0.75,
           G31=0.7,
           G32=0.5,
           inherit_W1=False,
           inherit_P11_or_P22=False,
           inherit_R1=False,
           inherit_R2=False,
           inherit_G1=False,
           inherit_G2=False,
           inherit_G11=False,
           inherit_G12=False,
           inherit_G21=False,
           inherit_G22=False,
           inherit_G31=False,
           inherit_G32=False,
           **kwargs):
     _AbstractProbability.setup(self, R=1)
     self.W1 = not_none(W1, 0.6)
     self.inherit_W1 = inherit_W1
     self.P11_or_P22 = not_none(P11_or_P22, 0.25)
     self.inherit_P11_or_P22 = inherit_P11_or_P22
     self.R1 = not_none(R1, 0.5)
     self.inherit_R1 = inherit_R1
     self.R2 = not_none(R2, 0.5)
     self.inherit_R2 = inherit_R2
     self.G1 = not_none(G1, 0.5)
     self.inherit_G1 = inherit_G1
     self.G2 = not_none(G2, 0.4)
     self.inherit_G2 = inherit_G2
     self.G11 = not_none(G11, 0.5)
     self.inherit_G11 = inherit_G11
     self.G12 = not_none(G12, 0.2)
     self.inherit_G12 = inherit_G12
     self.G21 = not_none(G21, 0.8)
     self.inherit_G21 = inherit_G21
     self.G22 = not_none(G22, 0.75)
     self.inherit_G22 = inherit_G22
     self.G31 = not_none(G31, 0.7)
     self.inherit_G31 = inherit_G31
     self.G32 = not_none(G32, 0.5)
     self.inherit_G32 = inherit_G32
Example no. 39
 def setup(self, W1=0.85, P1111_or_P2112=0.75, **kwargs):
     _AbstractProbability.setup(self, R=3)
     self.W1 = not_none(W1, 0.85)
     self.P1111_or_P2112 = not_none(P1111_or_P2112, 0.75)
Example no. 40
    def get_best_threshold(self, max_threshold=None, steps=None):
        """
            Estimates the best threshold for peak detection using an
            iterative algorithm. Assumes there is a linear contribution from noise.
            Returns a 3-tuple: a (threshold values, peak counts) pair of lists,
            the selected threshold, and the maximum threshold.
        """
        length = self.data_x.size
        steps = not_none(steps, 20)
        threshold = 0.1
        max_threshold = not_none(max_threshold, threshold * 3.2)

        def get_new_threshold(threshold, deltas, num_peaks, ln):
            # Left side line:
            x = deltas[:ln]
            y = num_peaks[:ln]
            slope, intercept, R, _, _ = stats.linregress(x, y)
            return R, -intercept / slope

        if length > 2:
            # Adjust the first distribution:
            deltas, num_peaks = self.calculate_npeaks_for(max_threshold, steps)

            #  Fit several lines with an increasing number of points from the
            #  generated threshold / marker count graph. Stop when the
            #  R coefficient drops below 0.98 (past the linear increase from noise).
            #  Then repeat this with an increased resolution of data points
            #  and continue until the result no longer changes.

            last_threshold = None
            solution = False
            max_iters = 10
            min_iters = 3
            itercount = 0
            while not solution:
                # Number of points to use for the lin regress:
                ln = 4
                # Maximum number of points to use:
                max_ln = len(deltas)
                # Flag indicating if we can stop searching for the linear part
                stop = False
                while not stop:
                    R, threshold = get_new_threshold(threshold, deltas,
                                                     num_peaks, ln)
                    max_threshold = threshold * 3.2
                    if abs(R) < 0.98 or ln >= max_ln:
                        stop = True
                    else:
                        ln += 1
                itercount += 1  # Increase # of iterations
                if last_threshold:
                    # Check if we have run at least `min_iters`, at most `max_iters`
                    # and have not reached an equilibrium.
                    solution = bool(
                        itercount > min_iters
                        and not (itercount <= max_iters
                                 and last_threshold - threshold >= 0.001))
                    if not solution:
                        deltas, num_peaks = self.calculate_npeaks_for(
                            max_threshold, steps)
                last_threshold = threshold

            return (deltas, num_peaks), threshold, max_threshold
        else:
            return ([], []), threshold, max_threshold
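For clarity on the return shape (a nested tuple rather than a flat 4-tuple), a call site would unpack the result as sketched below; `pattern` stands in for whatever object provides `get_best_threshold` and is assumed only for illustration:

    # `pattern` is a hypothetical object exposing the method above;
    # only the return shape comes from the source.
    (deltas, num_peaks), threshold, max_threshold = pattern.get_best_threshold(steps=20)
    print("best threshold: %.3f (searched up to %.3f)" % (threshold, max_threshold))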
Example no. 41
 def save_data(self, parser, filename, **kwargs):
     if self.data_y.shape[1] > 1:
         kwargs["header"] = ["2θ", ] + (not_none(self.y_names, []))
     parser.write(filename, self.data_x, self._data_y.transpose(), **kwargs)
Example no. 42
 def setup(self, W1=0.6, P11_or_P22=0.25, R1=0.5, R2=0.5, G1=0.5, G2=0.4,
         G11=0.5, G12=0.2, G21=0.8, G22=0.75, G31=0.7, G32=0.5,
         inherit_W1=False, inherit_P11_or_P22=False, inherit_R1=False,
         inherit_R2=False, inherit_G1=False, inherit_G2=False,
         inherit_G11=False, inherit_G12=False, inherit_G21=False,
         inherit_G22=False, inherit_G31=False, inherit_G32=False, **kwargs):
     _AbstractProbability.setup(self, R=1)
     self.W1 = not_none(W1, 0.6)
     self.inherit_W1 = inherit_W1
     self.P11_or_P22 = not_none(P11_or_P22, 0.25)
     self.inherit_P11_or_P22 = inherit_P11_or_P22
     self.R1 = not_none(R1, 0.5)
     self.inherit_R1 = inherit_R1
     self.R2 = not_none(R2, 0.5)
     self.inherit_R2 = inherit_R2
     self.G1 = not_none(G1, 0.5)
     self.inherit_G1 = inherit_G1
     self.G2 = not_none(G2, 0.4)
     self.inherit_G2 = inherit_G2
     self.G11 = not_none(G11, 0.5)
     self.inherit_G11 = inherit_G11
     self.G12 = not_none(G12, 0.2)
     self.inherit_G12 = inherit_G12
     self.G21 = not_none(G21, 0.8)
     self.inherit_G21 = inherit_G21
     self.G22 = not_none(G22, 0.75)
     self.inherit_G22 = inherit_G22
     self.G31 = not_none(G31, 0.7)
     self.inherit_G31 = inherit_G31
     self.G32 = not_none(G32, 0.5)
     self.inherit_G32 = inherit_G32
Example no. 43
 def update_refinement_info(self, current_rp=None, message=None):
     if current_rp is not None and not isnan(current_rp):  # guard the None default
         self["current_residual"].set_text("%.2f" % current_rp)
     self["message"].set_text(not_none(message, ""))
Example no. 44
 def setup(self, W1=0.75, P11_or_P22=0.5, inherit_W1=False, inherit_P11_or_P22=False, **kwargs):
     _AbstractProbability.setup(self, R=1)
     self.W1 = not_none(W1, 0.75)
     self.inherit_W1 = inherit_W1
     self.P11_or_P22 = not_none(P11_or_P22, 0.5)
     self.inherit_P11_or_P22 = inherit_P11_or_P22
Example no. 45
    def get_best_threshold(self, max_threshold=None, steps=None, status_dict=None):
        """
            Estimates the best threshold for peak detection using an
            iterative algorithm. Assumes there is a linear contribution from noise.
            Returns a 3-tuple: a (threshold values, peak counts) pair of lists,
            the selected threshold, and the maximum threshold.
        """
        length = self.data_x.size
        steps = not_none(steps, 20)
        threshold = 0.1
        max_threshold = not_none(max_threshold, threshold * 3.2)

        def get_new_threshold(threshold, deltas, num_peaks, ln):
            # Left side line:
            x = deltas[:ln]
            y = num_peaks[:ln]
            slope, intercept, R, _, _ = stats.linregress(x, y)
            return R, -intercept / slope

        if length > 2:
            # Adjust the first distribution:
            deltas, num_peaks = self.calculate_npeaks_for(max_threshold, steps)

            #  Fit several lines with an increasing number of points from the
            #  generated threshold / marker count graph. Stop when the
            #  R coefficient drops below 0.98 (past the linear increase from noise).
            #  Then repeat this with an increased resolution of data points
            #  and continue until the result no longer changes.

            last_threshold = None
            solution = False
            max_iters = 10
            min_iters = 3
            itercount = 0
            if status_dict is not None:
                status_dict["progress"] = 0

            while not solution:
                # Number of points to use for the lin regress:
                ln = 4
                # Maximum number of points to use:
                max_ln = len(deltas)
                # Flag indicating if we can stop searching for the linear part
                stop = False
                while not stop:
                    R, threshold = get_new_threshold(threshold, deltas, num_peaks, ln)
                    max_threshold = threshold * 3.2
                    if abs(R) < 0.98 or ln >= max_ln:
                        stop = True
                    else:
                        ln += 1
                itercount += 1 # Increase # of iterations
                if last_threshold:
                    # Check if we have run at least `min_iters`, at most `max_iters`
                    # and have not reached an equilibrium.
                    solution = bool(
                        itercount > min_iters and not
                        (
                            itercount <= max_iters and
                            last_threshold - threshold >= 0.001
                        )
                    )
                    if not solution:
                        deltas, num_peaks = self.calculate_npeaks_for(max_threshold, steps)
                last_threshold = threshold
                if status_dict is not None:
                    status_dict["progress"] = float(itercount / max_iters)

            return (deltas, num_peaks), threshold, max_threshold
        else:
            return ([], []), threshold, max_threshold
Example no. 46
 def setup(self, W1=0.85, P1111_or_P2112=0.75, **kwargs):
     _AbstractProbability.setup(self, R=3)
     self.W1 = not_none(W1, 0.85)
     self.P1111_or_P2112 = not_none(P1111_or_P2112, 0.75)
Example no. 47
    def parse(cls, fp, data_objects=None, close=False):
        filename, fp, close = cls._get_file(fp, close=close)

        try:
            basename = os.path.basename(filename)
        except AttributeError:
            basename = None

        num_samples = 0

        zipinfos = fp.infolist()

        processed_folders = []

        data_objects = not_none(data_objects, [])

        for zipinfo in zipinfos:
            if zipinfo.filename.count('/') == 1 and "DataContainer.xml" in zipinfo.filename:

                folder = os.path.dirname(zipinfo.filename)
                if folder not in processed_folders:

                    processed_folders.append(folder)

                    header_d = cls._get_header_dict(fp, folder)
                    raw_data_files, sample_name = cls._get_raw_data_files(fp, folder)
               
                    for raw_data_filename in raw_data_files:
                        contf = fp.open(raw_data_filename)
                        
                        _, root = cls.get_xml_for_file(contf)
                        
                        isScan = "NonAmbientModeData" not in root.find("./DataRoutes/DataRoute/ScanInformation").get("ScanName")

                        if isScan:
                            for route in root.findall("./DataRoutes/DataRoute"):
                                # Adapt XRDFile list & get last addition:
                                data_objects = cls._adapt_data_object_list(
                                    data_objects,
                                    num_samples=(num_samples + 1),
                                    only_extend=True
                                )
                                data_object = data_objects[num_samples]
    
                                # Get the Datum tags:
                                datums = route.findall("Datum")
                                data = []
    
                                # Parse the RawDataView tags to find out what index in
                                # the datum is used for what type of data:
                                enabled_datum_index = None
                                twotheta_datum_index = None
                                intensity_datum_index = None
                                steptime_datum_index = None
                                relative_humidity_data, relative_humidity_index = None, None
                                temperature_data, temperature_index = None, None
                                for dataview in route.findall("./DataViews/RawDataView"):
                                    index = int(dataview.get("Start", 0))
                                    name = dataview.get("LogicName") or "Undefined"
                                    xsi_type = dataview.get("{http://www.w3.org/2001/XMLSchema-instance}type") or "Undefined"
                                    if name == "MeasuredTime":
                                        steptime_datum_index = index
                                    elif name == "AbsorptionFactor":
                                        enabled_datum_index = index
                                    elif name == "Undefined" and xsi_type == "VaryingRawDataView":
                                        for i, definition in enumerate(dataview.findall("./Varying/FieldDefinitions")):
                                            if definition.get("TwoTheta"):
                                                index += i
                                                break
                                        twotheta_datum_index = index
                                    elif name == "Undefined" and xsi_type == "RecordedRawDataView":
                                        logic_name = dataview.find("Recording").get("LogicName")
                                        if logic_name == "ScanCounter":
                                            intensity_datum_index = index
                                        elif logic_name == "modeActualHum":
                                            relative_humidity_index = index
                                            relative_humidity_data = []
                                        elif logic_name == "modeActualTemp":
                                            temperature_index = index
                                            temperature_data = []
                                            
                                # Parse the SubScanInfo list (usually only one), and
                                # then parse the datums accordingly
                                twotheta_min = None
                                twotheta_max = None
                                twotheta_count = 0
                                for subscan in route.findall("./SubScans/SubScanInfo"):
                                    # Get the steps, where to start and the planned
                                    # time per step (measuredTimePerStep deviates
                                    # if the recording was interrupted):
                                    steps = int(subscan.get("MeasuredSteps"))
                                    start = int(subscan.get("StartStepNo"))
                                    steptime = float(subscan.get("PlannedTimePerStep"))
    
                                    for datum in datums[start:start + steps]:
                                        values = datum.text.split(",")
                                        if values[enabled_datum_index] == "1":
                                            # Fetch values from the list:
                                            datum_steptime = float(values[steptime_datum_index])
                                            intensity = float(values[intensity_datum_index])
                                            intensity /= float(steptime * datum_steptime)
                                            twotheta = float(values[twotheta_datum_index])
                                            
                                            # If we have temperature or RH data, get them as well:
                                            if temperature_index is not None:
                                                temperature = float(values[temperature_index])
                                                temperature_data.append(temperature) 
                                            if relative_humidity_index is not None:
                                                relative_humidity = float(values[relative_humidity_index])
                                                relative_humidity_data.append(relative_humidity)                                        
    
                                            # Keep track of min 2theta:
                                            if twotheta_min is None:
                                                twotheta_min = twotheta
                                            else:
                                                twotheta_min = min(twotheta_min, twotheta)
    
                                            # Keep track of max 2theta:
                                            if twotheta_max is None:
                                                twotheta_max = twotheta
                                            else:
                                                twotheta_max = max(twotheta_max, twotheta)
    
                                            # Append point and increase count:
                                            data.append([twotheta, intensity])
                                            twotheta_count += 1
    
                                #Update header:
                                data_object.update(
                                    filename=basename,
                                    name=sample_name,
                                    time_step=1, # we converted to CPS
                                    twotheta_min=twotheta_min,
                                    twotheta_max=twotheta_max,
                                    twotheta_count=twotheta_count,
                                    **header_d
                                )
    
                                data_object.data = data
                                
                                # These might be None:
                                data_object.temperature_data = temperature_data
                                data_object.relative_humidity_data = relative_humidity_data
    
                                num_samples += 1

                        #end for
                        contf.close()
                        
                    #end for
                #end if
            #end if
        #end for

        if close: fp.close()
        return data_objects
Example no. 48
    def __init__(self,
                 W1=0.6,
                 P11_or_P22=0.25,
                 R1=0.5,
                 R2=0.5,
                 G1=0.5,
                 G2=0.4,
                 G11=0.5,
                 G12=0.5,
                 G21=0.8,
                 G22=0.75,
                 G31=0.7,
                 G32=0.5,
                 inherit_W1=False,
                 inherit_P11_or_P22=False,
                 inherit_R1=False,
                 inherit_R2=False,
                 inherit_G1=False,
                 inherit_G2=False,
                 inherit_G11=False,
                 inherit_G12=False,
                 inherit_G21=False,
                 inherit_G22=False,
                 inherit_G31=False,
                 inherit_G32=False,
                 *args,
                 **kwargs):
        super(R1G4Model, self).__init__(R=1, *args, **kwargs)

        with self.data_changed.hold():
            self.W1 = not_none(W1, 0.6)
            self.inherit_W1 = inherit_W1
            self.P11_or_P22 = not_none(P11_or_P22, 0.25)
            self.inherit_P11_or_P22 = inherit_P11_or_P22
            self.R1 = not_none(R1, 0.5)
            self.inherit_R1 = inherit_R1
            self.R2 = not_none(R2, 0.5)
            self.inherit_R2 = inherit_R2
            self.G1 = not_none(G1, 0.5)
            self.inherit_G1 = inherit_G1
            self.G2 = not_none(G2, 0.4)
            self.inherit_G2 = inherit_G2
            self.G11 = not_none(G11, 0.5)
            self.inherit_G11 = inherit_G11
            self.G12 = not_none(G12, 0.5)
            self.inherit_G12 = inherit_G12
            self.G21 = not_none(G21, 0.8)
            self.inherit_G21 = inherit_G21
            self.G22 = not_none(G22, 0.75)
            self.inherit_G22 = inherit_G22
            self.G31 = not_none(G31, 0.7)
            self.inherit_G31 = inherit_G31
            self.G32 = not_none(G32, 0.5)
            self.inherit_G32 = inherit_G32

            self.update()
Example no. 49
 def __init__(self, treemodel_property_name=None, treemodel_class_type=None, *args, **kwargs):
     super(TreeModelMixin, self).__init__(*args, **kwargs)
     self.treemodel_property_name = not_none(treemodel_property_name, self.treemodel_property_name)
     self.treemodel_class_type = not_none(treemodel_class_type, self.treemodel_class_type)
Example no. 50
 def __init__(self, *args, **kwargs):
     self.title = not_none(kwargs.pop("title", None), self.title)
     super(ObjectListStoreController, self).__init__(*args, **kwargs)
Example no. 51
 def save_data(self, parser, filename, **kwargs):
     if self.data_y.shape[1] > 1:
         kwargs["header"] = [
             "2θ",
         ] + (not_none(self.y_names, []))
     parser.write(filename, self.data_x, self._data_y.transpose(), **kwargs)
Example no. 52
 def __init__(self, *args, **kwargs):
     self.title = not_none(kwargs.pop("title", None), self.title)
     super(ObjectListStoreController, self).__init__(*args, **kwargs)