Example #1
# __slots__ restricts instance attributes (class attributes are not restricted)
# __slots__ restricts instance methods (class methods are not restricted)
# it does not restrict subclasses from adding attributes or methods

import types


class A:
    __slots__ = ('name', 'age')

    def __init__(self, name, age):
        self.name = name
        self.age = age

    def only(self):
        pass


class B(A):
    pass


def test():
    print('hello world')


b = B('xt', 25)
b.gender = '男'
b.test = types.MethodType(test, b)
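# A minimal sketch (not part of the original snippet) illustrating the
# __slots__ rules stated in the comments above, using the classes A and B
# defined here.
a = A('xt', 25)
try:
    a.gender = 'male'        # A defines __slots__, so new instance attributes fail
except AttributeError as err:
    print('A instance rejected new attribute:', err)

A.species = 'human'          # class attributes are not restricted by __slots__
print(A.species)

# B does not define its own __slots__, so its instances accept new attributes,
# as the assignments to b.gender and b.test above already show.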
Example #2
def node_will_send_incorrect_catchup(node):
    node.nodeMsgRouter.routes[CatchupReq] = types.MethodType(
        _sendIncorrectTxns,
        node.ledgerManager
    )
Example #3
 def create_bound_method(func, obj):
     return types.MethodType(func, obj, obj.__class__)
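# The three-argument call above is the Python 2 signature of types.MethodType.
# Under Python 3 the class argument is gone, so a hedged equivalent (an
# illustrative sketch, not part of the original snippet) is simply:
import types

def create_bound_method_py3(func, obj):
    return types.MethodType(func, obj)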
Example #4
    def export_combined(self, name):
        models = self.models.values()
        last_real_position = None
        # Sort models by Z max to print smaller objects first
        models.sort(key=lambda x: x.dims[-1])
        alllayers = []
        for (model_i, model) in enumerate(models):

            def add_offset(layer):
                return layer.z + model.offsets[
                    2] if layer.z is not None else layer.z

            alllayers += [(add_offset(layer), model_i, layer_i)
                          for (layer_i,
                               layer) in enumerate(model.gcode.all_layers)
                          if layer]
        alllayers.sort()
        laste = [0] * len(models)
        lasttool = [0] * len(models)
        lastrelative = [False] * len(models)
        with open(name, "w") as f:
            analyzer = gcoder.GCode(None, get_home_pos(self.build_dimensions))
            analyzer.write = types.MethodType(
                lambda self, line: gcoder_write(self, f, line), analyzer)
            for (layer_z, model_i, layer_i) in alllayers:
                model = models[model_i]
                layer = model.gcode.all_layers[layer_i]
                r = math.radians(model.rot)
                o = model.offsets
                co = model.centeroffset
                offset_pos = last_real_position if last_real_position is not None else (
                    0, 0, 0)
                analyzer.write("; %f %f %f\n" % offset_pos)
                trans = (-(o[0] + co[0]), -(o[1] + co[1]), -(o[2] + co[2]))
                trans_wpos = (offset_pos[0] + trans[0],
                              offset_pos[1] + trans[1],
                              offset_pos[2] + trans[2])
                analyzer.write("; GCodePlater: Model %d Layer %d at Z = %s\n" %
                               (model_i, layer_i, layer_z))
                if lastrelative[model_i]:
                    analyzer.write("G91\n")
                else:
                    analyzer.write("G90\n")
                if analyzer.current_tool != lasttool[model_i]:
                    analyzer.write("T%d\n" % lasttool[model_i])
                analyzer.write("G92 X%.5f Y%.5f Z%.5f\n" % trans_wpos)
                analyzer.write("G92 E%.5f\n" % laste[model_i])
                for l in layer:
                    if l.command != "G28" and (l.command != "G92"
                                               or extrusion_only(l)):
                        if r == 0:
                            analyzer.write(l.raw + "\n")
                        else:
                            analyzer.write(
                                rewrite_gline(co, l, math.cos(r), math.sin(r))
                                + "\n")
                # Find the current real position & E
                last_real_position = analyzer.current_pos
                laste[model_i] = analyzer.current_e
                lastrelative[model_i] = analyzer.relative
                lasttool[model_i] = analyzer.current_tool
        print _("Exported merged G-Codes to %s") % name
Example #5
 def create_unbound_method(func, cls):
     return types.MethodType(func, None, cls)
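# The call above creates a Python 2 "unbound method"; Python 3 has no such
# type, and the usual compatibility shim simply returns the function itself.
# A hedged Python 3 sketch (illustrative, not part of the original snippet):
def create_unbound_method_py3(func, cls):
    return func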
Example #6
        wspace = self.process.sourcemanager.get(wspaceName)
    else:
        if not isDEBUG:
            self.logger.logERROR(
                "RooWorkspace '{0}' not found".format(wspaceName))
            self.logger.logDEBUG(
                "Please access RooWorkspace with WspaceReader")
            raise RuntimeError
        wspace = RooWorkspace(wspaceName)
        self.process.sourcemanager.update(wspaceName, wspace)
    wspace.addClassDeclImportDir(modulePath + '/cpp')
    wspace.addClassImplImportDir(modulePath + '/cpp')
    return wspace


ObjProvider.getWspace = types.MethodType(getWspace, None, ObjProvider)

#########################
# Now start define PDFs #
#########################


def buildGenericObj(self, objName, factoryCmd, varNames):
    """Build with RooWorkspace.factory. See also RooFactoryWSTool.factory"""
    wspace = self.getWspace()
    obj = wspace.obj(objName)
    if obj == None:
        self.logger.logINFO("Build {0} from scratch.".format(objName))
        for v in varNames:
            if wspace.obj(v) == None:
                getattr(wspace, 'import')(globals()[v])
Example #7
 def MethodType(func, instance):
     return types.MethodType(func, instance, type(instance))
Example #8
def solve_beer_lambert(solar_cell, options):
    """ Calculates the reflection, transmission and absorption of a solar cell object using the Beer-Lambert law. Reflection is not really calculated and needs to be provided externally, otherwise it is assumed to be zero.

    :param solar_cell:
    :param options:
    :return:
    """
    wl_m = options.wavelength
    solar_cell.wavelength = options.wavelength

    fraction = np.ones(wl_m.shape)

    # We include the shadowing losses
    if hasattr(solar_cell, 'shading'):
        fraction *= (1 - solar_cell.shading)

    # And the reflection losses
    if hasattr(solar_cell,
               'reflectivity') and solar_cell.reflectivity is not None:
        solar_cell.reflected = solar_cell.reflectivity(wl_m)
        fraction *= (1 - solar_cell.reflected)
    else:
        solar_cell.reflected = np.zeros(fraction.shape)

    # Now we calculate the absorbed and transmitted light. We first get all the relevant parameters from the objects
    widths = []
    alphas = []
    n_layers_junction = []

    for j, layer_object in enumerate(solar_cell):

        # Attenuation due to absorption in the AR coatings or any layer in the front that is not part of the junction
        if type(layer_object) is Layer:
            widths.append(layer_object.width)
            alphas.append(layer_object.material.alpha(wl_m))
            n_layers_junction.append(1)

        # Each tunnel junction will have, at most, a resistance and some layers absorbing light.
        elif type(layer_object) is TunnelJunction:
            n_layers_junction.append(len(layer_object))
            for i, layer in enumerate(layer_object):
                widths.append(layer.width)
                alphas.append(layer.material.alpha(wl_m))

        # For each junction, and layer within the junction, we get the absorption coefficient and the layer width.
        elif type(layer_object) is Junction:
            n_layers_junction.append(len(layer_object))
            kind = solar_cell[j].kind if hasattr(solar_cell[j],
                                                 'kind') else None

            if kind == '2D':
                # If the junction has a Jsc or EQE already defined, we ignore that junction in the optical calculation
                if hasattr(solar_cell[j], 'jsc') or hasattr(
                        solar_cell[j], 'eqe'):
                    print(
                        'Warning: A junction of kind "2D" found. Junction ignored in the optics calculation!'
                    )

                    w = layer_object.width

                    def alf(x):
                        return 0.0 * x

                    solar_cell[j].alpha = alf
                    solar_cell[j].reflected = interp1d(wl_m,
                                                       solar_cell.reflected,
                                                       bounds_error=False,
                                                       fill_value=(0, 0))

                    widths.append(w)
                    alphas.append(alf(wl_m))

                # Otherwise, we try to treat it as a DB junction from the optical point of view
                else:
                    ASC.absorptance_detailed_balance(solar_cell[j])
                    w = layer_object.width

                    def alf(x):
                        return -1 / w * np.log(
                            np.maximum(1 - layer_object.absorptance(x), 1e-3))

                    solar_cell[j].alpha = alf
                    solar_cell[j].reflected = interp1d(wl_m,
                                                       solar_cell.reflected,
                                                       bounds_error=False,
                                                       fill_value=(0, 0))

                    widths.append(w)
                    alphas.append(alf(wl_m))

            elif kind == 'DB':
                ASC.absorptance_detailed_balance(solar_cell[j])
                w = layer_object.width

                def alf(x):
                    return -1 / w * np.log(
                        np.maximum(1 - layer_object.absorptance(x), 1e-3))

                solar_cell[j].alpha = alf
                solar_cell[j].reflected = interp1d(wl_m,
                                                   solar_cell.reflected,
                                                   bounds_error=False,
                                                   fill_value=(0, 0))

                widths.append(w)
                alphas.append(alf(wl_m))

            else:
                for i, layer in enumerate(layer_object):
                    widths.append(layer.width)
                    alphas.append(layer.material.alpha(wl_m))

    # With all this information, we are ready to calculate the absorbed light
    diff_absorption, transmitted, all_absorbed = calculate_absorption_beer_lambert(
        widths, alphas, fraction)

    # Each building block (layer or junction) needs to have access to the absorbed light in its region.
    # We update each object with that information.

    I0 = 1 * fraction  # need the *1 because we DO NOT want to modify fraction! Will mess up profile calculation

    layers_above_offset = np.cumsum([0] + n_layers_junction)

    for j in range(len(solar_cell)):
        solar_cell[j].diff_absorption = diff_absorption
        solar_cell[j].absorbed = types.MethodType(absorbed, solar_cell[j])

        # total absorption at each wavelength, per layer
        A_junc = np.zeros_like(wl_m)

        for k in range(n_layers_junction[j]):
            ilayer = layers_above_offset[j] + k
            A_layer = I0 * (1 - np.exp(-alphas[ilayer] * widths[ilayer]))
            A_junc += A_layer
            I0 -= A_layer

        solar_cell[j].layer_absorption = A_junc

    solar_cell.transmitted = transmitted
    solar_cell.absorbed = all_absorbed
Example #9
def set_local_methods(self, child):
    """
    set some local methods.
    """
    # for Param instance
    if isinstance(child, param.Param):
        def _get_local_feed_dict(self):
            return {self._tf_array: self.value}
        child.get_local_feed_dict = types.MethodType(_get_local_feed_dict, child)
        # add get_global_free_state method only for Param (not for LocalParam)
        if not isinstance(child, LocalParam):
            def _get_global_free_state(self):
                return self.get_free_state()
            child.get_global_free_state = types.MethodType(_get_global_free_state, child)

    # for Parameterized instance.
    elif isinstance(child, param.Parameterized):
        # Append this 'set_local_methods' first.
        child.set_local_methods = types.MethodType(set_local_methods, child)
        # Next append some local methods.
        child.get_local_feed_dict = types.MethodType(get_local_feed_dict, child)
        # get local and global free_state
        child.get_local_free_state = types.MethodType(get_local_free_state, child)
        child.get_global_free_state = types.MethodType(get_global_free_state, child)
        # set_local_state and set_global_state
        child.set_local_state = types.MethodType(set_local_state, child)
        child.set_global_state = types.MethodType(set_global_state, child)
        # make_local_tf_array, make_global_tf_array
        child.make_local_tf_array = types.MethodType(make_local_tf_array, child)
        child.make_global_tf_array = types.MethodType(make_global_tf_array, child)
        # get_local_params
        child.get_local_params = types.MethodType(get_local_params, child)
        # set_local_data
        #child.get_local_data = types.MethodType(get_local_data, child)
        # get_local_auxil_var
        child.set_local_train_var = types.MethodType(set_local_train_var, child)
        child.get_local_train_var = types.MethodType(get_local_train_var, child)
Example #10
def remote(irc, source, args):
    """[--service <service name>] <network> <command>

    Runs <command> on the remote network <network>. Plugin responses sent using irc.reply() are
    supported and returned here, but others are dropped due to protocol limitations."""
    args = remote_parser.parse_args(args)
    if not args.command:
        irc.error('No command given!')
        return

    netname = args.network

    permissions.check_permissions(irc, source, [
        # Quite a few permissions are allowed. 'networks.remote' is the global permission,
        'networks.remote',
        # networks.remote.<network> allows running any command on a specific network,
        'networks.remote.%s' % netname,
        # networks.remote.<network>.<service> allows running any command on the given service on a
        # specific network,
        'networks.remote.%s.%s' % (netname, args.service),
        # and networks.remote.<network>.<service>.<command> narrows this further into which command
        # can be used.
        'networks.remote.%s.%s.%s' % (netname, args.service, args.command[0])
    ])

    # XXX: things like 'remote network1 remote network2 echo hi' will crash PyLink if the source network is network1...
    global REMOTE_IN_USE
    if REMOTE_IN_USE.is_set():
        irc.error("The 'remote' command can not be nested.")
        return

    REMOTE_IN_USE.set()
    if netname == irc.name:
        # This would actually throw _remote_reply() into a loop, so check for it here...
        # XXX: properly fix this.
        irc.error("Cannot remote-send a command to the local network; use a normal command!")
        REMOTE_IN_USE.clear()
        return

    try:
        remoteirc = world.networkobjects[netname]
    except KeyError:  # Unknown network.
        irc.error('No such network %r (case sensitive).' % netname)
        REMOTE_IN_USE.clear()
        return

    if args.service not in world.services:
        irc.error('Unknown service %r.' % args.service)
        REMOTE_IN_USE.clear()
        return
    elif not remoteirc.connected.is_set():
        irc.error('Network %r is not connected.' % netname)
        REMOTE_IN_USE.clear()
        return
    elif not world.services[args.service].uids.get(netname):
        irc.error('The requested service %r is not available on %r.' % (args.service, netname))
        REMOTE_IN_USE.clear()
        return

    # Force remoteirc.called_in to something private in order to prevent
    # accidental information leakage from replies.
    try:
        remoteirc.called_in = remoteirc.called_by = remoteirc.pseudoclient.uid

        # Set the identification override to the caller's account.
        remoteirc.pseudoclient.account = irc.users[source].account
    except:
        REMOTE_IN_USE.clear()
        raise

    def _remote_reply(placeholder_self, text, **kwargs):
        """
        reply() rerouter for the 'remote' command.
        """
        assert irc.name != placeholder_self.name, \
            "Refusing to route reply back to the same " \
            "network, as this would cause a recursive loop"
        log.debug('(%s) networks.remote: re-routing reply %r from network %s', irc.name,
                  text, placeholder_self.name)

        # Override the source option to make sure the source is valid on the local network.
        if 'source' in kwargs:
            del kwargs['source']
        irc.reply(text, source=irc.pseudoclient.uid, **kwargs)

    old_reply = remoteirc._reply

    with remoteirc._reply_lock:
        try:  # Remotely call the command (use the PyLink client as a dummy user).
            # Override the remote irc.reply() to send replies HERE.
            log.debug('(%s) networks.remote: overriding reply() of IRC object %s', irc.name, netname)
            remoteirc._reply = types.MethodType(_remote_reply, remoteirc)
            world.services[args.service].call_cmd(remoteirc, remoteirc.pseudoclient.uid,
                                                  ' '.join(args.command))
        finally:
            # Restore the original remoteirc.reply()
            log.debug('(%s) networks.remote: restoring reply() of IRC object %s', irc.name, netname)
            remoteirc._reply = old_reply
            # Remove the identification override after we finish.
            try:
                remoteirc.pseudoclient.account = ''
            except:
                log.warning('(%s) networks.remote: failed to restore pseudoclient account for %s; '
                            'did the remote network disconnect while running this command?', irc.name, netname)
            REMOTE_IN_USE.clear()
Example #11
 def setResidualFn(self, fn):
     setattr(self, '_residual_fn', types.MethodType(fn, self,
                                                    self.__class__))
     if self.usePsyco:
         psyco.bind(self._residual_fn)
Example #12
def monkeyPatchSdbConnection(sdb):
    """
    :type sdb: SDBConnection
    """
    sdb.put_attributes = types.MethodType(_put_attributes_using_post, sdb)
Example #13
def test_unstick_axes(robot, smoothie):
    import types

    smoothie.simulating = False
    robot._driver = smoothie

    def update_position_mock(self, default=None):
        if default is None:
            default = self._position

        updated_position = self._position.copy()
        updated_position.update(**default)

    robot._driver.update_position = types.MethodType(
        update_position_mock, robot._driver)

    current_log = []

    def send_command_mock(self, command, timeout=None):
        nonlocal current_log
        current_log.append(command)
        if 'M119' in command:
            smoothie_switch_res = 'X_max:0 Y_max:0 Z_max:0 A_max:0 B_max:0 C_max:0'  # NOQA
            smoothie_switch_res += ' _pins '
            smoothie_switch_res += '(XL)2.01:0 (YL)2.01:0 (ZL)2.01:0 '
            smoothie_switch_res += '(AL)2.01:0 (BL)2.01:0 (CL)2.01:0 Probe: 0\r\n'   # NOQA
            return smoothie_switch_res

    robot._driver._send_command = types.MethodType(
        send_command_mock, robot._driver)

    robot._driver.unstick_axes('BC')

    expected = [
        'M203.1 B1 C1',  # slow them down
        'M119',  # get the switch status
        'M907 A0.1 B0.5 C0.5 X0.3 Y0.3 Z0.1 G4P0.005 G0B-1C-1',  # move
        'M907 A0.1 B0.05 C0.05 X0.3 Y0.3 Z0.1 G4P0.005',  # set plunger current
        'M203.1 A125 B40 C40 X600 Y400 Z125'  # return to normal speed
    ]

    assert current_log == expected

    current_log = []
    robot._driver.unstick_axes('XYZA')

    expected = [
        'M203.1 A1 X1 Y1 Z1',  # slow them down
        'M119',  # get the switch status
        'M907 A0.8 B0.05 C0.05 X1.25 Y1.25 Z0.8 G4P0.005 G0A-1X-1Y-1Z-1',
        'M203.1 A125 B40 C40 X600 Y400 Z125'  # return to normal speed
    ]

    assert current_log == expected

    def send_command_mock(self, command, timeout=None):
        nonlocal current_log
        current_log.append(command)
        if 'M119' in command:
            smoothie_switch_res = 'X_max:0 Y_max:0 Z_max:0 A_max:0 B_max:0 C_max:1'  # NOQA
            smoothie_switch_res += ' _pins '
            smoothie_switch_res += '(XL)2.01:0 (YL)2.01:0 (ZL)2.01:0 '
            smoothie_switch_res += '(AL)2.01:0 (BL)2.01:0 (CL)2.01:0 Probe: 0\r\n'   # NOQA
            return smoothie_switch_res

    robot._driver._send_command = types.MethodType(
        send_command_mock, robot._driver)

    current_log = []
    robot._driver.unstick_axes('BC')

    expected = [
        'M203.1 B1 C1',  # set max-speeds
        'M119',  # get switch status
        'M907 A0.1 B0.5 C0.05 X0.3 Y0.3 Z0.1 G4P0.005 G0B-2',  # MOVE B
        'M907 A0.1 B0.05 C0.05 X0.3 Y0.3 Z0.1 G4P0.005',  # low current B
        'M907 A0.1 B0.05 C0.5 X0.3 Y0.3 Z0.1 G4P0.005 G28.2C',  # HOME C
        'M907 A0.1 B0.05 C0.05 X0.3 Y0.3 Z0.1 G4P0.005',  # low current C
        'M203.1 A125 B40 C40 X600 Y400 Z125'  # reset max-speeds
    ]
    assert current_log == expected

    def send_command_mock(self, command, timeout=None):
        nonlocal current_log
        current_log.append(command)
        if 'M119' in command:
            smoothie_switch_res = 'X_max:0 Y_max:0 Z_max:0 A_max:0 B_max:1 C_max:1'  # NOQA
            smoothie_switch_res += ' _pins '
            smoothie_switch_res += '(XL)2.01:0 (YL)2.01:0 (ZL)2.01:0 '
            smoothie_switch_res += '(AL)2.01:0 (BL)2.01:0 (CL)2.01:0 Probe: 0\r\n'   # NOQA
            return smoothie_switch_res

    robot._driver._send_command = types.MethodType(
        send_command_mock, robot._driver)

    current_log = []
    robot._driver.unstick_axes('BC')

    expected = [
        'M203.1 B1 C1',  # set max-speeds
        'M119',  # get switch status
        'M907 A0.1 B0.5 C0.5 X0.3 Y0.3 Z0.1 G4P0.005 G28.2BC',  # HOME BC
        'M907 A0.1 B0.05 C0.05 X0.3 Y0.3 Z0.1 G4P0.005',  # low current BC
        'M203.1 A125 B40 C40 X600 Y400 Z125'  # reset max-speeds
    ]
    assert current_log == expected
Example #14
	def initialize(self, metadata=None):
		dialog = self.dialog
		title = self.plugin.project.title() or self.plugin.project.readEntry("WMSServiceTitle", "/")[0]
		dialog.project_title.setText(title)

		map_canvas = self.plugin.iface.mapCanvas()
		self.base_layers_tree = self.plugin.get_project_base_layers()
		self.overlay_layers_tree = self.plugin.get_project_layers()

		def expiration_toggled(checked):
			dialog.expiration.setEnabled(checked)
		dialog.enable_expiration.toggled.connect(expiration_toggled)
		dialog.expiration.setDate(datetime.date.today() + datetime.timedelta(days=1))

		resolutions = self.plugin.project_layers_resolutions()
		self._update_min_max_scales(resolutions)

		def blank_toggled(checked):
			if checked:
				dialog.default_baselayer.insertItem(0, 'Blank', 'BLANK')
			else:
				dialog.default_baselayer.removeItem(0)

		def osm_toggled(checked, project_resolutions=resolutions):
			resolutions = set(project_resolutions)
			position = 1 if dialog.blank.isChecked() else 0
			if checked:
				dialog.default_baselayer.insertItem(position, OSM_LAYER['title'], OSM_LAYER['name'])
				resolutions.update(OSM_LAYER['resolutions'])
			else:
				dialog.default_baselayer.removeItem(position)

			if dialog.google.currentIndex() > 0:
				resolutions.update(GOOGLE_LAYERS[0]['resolutions'])
			self._update_min_max_scales(resolutions)

		def google_layer_changed(index, project_resolutions=resolutions):
			resolutions = set(project_resolutions)
			position = 1 if dialog.blank.isChecked() else 0
			if dialog.osm.isChecked():
				position += 1
				resolutions.update(OSM_LAYER['resolutions'])

			google_layers = [dialog.google.itemText(i) for i in range(1, 5)]
			contains_google_layer = dialog.default_baselayer.itemText(position) in google_layers
			if index > 0:
				google_layer = GOOGLE_LAYERS[index-1]
				if contains_google_layer:
					dialog.default_baselayer.setItemText(position, dialog.google.currentText())
					dialog.default_baselayer.setItemData(position, google_layer['name'])
				else:
					dialog.default_baselayer.insertItem(position, dialog.google.currentText(), google_layer['name'])
				resolutions.update(google_layer['resolutions'])
			elif contains_google_layer:
				dialog.default_baselayer.removeItem(position)
			self._update_min_max_scales(resolutions)

		dialog.blank.toggled.connect(blank_toggled)
		dialog.osm.toggled.connect(osm_toggled)
		dialog.google.currentIndexChanged.connect(google_layer_changed)

		def scales_changed(index):
			self.validate()
		dialog.min_scale.currentIndexChanged.connect(scales_changed)
		dialog.max_scale.currentIndexChanged.connect(scales_changed)

		projection = map_canvas.mapRenderer().destinationCrs().authid()
		dialog.osm.setEnabled(projection == 'EPSG:3857')
		dialog.google.setEnabled(projection == 'EPSG:3857')

		dialog.extent_layer.addItem("All layers", list(map_canvas.fullExtent().toRectF().getCoords()))
		for layer in self.plugin.layers_list():
			if self.plugin.is_base_layer_for_publish(layer):
				dialog.default_baselayer.addItem(layer.name(), layer.name())
			if self.plugin.is_base_layer_for_publish(layer) or self.plugin.is_overlay_layer_for_publish(layer):
				extent = list(map_canvas.mapRenderer().layerExtentToOutputExtent(layer, layer.extent()).toRectF().getCoords())
				dialog.extent_layer.addItem(layer.name(), extent)

		dialog.message_valid_until.setDate(datetime.date.today() + datetime.timedelta(days=1))

		def create_layer_widget(node):
			sublayers_widgets = []
			for child in node.children:
				sublayer_widget = create_layer_widget(child)
				if sublayer_widget:
					sublayers_widgets.append(sublayer_widget)
			if sublayers_widgets:
				group_item = QStandardItem(node.name)
				for child in sublayers_widgets:
					group_item.appendRow(child)
				return group_item
			elif node.layer:
				layer = node.layer
				is_vector_layer = layer.type() == QgsMapLayer.VectorLayer
				layer_item = QStandardItem(layer.name())
				layer_item.setFlags(Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsTristate)
				layer_item.setData(layer, Qt.UserRole)
				layer_item.setCheckState(Qt.Checked)
				hidden = QStandardItem()
				vector = QStandardItem()
				hidden.setFlags(Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsTristate)
				hidden.setCheckState(Qt.Unchecked)
				export = QStandardItem()
				if is_vector_layer:
					export.setFlags(Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsTristate)
					export.setCheckState(Qt.Checked)
					vector.setFlags(Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsTristate)
					vector.setCheckState(Qt.Unchecked)
				else:
					export.setFlags(Qt.ItemIsSelectable)
					vector.setFlags(Qt.ItemIsSelectable)
				return [layer_item, vector, export, hidden]

		if self.overlay_layers_tree:
			layers_model = QStandardItemModel()
			def columnItem(self, item, column):
				""""Returns item from layers tree at the same row as given item (of any column) and given column index"""
				row = item.row()
				if item.parent():
					return item.parent().child(row, column)
				else:
					return self.item(row, column)
			layers_model.columnItem = types.MethodType(columnItem, layers_model)
			layers_model.setHorizontalHeaderLabels(['Layer', 'Vector', 'Allow drawing', 'Hidden'])
			dialog.treeView.setModel(layers_model)
			layers_root = create_layer_widget(self.overlay_layers_tree)
			while layers_root.rowCount():
				layers_model.appendRow(layers_root.takeRow(0))
			dialog.treeView.header().setResizeMode(0, QHeaderView.Stretch)
			dialog.treeView.header().setVisible(True)

			def layer_item_changed(item):
				if item.model().columnItem(item, 0).data(Qt.UserRole): # check if item is layer item
					dependent_items = None
					if item.column() == 0:
						enabled = item.checkState() == Qt.Checked
						for index in (1, 2, 3):
							item.model().columnItem(item, index).setEnabled(enabled)
					# Enable/disable checkboxes of 'Allow drawing' and 'Hidden' columns when 'Vector' column change state
					elif item.column() == 1 and item.isEnabled():
						item.model().columnItem(item, 2).setEnabled(item.checkState() == Qt.Unchecked)
						item.model().columnItem(item, 3).setEnabled(item.checkState() == Qt.Unchecked)
					# Enable/disable checkboxes of 'Allow drawing' and 'Vector' columns when 'Hidden' column change state
					elif item.column() == 3 and item.isEnabled():
						item.model().columnItem(item, 1).setEnabled(item.checkState() == Qt.Unchecked)
						item.model().columnItem(item, 2).setEnabled(item.checkState() == Qt.Unchecked)

			layers_model.itemChanged.connect(layer_item_changed)

		if metadata:
			try:
				self.setup_config_page_from_metadata(metadata)
			except:
				QMessageBox.warning(None, 'Warning', 'Failed to load settings from last published version')
		self.validate()
Example #15
def snip_skip_layers(model,
                     keep_ratio,
                     loader,
                     loss,
                     index_to_prune,
                     previous_masks,
                     device='cpu',
                     reinit=True):
    inputs, targets = next(iter(loader))
    inputs, targets = inputs.to(device), targets.to(device)
    _model = copy.deepcopy(model)

    blocks = get_blocs(_model)

    if index_to_prune >= len(blocks):
        print("index out of bloc range: index {}, number of blocks {}".format(
            index_to_prune, len(blocks)))
        return None

    for layer in _model.modules():
        conv2 = isinstance(layer, nn.Conv2d)
        lin = isinstance(layer, nn.Linear)
        if conv2 or lin:
            layer.weight_mask = nn.Parameter(torch.ones_like(layer.weight))
            layer.weight.requires_grad = False
            if conv2:
                layer.forward = types.MethodType(snip_forward_conv2d, layer)
            if lin:
                layer.forward = types.MethodType(snip_forward_linear, layer)

    _model.to(device)
    _model.zero_grad()
    outputs = _model(inputs)
    total_loss = loss(outputs, targets)
    total_loss.backward()

    masks = []
    for idx, bloc in enumerate(blocks):
        if idx != index_to_prune:
            masks.append([])
            continue
        grads_abs = []
        for layer in bloc.modules():
            # parenthesized so the gradient check applies to both layer types
            if (isinstance(layer, nn.Conv2d) or isinstance(layer, nn.Linear)) \
                    and layer.weight_mask.grad is not None:
                grads_abs.append(torch.abs(layer.weight_mask.grad))
        if len(grads_abs) == 0:
            masks.append([])
            continue
        all_scores = torch.cat([torch.flatten(x) for x in grads_abs])
        norm_factor = torch.sum(all_scores)
        all_scores.div_(norm_factor)
        num_params_to_keep = int(len(all_scores) * keep_ratio[idx])
        threshold, _ = torch.topk(all_scores, num_params_to_keep, sorted=True)
        acceptable_score = threshold[-1]

        keep_masks = []
        for g in grads_abs:
            keep_masks.append(((g / norm_factor) >= acceptable_score).float())
        masks.append(keep_masks)

    if previous_masks is not None:
        for i in range(index_to_prune):
            masks[i] = previous_masks[i]

    return masks
Example #16
def _finalize_metric(metric):
    metric.update_state = types.MethodType(
        metrics_utils.update_state_wrapper(metric.keras_api.update_state),
        metric)
    metric.result = metric.keras_api.result
Example #17
def enable_dump_blob(model, graph=False, mode='print'):

    node_attr = '_dbg_node'
    prefix_attr = '_dbg_prefix'
    inp_attr = '_dbg_input'
    out_attr = '_dbg_output'
    saver_attr = '_saver_hooked'

    def save_io(module, input, output):
        setattr(module, inp_attr, input)
        setattr(module, out_attr, output)

    def print_saved_io(module):
        saved_inputs = getattr(module, inp_attr, None)
        saved_outputs = getattr(module, out_attr, None)
        if saved_inputs is None or saved_outputs is None:
            print('[WARN] No saved blob: node={}, module={}\n'.format(
                getattr(module, node_attr, None),
                getattr(module, prefix_attr, None)))
            return

        print_io(module, saved_inputs, saved_outputs)

    def print_io(module, input, output):
        node = getattr(module, node_attr) if hasattr(module,
                                                     node_attr) else None
        module_name = getattr(module, prefix_attr) if hasattr(
            module, prefix_attr) else None

        if node:
            print('node({}): {}'.format(node.idx, node.name))

        if module_name:
            print('module: {}({})'.format(module_name, type(module)))

        if isinstance(module, FakeQuantizer):
            print('quant_info:', module.export_quant_info())

        # saved_inputs/outputs may be empty tuple.
        print_blob(input, 'input')
        print_blob(output, 'output')
        print('')

    def print_blob(blob, prefix):
        if isinstance(blob, tuple) and len(blob) > 0:
            for idx, tensor in enumerate(blob):
                if isinstance(tensor, torch.Tensor):
                    print('{}[{}]: sum={}, dtype={}, shape={}'.format(
                        prefix, idx, tensor.sum(), tensor.dtype, tensor.shape))
                else:
                    print('{}{}: {}'.format(prefix, idx, tensor))
        elif isinstance(blob, torch.Tensor):
            print('{}: sum={}, dtype={}, shape={}'.format(
                prefix, blob.sum(), blob.dtype, blob.shape))
        else:
            print(prefix, None)

    def print_saved_blob(model):
        model.apply(print_saved_io)

    # Hook the node to module if the graph is provided.
    if graph:
        for node in graph.nodes:
            module = get_module_by_node(model, node)
            if module:
                setattr(module, node_attr, node)

    hook_func = save_io if mode == 'save' else print_io
    for name, module in model.named_modules():
        setattr(module, prefix_attr, name)
        module.register_forward_hook(hook_func)

    if mode == 'save':
        model.print_saved_blob = types.MethodType(print_saved_blob, model)
    return model
Example #18
 def __getattr__(self, attr):
     try:
         function = self.display_extension_methods[attr]
         return types.MethodType(function, self)
     except KeyError:
         raise AttributeError(attr)
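# A self-contained, hypothetical sketch of the dispatch pattern above:
# functions kept in a registry are bound on demand via __getattr__.
import types

class Display:
    display_extension_methods = {
        'greet': lambda self, name: 'hello, ' + name,
    }

    def __getattr__(self, attr):
        try:
            function = self.display_extension_methods[attr]
            return types.MethodType(function, self)
        except KeyError:
            raise AttributeError(attr)

d = Display()
print(d.greet('world'))  # -> hello, world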
Example #19
def pandas_support(csv):
    # add missing __iter__ method, so pandas accepts body as file-like object
    if not hasattr(csv, "__iter__"):
        csv.__iter__ = types.MethodType(__iter__, csv)
    return csv
Example #20
def getMpFigure(fig=None, clear=True):
    """Return a pyplot figure(); if fig is supplied save it and make it the default
    fig may also be a bool (make a new figure) or an int (return or make a figure (1-indexed;
    python-list style -n supported)
    """

    if not pyplot:
        raise RuntimeError(
            "I am unable to plot as I failed to import matplotlib")

    if isinstance(fig, bool):  # we want a new one
        fig = len(mpFigures) + 1  # matplotlib is 1-indexed

    if isinstance(fig, int):
        i = fig
        if i == 0:
            raise RuntimeError(
                "I'm sorry, but matplotlib uses 1-indexed figures")
        if i < 0:
            try:
                i = sorted(mpFigures.keys())[i]  # simulate list's [-n] syntax
            except IndexError:
                if mpFigures:
                    print("Illegal index: %d" % i, file=sys.stderr)
                i = 1

        def lift(fig):
            # == Tk's raise, but raise is a python reserved word
            fig.canvas._tkcanvas._root().lift()

        if i in mpFigures:
            try:
                lift(mpFigures[i])
            except Exception:
                del mpFigures[i]

        if i not in mpFigures:
            for j in range(1, i):
                getMpFigure(j)

            mpFigures[i] = pyplot.figure()

            #
            # Modify pyplot.figure().show() to make it raise the plot too
            #

            def show(self, _show=mpFigures[i].show):
                _show(self)
                try:
                    lift(self)
                except Exception:
                    pass

            # create a bound method
            import types
            mpFigures[i].show = types.MethodType(show, mpFigures[i],
                                                 mpFigures[i].__class__)

        fig = mpFigures[i]

    if not fig:
        i = sorted(mpFigures.keys())[0]
        if i > 0:
            fig = mpFigures[i[-1]]
        else:
            fig = getMpFigure(1)

    if clear:
        fig.clf()

    pyplot.figure(fig.number)  # make it active

    return fig
Example #21
    def __init__(self, cr, mat=0):
        self.cr = cr
        try:
            self.Toon_initialized
            return
        except:
            self.Toon_initialized = 1
        Avatar.Avatar.__init__(self, mat)
        ToonDNA.ToonDNA.__init__(self)
        ToonHead.__init__(self, cr)
        self.collsSetup = False
        self.forwardSpeed = 0.0
        self.rotateSpeed = 0.0
        self.strafeSpeed = 0.0
        self.avatarType = CIGlobals.Toon
        self.track = None
        self.standWalkRunReverse = None
        self.playingAnim = None
        self.playingRate = None
        self.tag = None
        self.money = 0
        self.lookAtTrack = None
        self.portal1 = None
        self.portal2 = None
        self.spineA = NodePath()
        self.tokenIcon = None
        self.tokenIconIval = None
        self.fallSfx = base.audio3d.loadSfx(
            "phase_4/audio/sfx/MG_cannon_hit_dirt.ogg")
        base.audio3d.attachSoundToObject(self.fallSfx, self)
        self.eyes = loader.loadTexture("phase_3/maps/eyes.jpg",
                                       "phase_3/maps/eyes_a.rgb")
        self.myTaskId = random.uniform(0, 1231231232132131231232)
        self.closedEyes = loader.loadTexture("phase_3/maps/eyesClosed.jpg",
                                             "phase_3/maps/eyesClosed_a.rgb")
        self.soundChatBubble = loader.loadSfx(
            "phase_3/audio/sfx/GUI_balloon_popup.ogg")
        self.shadowCaster = None
        self.accessories = []
        self.backpack = None
        self.forceRunSpeed = False
        self.animFSM = ClassicFSM('Toon', [
            State('off', self.enterOff, self.exitOff),
            State('neutral', self.enterNeutral, self.exitNeutral),
            State('swim', self.enterSwim, self.exitSwim),
            State('walk', self.enterWalk, self.exitWalk),
            State('run', self.enterRun, self.exitRun),
            State('bow', self.enterBow, self.exitBow),
            State('openBook', self.enterOpenBook, self.exitOpenBook),
            State('readBook', self.enterReadBook, self.exitReadBook),
            State('closeBook', self.enterCloseBook, self.exitCloseBook),
            State('teleportOut', self.enterTeleportOut, self.exitTeleportOut),
            State('teleportIn', self.enterTeleportIn, self.exitTeleportIn),
            State('died', self.enterDied, self.exitDied),
            State('fallFWD', self.enterFallFWD, self.exitFallFWD),
            State('fallBCK', self.enterFallBCK, self.exitFallBCK),
            State('jump', self.enterJump, self.exitJump),
            State('leap', self.enterLeap, self.exitLeap),
            State('laugh', self.enterLaugh, self.exitLaugh),
            State('happy', self.enterHappyJump, self.exitHappyJump),
            State('shrug', self.enterShrug, self.exitShrug),
            State('hdance', self.enterHDance, self.exitHDance),
            State('wave', self.enterWave, self.exitWave),
            State('scientistEmcee', self.enterScientistEmcee,
                  self.exitScientistEmcee),
            State('scientistWork', self.enterScientistWork,
                  self.exitScientistWork),
            State('scientistGame', self.enterScientistGame,
                  self.exitScientistGame),
            State('scientistJealous', self.enterScientistJealous,
                  self.exitScientistJealous),
            State('cringe', self.enterCringe, self.exitCringe),
            State('conked', self.enterConked, self.exitConked),
            State('win', self.enterWin, self.exitWin),
            State('walkBack', self.enterWalkBack, self.exitWalkBack),
            State('deadNeutral', self.enterDeadNeutral, self.exitDeadNeutral),
            State('deadWalk', self.enterDeadWalk, self.exitDeadWalk),
            State('squish', self.enterSquish, self.exitSquish),
            State('Happy', self.enterHappy, self.exitHappy),
            State('Sad', self.enterSad, self.exitSad),
            State('Swim', self.enterSwim, self.exitSwim)
        ], 'off', 'off')
        animStateList = self.animFSM.getStates()
        self.animFSM.enterInitialState()

        if not hasattr(self, 'uniqueName'):
            self.uniqueName = types.MethodType(uniqueName, self)

        self.activities = {
            ACT_DIE: Die(self),
            ACT_VICTORY_DANCE: VictoryDance(self),
            ACT_TOON_BOW: Bow(self),
            ACT_JUMP: Jump(self),
            ACT_TOON_PRESENT: Present(self),
            ACT_TOON_POINT: Point(self),
            ACT_PRESS_BUTTON: PressButton(self),
            ACT_TOON_FALL: Fall(self)
        }
Example #22
def make_better(client):
    client.put_parameter_and_wait = types.MethodType(put_parameter_and_wait,
                                                     client)
    return client
Example #23
    async def run_event_handlers(
        self,
        event: Event,
        preposition: Preposition,
        *args,
        return_exceptions: bool = False,
        **kwargs,
    ) -> Optional[List[EventResult]]:
        """
        Run handlers for the given event and preposition and return the results
        or None if there are no handlers.

        Exceptions are rescued and returned as event result objects in which the
        `value` attribute is the exception.
        """
        if not isinstance(event, Event):
            raise ValueError(
                f"event must be an Event object, got {event.__class__.__name__}"
            )

        event_handlers = self.get_event_handlers(event, preposition)
        if not event_handlers:
            return None

        with self.current():
            with EventContext(event=event, preposition=preposition).current():
                results: List[EventResult] = []
                for event_handler in event_handlers:
                    # NOTE: Explicit kwargs take precedence over those defined during handler declaration
                    merged_kwargs = event_handler.kwargs.copy()
                    merged_kwargs.update(kwargs)
                    try:
                        method = types.MethodType(event_handler.handler, self)
                        async with event.on_handler_context_manager(self):
                            if asyncio.iscoroutinefunction(method):
                                value = await asyncio.create_task(
                                    method(*args, **merged_kwargs),
                                    name=f"{preposition}:{event}",
                                )
                            else:
                                value = method(*args, **merged_kwargs)

                        result = EventResult(
                            connector=self,
                            event=event,
                            preposition=preposition,
                            handler=event_handler,
                            value=value,
                        )
                        results.append(result)

                    except Exception as error:
                        if (isinstance(error, servo.errors.EventCancelledError)
                                and preposition != Preposition.before):
                            if return_exceptions:
                                self.logger.warning(
                                    f"Cannot cancel an event from an {preposition} handler: event dispatched"
                                )
                            else:
                                cause = error
                                error = TypeError(
                                    f"Cannot cancel an event from an {preposition} handler"
                                )
                                error.__cause__ = cause

                        # Annotate the exception and reraise to halt execution
                        error.__event_result__ = EventResult(
                            connector=self,
                            event=event,
                            preposition=preposition,
                            handler=event_handler,
                            value=error,
                        )

                        if return_exceptions:
                            results.append(error.__event_result__)
                        else:
                            raise error

        return results
Example #24
    def __init__(self,
                 Model,
                 kl_weight=1.,
                 prior_sigma_1=1.5,
                 prior_sigma_2=0.1,
                 prior_pi=0.5,
                 SVI_Layers=[tf.keras.layers.Dense],
                 normalize=True,
                 task='regression',
                 one_hot=True,
                 **kwargs):
        """SVI Initializer. Turns a neural network into an SVI network.

        Args:
            Model: Input Keras Model.
            kl_weight: Weight Parameter for KL Divergence.
            prior_sigma_1: First sigma for prior on weights
            prior_sigma_2: Second sigma for prior on weights
            prior_pi: First pi for prior on weights, second is 1 - prior_pi
            SVI_Layers: Layer types to which SVI can be applied -- the default, Dense, is guaranteed safe; add others at your own risk
            normalize: Whether to normalize input and output values before use -- if you get NaNs, try switching! Regression only
            task: regression or classification
            one_hot: Whether the input targets are in one-hot format. True if the inputs are one-hot. Classification only.

        Returns:
            Nothing lol

        """

        self.model = Model()
        self.SVI_Layers = SVI_Layers
        self.one_hot = one_hot
        self.use_normalization = normalize
        self.task = task

        self.train_std = 0
        self.xmean, self.xstd = 0., 1.
        self.ymean, self.ystd = 0., 1.

        if task == 'regression':
            last_layer = self.model.layers[-1]
            self.dim = last_layer.units
            last_layer.units = 2 * last_layer.units
        if task == 'classification':
            self.dim = self.model.layers[-1].units

        def compute_output_shape(self, input_shape):
            return input_shape[0], self.units

        def kl_loss(self, w, mu, sigma):
            variational_dist = tfp.distributions.Normal(mu, sigma)
            return self.kl_weight * K.sum(
                variational_dist.log_prob(w) - self.log_prior_prob(w))

        def build(self, input_shape):
            self.kernel_mu = self.add_weight(
                name='kernel_mu',
                shape=(input_shape[1], self.units),
                initializer=initializers.RandomNormal(stddev=self.init_sigma),
                trainable=True)
            self.bias_mu = self.add_weight(
                name='bias_mu',
                shape=(self.units, ),
                initializer=initializers.RandomNormal(stddev=self.init_sigma),
                trainable=True)
            self.kernel_rho = self.add_weight(
                name='kernel_rho',
                shape=(input_shape[1], self.units),
                initializer=initializers.Constant(0.0),
                trainable=True)
            self.bias_rho = self.add_weight(
                name='bias_rho',
                shape=(self.units, ),
                initializer=initializers.Constant(0.0),
                trainable=True)
            self._trainable_weights = [
                self.kernel_mu, self.bias_mu, self.kernel_rho, self.bias_rho
            ]  #

        def call(self, inputs, **kwargs):

            if self.built == False:
                self.build(inputs.shape)
                self.built = True

            kernel_sigma = tf.math.softplus(self.kernel_rho)
            kernel = self.kernel_mu + kernel_sigma * tf.random.normal(
                self.kernel_mu.shape)

            bias_sigma = tf.math.softplus(self.bias_rho)
            bias = self.bias_mu + bias_sigma * tf.random.normal(
                self.bias_mu.shape)

            self.add_loss(
                self.kl_loss(kernel, self.kernel_mu, kernel_sigma) +
                self.kl_loss(bias, self.bias_mu, bias_sigma))

            return self.activation(K.dot(inputs, kernel) + bias)

        def log_prior_prob(self, w):
            comp_1_dist = tfp.distributions.Normal(0.0, self.prior_sigma_1)
            comp_2_dist = tfp.distributions.Normal(0.0, self.prior_sigma_2)
            return K.log(self.prior_pi_1 * comp_1_dist.prob(w) +
                         self.prior_pi_2 * comp_2_dist.prob(w))

        for layer in self.model.layers:
            if layer.__class__ in SVI_Layers:
                layer.kl_weight = kl_weight
                layer.prior_sigma_1 = prior_sigma_1
                layer.prior_sigma_2 = prior_sigma_2
                layer.prior_pi_1 = prior_pi
                layer.prior_pi_2 = 1.0 - prior_pi
                layer.init_sigma = np.sqrt(
                    layer.prior_pi_1 * layer.prior_sigma_1**2 +
                    layer.prior_pi_2 * layer.prior_sigma_2**2)
                layer.compute_output_shape = types.MethodType(
                    compute_output_shape, layer)
                layer.build = types.MethodType(build, layer)
                layer.call = types.MethodType(call, layer)
                layer.kl_loss = types.MethodType(kl_loss, layer)
                layer.log_prior_prob = types.MethodType(log_prior_prob, layer)
                layer.built = False
Example #25
def do_not_tell_clients_about_newly_joined_node(nodes):
    for node in nodes:
        node.sendPoolInfoToClients = types.MethodType(lambda x, y: None, node)
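# A minimal, self-contained sketch of the same stubbing pattern, with a
# hypothetical Node class for illustration: the lambda silences the method
# on each instance without touching the class.
import types

class Node:
    def sendPoolInfoToClients(self, info):
        print('sending', info)

node = Node()
node.sendPoolInfoToClients = types.MethodType(lambda self, info: None, node)
node.sendPoolInfoToClients('ledger state')  # prints nothing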
Example #26
    def __init__(self):
        QtGui.QMainWindow.__init__(self)
        Ui_MainWindow.__init__(self)

        # Configure the user interface.
        self.setupUi(self)

        # Snippets special Variables
            # snippet
        self.insideSnippet = False
        self.start_snip = 0
            # Current field
        self.snippets = []
        self.field = None
        self.field_start = 0
        self.field_long = 0
        self.oldsnip = None

        # Editor
            # Editor tabs = 4 chars
        self.tab_long = 4
        self.setTabEditorWidth(self.tab_long)
            # Syntax highlighting
        self.highlighter = highlighter.Highlighter(self.editor)
        self.highlighter.modeRest()

        # Browser (viewer)
            # Set permissions to open external links
            # by clicking on them
        self.viewer.openExternalLinks = True
        self.viewer.setOpenExternalLinks(True)

            # Big hack to avoid creating a particular
            # class for the browser.
            # TODO: clean derivation
        self.viewer.loadResource = imageBrowser().loadResource

        ## Special attributes :
            # name of the current edited file
        self.fileName = "Noname.rst"
            # absolute path
        self.filePath = INREP
            # boolean to know if file has been saved or not
        self.isSaved = False
            # default directory
        self.default_dir = INREP # or APPLIREP (up to you)

        ## Conversion Dialog
        self.ConvDialog = QtGui.QDialog(self)
        ui = Ui_Converter()
        ui.setupUi(self.ConvDialog)
        self.converterdial = ui

        ## SIGNALS/SLOTS
            # we need to know if the source has changed
        self.connect(self.editor, QtCore.SIGNAL("textChanged()"), self.needSave)

        ## ScrollBars editor-viewer
            # scrollbar editor
        self.esb = self.editor.verticalScrollBar()
            # scrollbar viewer
        self.vsb = self.viewer.verticalScrollBar()
            # connections: editor scrollbar --> synchronise the view in the browser
        self.connect(self.esb, QtCore.SIGNAL("valueChanged(int) "), self.actualiseBSlider)
            # connections: viewer scrollbar --> synchronise the view in the editor
        self.connect(self.vsb, QtCore.SIGNAL("valueChanged(int) "), self.actualiseESlider)

        ## zoom event for editor and viewer
        import types
        self.editor.wheelEvent = types.MethodType(zoomEditor, self.editor, self.editor.__class__)
        self.viewer.wheelEvent = types.MethodType(zoomViewer, self.viewer, self.viewer.__class__)

        ## MENU ACTIONS
        self.createActions()

        ## BUTTONS
        self.actionSave.setEnabled(self.isSaved)

        ## APPLY SAVED APP SETTINGS
        self.readSettings()

        ## KEYBOARD SHORTCUTS
        keyTab = QtGui.QShortcut(QtGui.QKeySequence(self.tr("Tab")), self)
        self.connect(keyTab, QtCore.SIGNAL("activated()"), self.whatToDoOnTab)

        QtGui.QShortcut(QtGui.QKeySequence("F1"), self, self.showHelp)
        QtGui.QShortcut(QtGui.QKeySequence("F2"), self, self.indentRegion)
        QtGui.QShortcut(QtGui.QKeySequence("F3"), self, self.unindentRegion)
        QtGui.QShortcut(QtGui.QKeySequence("F4"), self, self.chooseFont)
        QtGui.QShortcut(QtGui.QKeySequence("F5"), self, self.tableCreate)
        QtGui.QShortcut(QtGui.QKeySequence("F6"), self, self.columnMode)


        self.connect(self.editor, QtCore.SIGNAL("textChanged()"), self.updateChilds)

        ## STATUSBAR
        self.statusBar().showMessage("Welcome to reStInPeace, press F1 for Help.")
Example #27
def re_self_method(self, method_name, re_method):
    method = getattr(self, method_name)
    setattr(self, method_name, types.MethodType(lambda *args, **kwds: re_method(method, *args, **kwds), self, self))
Example #28
def patch(pipeline):
    def get_feature_names(pipeline):
        return ["length_of_methods"]

    pipeline.get_feature_names = types.MethodType(get_feature_names, pipeline)
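# A hypothetical usage sketch (DummyPipeline is a stand-in; the original
# presumably patches a scikit-learn Pipeline instance):
class DummyPipeline:
    pass

pipeline = DummyPipeline()
patch(pipeline)
print(pipeline.get_feature_names())  # -> ['length_of_methods']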
Example #29
def configure_device(device, mqtt_prefix):
    device.auth()
    logging.debug(
        'Connected to \'%s\' Broadlink device at \'%s\' (MAC %s) and started listening to MQTT commands at \'%s#\' '
        % (device.type, device.host[0], ':'.join(
            format(s, '02x') for s in device.mac), mqtt_prefix))

    logging.debug("==--Device authenticated, device: " + str(device))

    broadlink_rm_temperature_interval = cf.get(
        'broadlink_rm_temperature_interval', 0)
    if (device.type == 'RM2'
            or device.type == 'RM4') and broadlink_rm_temperature_interval > 0:
        scheduler = sched.scheduler(time.time, time.sleep)
        scheduler.enter(broadlink_rm_temperature_interval, 1,
                        broadlink_rm_temperature_timer, [
                            scheduler, broadlink_rm_temperature_interval,
                            device, mqtt_prefix
                        ])
        # scheduler.run()
        tt = SchedulerThread(scheduler)
        tt.daemon = True
        tt.start()

    broadlink_sp_energy_interval = cf.get('broadlink_sp_energy_interval', 0)
    if device.type == 'SP2' and broadlink_sp_energy_interval > 0:
        scheduler = sched.scheduler(time.time, time.sleep)
        scheduler.enter(
            broadlink_sp_energy_interval, 1, broadlink_sp_energy_timer,
            [scheduler, broadlink_sp_energy_interval, device, mqtt_prefix])
        # scheduler.run()
        tt = SchedulerThread(scheduler)
        tt.daemon = True
        tt.start()

    broadlink_a1_sensors_interval = cf.get('broadlink_a1_sensors_interval', 0)
    if device.type == 'A1' and broadlink_a1_sensors_interval > 0:
        scheduler = sched.scheduler(time.time, time.sleep)
        scheduler.enter(
            broadlink_a1_sensors_interval, 1, broadlink_a1_sensors_timer,
            [scheduler, broadlink_a1_sensors_interval, device, mqtt_prefix])
        # scheduler.run()
        tt = SchedulerThread(scheduler)
        tt.daemon = True
        tt.start()

    broadlink_mp1_state_interval = cf.get('broadlink_mp1_state_interval', 0)
    if device.type == 'MP1' and broadlink_mp1_state_interval > 0:
        scheduler = sched.scheduler(time.time, time.sleep)
        scheduler.enter(
            broadlink_mp1_state_interval, 1, broadlink_mp1_state_timer,
            [scheduler, broadlink_mp1_state_interval, device, mqtt_prefix])
        # scheduler.run()
        tt = SchedulerThread(scheduler)
        tt.daemon = True
        tt.start()

    if device.type == 'Dooya DT360E':
        # noinspection PyUnusedLocal
        def publish(dev, percentage):
            try:
                percentage = str(percentage)
                topic = mqtt_prefix + "position"
                logging.debug("Sending Dooya position " + percentage +
                              " to topic " + topic)
                mqttc.publish(topic, percentage, qos=qos, retain=retain)
            except:
                logging.exception("Error")

        device.publish = types.MethodType(publish, device)

        broadlink_dooya_position_interval = cf.get(
            'broadlink_dooya_position_interval', 0)
        if broadlink_dooya_position_interval > 0:
            scheduler = sched.scheduler(time.time, time.sleep)
            scheduler.enter(
                broadlink_dooya_position_interval, 1,
                broadlink_dooya_position_timer,
                [scheduler, broadlink_dooya_position_interval, device])
            # scheduler.run()
            tt = SchedulerThread(scheduler)
            tt.daemon = True
            tt.start()

    broadlink_bg1_state_interval = cf.get('broadlink_bg1_state_interval', 0)
    if (device.type == 'BG1') and broadlink_bg1_state_interval > 0:
        scheduler = sched.scheduler(time.time, time.sleep)
        scheduler.enter(
            broadlink_bg1_state_interval, 1, broadlink_bg1_state_timer,
            [scheduler, broadlink_bg1_state_interval, device, mqtt_prefix])
        # scheduler.run()
        tt = SchedulerThread(scheduler)
        tt.daemon = True
        tt.start()

    broadlink_sp4b_state_interval = cf.get('broadlink_sp4b_state_interval', 0)
    if (device.type == 'SP4B') and broadlink_sp4b_state_interval > 0:
        scheduler = sched.scheduler(time.time, time.sleep)
        scheduler.enter(
            broadlink_sp4b_state_interval, 1, broadlink_sp4b_state_timer,
            [scheduler, broadlink_sp4b_state_interval, device, mqtt_prefix])
        # scheduler.run()
        tt = SchedulerThread(scheduler)
        tt.daemon = True
        tt.start()

    return device
Example #30
    def build_response(self,
                       request,
                       response,
                       from_cache=False,
                       cacheable_methods=None):
        """
        Build a response by making a request or using the cache.

        This will end up calling send and returning a potentially
        cached response
        """
        cacheable = cacheable_methods or self.cacheable_methods
        if not from_cache and request.method in cacheable:
            # Check for any heuristics that might update headers
            # before trying to cache.
            if self.heuristic:
                response = self.heuristic.apply(response)

            # apply any expiration heuristics
            if response.status == 304:
                # We must have sent an ETag request. This could mean
                # that we've been expired already or that we simply
                # have an etag. In either case, we want to try and
                # update the cache if that is the case.
                cached_response = self.controller.update_cached_response(
                    request, response)

                if cached_response is not response:
                    from_cache = True

                # We are done with the server response, read a
                # possible response body (compliant servers will
                # not return one, but we cannot be 100% sure) and
                # release the connection back to the pool.
                response.read(decode_content=False)
                response.release_conn()

                response = cached_response

            # We always cache the 301 responses
            elif response.status == 301:
                self.controller.cache_response(request, response)
            else:
                # Wrap the response file with a wrapper that will cache the
                #   response when the stream has been consumed.
                response._fp = CallbackFileWrapper(
                    response._fp,
                    functools.partial(
                        self.controller.cache_response,
                        request,
                        response,
                    ))
                if response.chunked:
                    super_update_chunk_length = response._update_chunk_length

                    def _update_chunk_length(self):
                        super_update_chunk_length()
                        if self.chunk_left == 0:
                            self._fp._close()

                    response._update_chunk_length = types.MethodType(
                        _update_chunk_length, response)

        resp = super(CacheControlAdapter,
                     self).build_response(request, response)

        # See if we should invalidate the cache.
        if request.method in self.invalidating_methods and resp.ok:
            cache_url = self.controller.cache_url(request.url)
            self.cache.delete(cache_url)

        # Give the request a from_cache attr to let people use it
        resp.from_cache = from_cache

        return resp