Example #1
def getattr(inp, *attrnames):
    if len(attrnames) == 1:
        for x in inp:
            yield __builtin__.getattr(x, attrnames[0])
    else:
        for x in inp:
            yield [__builtin__.getattr(x, attrname) for attrname in attrnames]
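A minimal usage sketch (not part of the original source; it assumes Python 2 and that the generator above is defined in the same module, since it shadows the built-in): one attribute name yields that attribute per element, several names yield a list per element.

import __builtin__

values = [complex(1, 2), complex(3, 4)]
list(getattr(values, 'real'))            # -> [1.0, 3.0]
list(getattr(values, 'real', 'imag'))    # -> [[1.0, 2.0], [3.0, 4.0]]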
Example #2
    def test_paths(self):
        self.assertTrue(XSectionConfig.current_analysis_path.endswith('/'))
        self.assertTrue(self.config_7TeV.path_to_files.endswith('/'))
        self.assertTrue(
            self.config_7TeV.path_to_unfolding_histograms.endswith('/'))
        self.assertTrue(self.config_8TeV.path_to_files.endswith('/'))

        self.assertTrue('7TeV' in self.config_7TeV.path_to_files)
        self.assertTrue('8TeV' in self.config_8TeV.path_to_files)
        self.assertTrue('13TeV' in self.config_13TeV.path_to_files)

        unfolding_files = [
            'unfolding_powheg_pythia_raw',
            'unfolding_powheg_herwig_raw',
            'unfolding_mcatnlo_raw',
            'unfolding_scale_down_raw',
            'unfolding_scale_up_raw',
            'unfolding_matching_down_raw',
            'unfolding_matching_up_raw',
        ]
        for u_file in unfolding_files:
            full_path = getattr(self.config_7TeV, u_file)
            self.assertEqual(full_path.count('7TeV'), 2)
            full_path = getattr(self.config_8TeV, u_file)
            self.assertEqual(full_path.count('8TeV'), 2)
Example #3
 def test_paths( self ):
     self.assertTrue( XSectionConfig.current_analysis_path.endswith( '/' ) )
     self.assertTrue( self.config_7TeV.path_to_files.endswith( '/' ) )
     self.assertTrue( self.config_7TeV.path_to_unfolding_histograms.endswith( '/' ) )
     self.assertTrue( self.config_8TeV.path_to_files.endswith( '/' ) )
     
     self.assertTrue( '7TeV' in self.config_7TeV.path_to_files )
     self.assertTrue( '8TeV' in self.config_8TeV.path_to_files )
     
     unfolding_files = [
         'unfolding_powheg_pythia_raw',
         'unfolding_powheg_herwig_raw',
         'unfolding_mcatnlo_raw',
         'unfolding_scale_down_raw',
         'unfolding_scale_up_raw',
         'unfolding_matching_down_raw',
         'unfolding_matching_up_raw',
     ]
     for u_file in unfolding_files:
         full_path = getattr(self.config_7TeV, u_file)
         self.assertEqual(full_path.count('7TeV'), 2)
         full_path = getattr(self.config_8TeV, u_file)
         self.assertEqual(full_path.count('8TeV'), 2)
Example #4
def getattr(obj, args):
    """Try to get an attribute from an object.

    Example: {% if block|getattr:"editable,True" %}

    If the object is a dictionary, then use the name as a key instead.

    Beware that the default is always a string; if you want this
    to return False, pass an empty second argument:
    {% if block|getattr:"editable," %}
    """
    splitargs = args.split(',')
    try:
        (attribute, default) = splitargs
    except ValueError:
        (attribute, default) = args, ''

    val = __builtin__.getattr(obj, attribute, None)
    if val is None:
        try:
            val = obj.get(attribute, default)
        except AttributeError:
            val = default

    if hasattr(val, '__call__'):
        return val.__call__()
    else:
        return val
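Called directly (outside a template), the filter behaves as sketched below. This is a hedged illustration only: the Block class is made up for the example and the Django filter registration is not shown.

class Block(object):
    editable = True

getattr(Block(), 'editable,False')       # attribute exists -> True
getattr({'editable': ''}, 'editable,')   # dict lookup with empty default -> ''
getattr(object(), 'missing,fallback')    # no attribute, no .get() -> 'fallback'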
Example #5
    def previous_value(self,
                       value_if_pos=None,
                       value_if_not_pos=None,
                       attr_if_pos=None):
        """Returns the value of an attribute from a previous instance if the hiv_result
        of the previous instance is POS using a previous subject_visit. If the attribute
        provided in attr_if_pos does not exist, then it will return the value of
        \'value_if_pos\'. If there is no previous visit, returns the value of \'value_if_not_pos\'."""
        value = value_if_not_pos
        if self.previous_subject_visits:
            current_subject_visit = copy(self.subject_visit)
            previous_subject_visits = copy(self.previous_subject_visits)
            for subject_visit in previous_subject_visits:
                # check for POS result from previous data
                self.subject_visit = subject_visit
                if self.hiv_result == POS:
                    try:
                        value = None
                        for attr in attr_if_pos:
                            value = getattr(value or self, attr)
                            try:
                                value = value()  # call if not property
                            except TypeError:
                                pass
                    except TypeError:  # attr_if_pos is None, not iterable
                        value = value_if_pos
#                     except AttributeError:# 'SubjectStatusHelper' object has no attribute 'date'
#                         value = value_if_pos
                    break  # got one!
            self.subject_visit = current_subject_visit
        return value
Example #6
 def load_build(self):
     stack = self.stack
     state = stack.pop()
     inst = stack[-1]
     setstate = getattr(inst, "__setstate__", None)
     if setstate:
         setstate(state)
         return
     slotstate = None
     if isinstance(state, tuple) and len(state) == 2:
         state, slotstate = state
     if state:
         try:
             inst.__dict__.update(state)
         except RuntimeError:
             # XXX In restricted execution, the instance's __dict__
             # is not accessible.  Use the old way of unpickling
             # the instance variables.  This is a semantic
             # difference when unpickling in restricted
             # vs. unrestricted modes.
             # Note, however, that cPickle has never tried to do the
             # .update() business, and always uses
             #     PyObject_SetItem(inst.__dict__, key, value) in a
             # loop over state.items().
             for k, v in state.items():
                 setattr(inst, k, v)
     if slotstate:
         for k, v in slotstate.items():
             setattr(inst, k, v)
Example #7
 def load_build(self):
     stack = self.stack
     state = stack.pop()
     inst = stack[-1]
     setstate = getattr(inst, "__setstate__", None)
     if setstate:
         setstate(state)
         return
     slotstate = None
     if isinstance(state, tuple) and len(state) == 2:
         state, slotstate = state
     if state:
         try:
             inst.__dict__.update(state)
         except RuntimeError:
             # XXX In restricted execution, the instance's __dict__
             # is not accessible.  Use the old way of unpickling
             # the instance variables.  This is a semantic
             # difference when unpickling in restricted
             # vs. unrestricted modes.
             # Note, however, that cPickle has never tried to do the
             # .update() business, and always uses
             #     PyObject_SetItem(inst.__dict__, key, value) in a
             # loop over state.items().
             for k, v in state.items():
                 setattr(inst, k, v)
     if slotstate:
         for k, v in slotstate.items():
             setattr(inst, k, v)
Example #8
	def action(self):
		mode = getattr(self.totem_object,'SleepPluginMode')
		self.totem_object.SleepPluginMode = SLEEP_MODE_DISABLED
		
		if mode == SLEEP_MODE_SHUTDOWN:
			# Get the D-Bus system bus
			bus = dbus.SystemBus()
			# Access the ConsoleKit D-Bus object
			obj = bus.get_object('org.freedesktop.ConsoleKit', '/org/freedesktop/ConsoleKit/Manager')
			
			# Access the ConsoleKit manager interface
			powerman = dbus.Interface(obj, "org.freedesktop.ConsoleKit.Manager")
			
			if powerman.CanStop():
				powerman.Stop()
			else :
				print "The system cant shutdown"
		
		elif mode == SLEEP_MODE_HIBERNATE:
			# Get the D-Bus system bus
			bus = dbus.SystemBus()
			# Access the UPower D-Bus object
			obj = bus.get_object("org.freedesktop.UPower", "/org/freedesktop/UPower")
			
			# Access the UPower interface
			powerman = dbus.Interface(obj, "org.freedesktop.UPower")
			
			if powerman.HibernateAllowed():
				powerman.Hibernate()
			else :
				print "The system cant hibernate"
Example #9
    def change_all(
        cls, level=(), buffer=(), terminator=(), colored_format=(),
        format=()
    ):
# #
        '''
            This method applies the given properties to all created logger \
            instances and saves them as default properties for logger \
            instances created in the future.

            Note that every argument except "buffer" that is set to "None" \
            will leave the corresponding logger component unchanged. If you \
            don't want to change the buffer, leave it at its default value.

            **level**          - sets levels for all loggers

            **buffer**         - sets buffers for all loggers

            **terminator**     - sets an ending char for each log message in \
                                 each logger

            **colored_format** - sets templates for colored logging messages \
                                 in returned logger

            **format**         - sets templates for logging messages in each \
                                 logger

            Examples:

            >>> Logger.change_all() # doctest: +ELLIPSIS
            <class ...Logger...>
        '''
        cls._set_properties(level, buffer, terminator, colored_format, format)
        for logger in cls.instances:
# # python3.5             new_handler = logger.handlers.copy()
            new_handler = copy(logger.handlers)
            if buffer:
                new_handler = []
                for new_buffer in cls.buffer:
                    new_handler.append(LoggingStreamHandler(stream=new_buffer))
            for handler, level, terminator, colored_format, format in \
            builtins.zip(
                new_handler, cls.level, cls.terminator, cls.colored_format,
                cls.format
            ):
                # TODO check new branches.
                if colored_format is None:
                    handler.setFormatter(LoggingFormatter(format))
                else:
                    handler.setFormatter(ColoredLoggingFormatter(
                        colored_format))
                handler.terminator = terminator
                handler.setLevel(level.upper())
            for handler in logger.handlers:
                logger.removeHandler(handler)
            for handler in new_handler:
                logger.addHandler(handler)
            logger.setLevel(builtins.getattr(logging, cls.level[0].upper()))
        return cls
Example #10
    def _generate_logger(
        cls, name, level, buffer, terminator, colored_format, format
    ):
# #
        '''
            Creates a new logger instance by initializing all its components \
            with given arguments or default properties saved as class \
            properties.

            Examples:

            >>> Logger._generate_logger(
            ...     'test', ('info',), (Buffer(),), ('',), (''), ('',)
            ... ) # doctest: +ELLIPSIS
            <logging.Logger object at ...>
        '''
        # TODO check branches.
        properties = []
        for property_name in (
            'level', 'buffer', 'terminator', 'colored_format', 'format'
        ):
            properties.append(
                builtins.locals()[property_name] if builtins.locals()[
                    property_name
                ] else builtins.getattr(cls, property_name))
        for handler in getLogger(name).handlers:
            getLogger(name).removeHandler(handler)
        logger = getLogger(name)
        logger.propagate = False
        for _level, _buffer, _terminator, _colored_format, _format in \
        builtins.zip(
            properties[0], properties[1], properties[2], properties[3],
            properties[4]
        ):
            handler = LoggingStreamHandler(stream=_buffer)
            handler.terminator = _terminator
            handler.setLevel(_level.upper())
            # TODO check new branches
            if _colored_format is None:
                handler.setFormatter(LoggingFormatter(_format))
            else:
                handler.setFormatter(ColoredLoggingFormatter(_colored_format))
            logger.addHandler(handler)
        '''
            Set meta logger level to first given level (level is first \
            property).
        '''
        logger.setLevel(builtins.getattr(logging, properties[0][0].upper()))
        return logger
Example #11
    def __init__(self, *arguments, **keywords):
# #
        '''
            Initializes a joint point for saved function call.

            **return_value** - Return value to set as return value for \
                               wrapped function.

            Arguments and keywords are forwarded to the "JointPointHandler" \
            initialize method.

            Examples:


            >>> class A:
            ...     def a(self): pass

            >>> ReturnJointPoint(
            ...     A, A(), A().a, (), {}, return_value=None
            ... ) # doctest: +ELLIPSIS
            Object of "ReturnJointPoint" with class object "A", object "...".

            >>> ReturnJointPoint(
            ...     A, A(), A().a, (), {}, None
            ... ) # doctest: +ELLIPSIS
            Object of "ReturnJointPoint" with class object "A", object "...".
        '''
        '''Take this method via introspection from super classes.'''
        builtins.getattr(ReturnAspect, inspect.stack()[0][3])(self)

        # # # region properties

        if keywords:
            self.return_value = keywords['return_value']
            del keywords['return_value']
        else:
            self.return_value = arguments[-1]
            arguments = arguments[:-1]

        # # # endregion

        '''Take this method via introspection from super classes.'''
        return builtins.getattr(
            JointPointHandler, inspect.stack()[0][3]
        )(self, *arguments, **keywords)
Example #12
	def do_deactivate(self):
		data = getattr(self._totem,'ShutdownPluginInfo')
        
		manager = self._totem.get_ui_manager()
		manager.remove_ui(data['ui_id'])
		manager.remove_action_group(data['action_group'])
		manager.ensure_update()
		
		self._totem.ShutdownPluginInfo = None
		self._totem = None
Example #13
	def show(self):
		mode = getattr(self.totem_object, 'SleepPluginMode')
		self.time = WARNING_TIMEOUT
		
		self.update_time()
		self.button_now.set_label( "%s now" % mode.title() )
		
		self.countdown = CountdownThread(self)
		self.countdown.start()
		
		self.window.show()
Example #14
def methodname(obj): #{{{
    o = None
    mt = methodtype(obj)
    if mt == METHODTYPE_CLASS:
        o = obj.im_class
    elif mt == METHODTYPE_INSTANCE:
        o = obj.im_self
    else:
        return
    for i in _ab.dir(o):
        if _ab.getattr(o, i) == obj:
            return i
Example #15
 def getattr(cls, obj, name, *args):
     """ Overrides the builtin `getattr` function within LimitedExec
         scripts.  This version checks that the given attribute is
         permissible.
     """
     if name.startswith(config.names.LTDEXEC_PRIVATE_PREFIX):
         m = 'Cannot access attribute "{0}".'.format(name)
         raise exceptions.LXPrivateAttrError(m)
     elif name in cls.forbidden_attrs_set:
         m = 'Cannot access attribute "{0}".'.format(name)
         raise exceptions.ForbiddenAttrError(m)
     return __builtin__.getattr(obj, name, *args)
Example #16
    def save_global(self, obj, name=None, pack=struct.pack):
        write = self.write
        memo = self.memo

        if name is None:
            name = obj.__name__

        module = getattr(obj, "__module__", None)
        if module is None:
            module = whichmodule(obj, name)

        try:
            __import__(module)
            mod = sys.modules[module]
            klass = getattr(mod, name)
        except (ImportError, KeyError, AttributeError):
            raise PicklingError(
                "Can't pickle %r: it's not found as %s.%s" %
                (obj, module, name))
        else:
            if klass is not obj:
                raise PicklingError(
                    "Can't pickle %r: it's not the same object as %s.%s" %
                    (obj, module, name))

        if self.proto >= 2:
            code = _extension_registry.get((module, name))
            if code:
                assert code > 0
                if code <= 0xff:
                    write(EXT1 + chr(code))
                elif code <= 0xffff:
                    write("%c%c%c" % (EXT2, code&0xff, code>>8))
                else:
                    write(EXT4 + pack("<i", code))
                return

        write(GLOBAL + module + '\n' + name + '\n')
        self.memoize(obj)
Example #17
    def _set_attribute_helper(self, name, value):
        # #
        """
            Helper method for "self.__setattr__()". Does the actual overwrite \
            process on the given property.

            **name** - is the inaccessible property name.

            **value** - is the new value for the given property name.

            Returns "True" if the given property was successful overwritten \
            or "False" otherwise.
        """
        name = "_%s" % name
        setter_name = "set%s" % name
        if self.is_method(name=setter_name):
            builtins.getattr(self, setter_name)(value)
            return True
        elif self.is_method(name="set"):
            self.set(name, value)
            return True
        return False
Example #18
    def save_global(self, obj, name=None, pack=struct.pack):
        write = self.write
        memo = self.memo

        if name is None:
            name = obj.__name__

        module = getattr(obj, "__module__", None)
        if module is None:
            module = whichmodule(obj, name)

        try:
            __import__(module)
            mod = sys.modules[module]
            klass = getattr(mod, name)
        except (ImportError, KeyError, AttributeError):
            raise PicklingError(
                "Can't pickle %r: it's not found as %s.%s" %
                (obj, module, name))
        else:
            if klass is not obj:
                raise PicklingError(
                    "Can't pickle %r: it's not the same object as %s.%s" %
                    (obj, module, name))

        if self.proto >= 2:
            code = _extension_registry.get((module, name))
            if code:
                assert code > 0
                if code <= 0xff:
                    write(EXT1 + chr(code))
                elif code <= 0xffff:
                    write("%c%c%c" % (EXT2, code&0xff, code>>8))
                else:
                    write(EXT4 + pack("<i", code))
                return

        write(GLOBAL + module + '\n' + name + '\n')
        self.memoize(obj)
Example #19
 def return_handler(advice):
     '''
         Supports classes, simple functions or methods as triggered \
         return handler.
     '''
     if 'return' == advice['event']:
         self.return_value = advice['callback'](
             self.class_object, self.object,
             self.__func__, self.arguments, self.keywords,
             return_value)
         if(builtins.hasattr(advice['callback'], 'aspect') and
            builtins.callable(
                builtins.getattr(advice['callback'], 'aspect'))):
             self.return_value = self.return_value.aspect()
Example #20
    def _initialize(
            self, _url, width_in_pixel=800, height_in_pixel=600,
            fullscreen=False, no_window_decoration=False,
            default_gui_toolkit='qt', no_progress_bar=False,
            default_title='No gui loaded.', stop_order='stop', **keywords):
# #
        '''
            Initializes a web view or tries to open a default browser if no \
            suitable gui toolkit is available.
        '''
        self.__class__.instances.append(self)

        # # # region properties

        '''Dynamic runtime objects for constructing a simple web window.'''
        self.window = self.scroller = self.vbox = self.progress_bar = \
            self.browser = None
        '''Trigger setter for right url formatting.'''
        self.url = self._url
        '''
            If setted "True" window will be closed on next gtk main iteration.
        '''
        self._gtk_close = False
        __logger__.info(
            'Start web gui with gui toolkit "%s".', self.gui_toolkit)
        if not __test_mode__:
            '''
                This lock object handles to wait until all gtk windows are \
                closed before the program terminates.
            '''
            self._close_gtk_windows_lock = threading.Lock()
            self._close_gtk_windows_lock.acquire()

        # # # endregion

# # python3.5
# #             browser_thread = threading.Thread(
# #                 target=builtins.getattr(
# #                     self, '_initialize_%s_browser' % self.gui_toolkit),
# #                 daemon=True
# #             ).start()
            browser_thread = threading.Thread(target=builtins.getattr(
                self, '_initialize_%s_browser' % self.gui_toolkit))
            browser_thread.daemon = True
            browser_thread.start()
# #
            if self.stop_order:
                self.wait_for_order()
        return self
Example #21
	def show(self, totem_object):
		self.totem_object = totem_object
		
		mode = getattr(self.totem_object,'SleepPluginMode',None)
		
		if mode == SLEEP_MODE_DISABLED or mode == None:
			self.radio_disabled.set_active(True)
		
		elif mode == SLEEP_MODE_SHUTDOWN:
			self.radio_shutdown.set_active(True)
		
		elif mode == SLEEP_MODE_HIBERNATE:
			self.radio_hibernate.set_active(True)
		
		self.window.show()
Example #22
def whichmodule(func, funcname):
    """Figure out the module in which a function occurs.

    Search sys.modules for the module.
    Cache in classmap.
    Return a module name.
    If the function cannot be found, return "__main__".
    """
    # Python functions should always get an __module__ from their globals.
    mod = getattr(func, "__module__", None)
    if mod is not None:
        return mod
    if func in classmap:
        return classmap[func]

    for name, module in sys.modules.items():
        if module is None:
            continue # skip dummy package entries
        if name != '__main__' and getattr(module, funcname, None) is func:
            break
    else:
        name = '__main__'
    classmap[func] = name
    return name
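A small sketch of the lookup (assuming the function above and this sketch live in the same module, with an initially empty classmap cache; the module name is the Python 2 one):

classmap = {}

whichmodule(len, 'len')        # built-ins carry __module__ -> '__builtin__'

def orphan():
    pass

orphan.__module__ = None       # force the sys.modules scan
whichmodule(orphan, 'orphan')  # nothing outside __main__ defines it -> '__main__'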
Example #23
    def maybe(self, func, *args, **kwargs):
        r""" Call ``func'' with the provided arguments.

        Return ``None'' on ``AssertionError''.
        """
        if not isinstance(self, ParserSkeleton):
            raise RuntimeError, type(self)  # expecting ParserSkeleton
        if not getattr(self, func.__name__) == func:
            raise ValueError, repr(func)  # must belong to self

        try:
            with self:  # revert state on error
                return func(*args, **kwargs)
        except AssertionError:
            return None
Example #24
def whichmodule(func, funcname):
    """Figure out the module in which a function occurs.

    Search sys.modules for the module.
    Cache in classmap.
    Return a module name.
    If the function cannot be found, return "__main__".
    """
    # Python functions should always get an __module__ from their globals.
    mod = getattr(func, "__module__", None)
    if mod is not None:
        return mod
    if func in classmap:
        return classmap[func]

    for name, module in sys.modules.items():
        if module is None:
            continue  # skip dummy package entries
        if name != '__main__' and getattr(module, funcname, None) is func:
            break
    else:
        name = '__main__'
    classmap[func] = name
    return name
Example #25
 def format(self, record):
     '''Appends the level specified color to the logging output.'''
     levelname = record.levelname
     if levelname in self.COLOR_TO_LEVEL_MAPPING:
         record.levelname = (
             SET_ATTRIBUTE_MODE % RESET_ATTRIBUTE_MODE
         ) + (
             SET_ATTRIBUTE_MODE % COLOR['foreground'][
                 self.COLOR_TO_LEVEL_MAPPING[levelname]
             ]
         ) + levelname + (SET_ATTRIBUTE_MODE % RESET_ATTRIBUTE_MODE)
     '''
         Take this method type by another instance of this class via \
         introspection.
     '''
     return builtins.getattr(builtins.super(
         self.__class__, self
     ), inspect.stack()[0][3])(record)
Example #26
    def __get__(self, object, class_object=None):
# #
        '''
            Triggers when the wrapped function should be grabbed from an \
            instance. Saves the bound object instance and class.

            **object**       - Contains the instance the function is bound to.

            **class_object** - Contains the class the function is bound to.

            Examples:

            >>> function_decorator = FunctionDecorator(
            ...     FunctionDecorator.__get__)
            >>> function_decorator.__get__(
            ...     function_decorator
            ... ) # doctest: +ELLIPSIS
            <...FunctionDecorator.__get__...>
        '''
        if self.object is not None and self.__func__.__name__ == '_include':
            '''
                If the same function was called twice in the same context \
                (recursion), create a new instance for it manually. Note that \
                each function is only represented by a single JointPoint \
                instance.
            '''
            recursive_instance = self.__class__(self.__func__)
            '''
                Restore old information about given class to function \
                (method_type).
            '''
            recursive_instance.method_type = self.method_type
            recursive_instance.class_object = self.class_object
            recursive_instance.wrapped_decorator = self.wrapped_decorator
            return recursive_instance.__get__(object, class_object)
        if self.wrapped_decorator is not None:
            self.__func__ = builtins.getattr(
                self.wrapped_decorator, inspect.stack()[0][3]
            )(object, class_object)
        self.object = object
        self.class_object = class_object
        if self.class_object is None:
            self.class_object = object.__class__
        return self.get_wrapper_function()
Example #27
        def call_handler(advice):
            '''
                Supports classes, simple functions or methods as triggered \
                call handler.

                **advice** - Dictionary saving the advice properties.

                Returns "True" if we have a "call" event and the functions \
                return value if we have "return" event.
            '''
            if 'call' == advice['event']:
                result = advice['callback'](
                    self.class_object, self.object, self.__func__,
                    self.arguments, self.keywords)
                if(builtins.hasattr(advice['callback'], 'aspect') and
                   builtins.isinstance(
                       builtins.getattr(advice['callback'], 'aspect'),
                       (Method, Function))):
                    result = result.aspect()
                return result is not False
            return True
Example #28
    def stop(self, *arguments, **keywords):
# #
        '''
            Closes all created web views. Note that when the default \
            installed browser fall-back is used, this instance cannot be \
            destroyed.

            Examples:

            >>> Browser('google.de').stop() # doctest: +ELLIPSIS
            Object of "Browser" with url "http://google.de" in 800 pixel...
        '''
# # python3.5
# #         pass
        reason, keywords = Dictionary(content=keywords).pop_from_keywords(
            name='reason', default_value='')
# #
        if self.__dict__.get('window') is not None:
            if self.gui_toolkit == 'qt':
                self.window.closeAllWindows()
                if not (builtins.len(arguments) or reason):
                    reason = 'clicking qt close button'
            elif self.gui_toolkit == 'gtk':
                self._gtk_close = True
                if builtins.len(arguments) and builtins.isinstance(
                    arguments[0], gtk.Window
                ):
                    reason = 'clicking gtk close button'
                else:
                    '''
                        NOTE: We got a close trigger from a thread other \
                        than the one running the main gtk loop. We have to \
                        wait until gtk has finished its closing procedures.
                    '''
                    self._close_gtk_windows_lock.acquire()
            __logger__.info('All "%s" windows closed.', self.gui_toolkit)
        '''Take this method type by the abstract class via introspection.'''
        return builtins.getattr(
            builtins.super(self.__class__, self), inspect.stack()[0][3]
        )(*arguments, reason=reason, **keywords)
Example #29
    def test_configuration(self):
        """
        Creates a new configuration file, loads it and tests whether a setting
        is loaded correctly.
        """
        key = 'testing'
        value = 'setting'
        path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            'test.ini')

        test_file = open(path, 'w+')
        test_file.write('[madmex]\n')
        test_file.write('%s=%s\n' % (key, value))
        test_file.close()

        from madmex.configuration import ENVIRONMENT_VARIABLE
        SETTINGS.reload()
        os.environ[ENVIRONMENT_VARIABLE] = path
        SETTINGS.reload()
        self.assertEqual(value, getattr(SETTINGS, key.upper()))
        del os.environ[ENVIRONMENT_VARIABLE]
        os.remove(path)
Example #30
    def __new__(
        cls, class_name, base_classes, class_scope, *arguments, **keywords
    ):
# #
        '''
            Triggers if a new instance is created. Sets the default name for \
            an orm instance.

            **class_name**   - Name of class to create.

            **base_classes** - A tuple of base classes for class to create.

            **class_scope**  - A dictionary object to define properties and \
                               methods for new class.

            Additional arguments and keywords are forwarded to python's \
            native "builtins.type" function.

            Returns the newly created class.

            Examples:

            >>> if sys.version_info.major < 3:
            ...     class UserModel: __metaclass__ = Model
            ... else:
            ...     exec('class UserModel(metaclass=Model): pass')
        '''
        from boostnode.extension.native import String

        class_scope['__table_name__'] = String(
            class_name
        ).camel_case_to_delimited.content
        class_scope['__tablename__'] = class_scope['__table_name__']
        class_scope['db_table'] = class_scope['__table_name__']
        '''Take this method name via introspection.'''
        return builtins.getattr(
            builtins.super(Model, cls), inspect.stack()[0][3]
        )(cls, class_name, base_classes, class_scope, *arguments, **keywords)
Example #31
 def test_configuration(self):
     """
     Creates a new configuration file, loads it and tests whether a setting
     is loaded correctly.
     """
     key = 'testing'
     value = 'setting'
     path = os.path.join(
         os.path.dirname(os.path.realpath(__file__)),
         'test.ini'
     )
     
     test_file = open(path, 'w+')
     test_file.write('[madmex]\n')
     test_file.write('%s=%s\n' % (key, value))
     test_file.close()
     
     from madmex.configuration import ENVIRONMENT_VARIABLE
     SETTINGS.reload()
     os.environ[ENVIRONMENT_VARIABLE] = path
     SETTINGS.reload()
     self.assertEqual(value, getattr(SETTINGS, key.upper()))
     del os.environ[ENVIRONMENT_VARIABLE]
     os.remove(path)
Example #32
 def generate_payment_id(self):
     order_id_field = PaymentProcessor.get_backend_setting(
         'order_unique_id_field', 'id')
     order_id = getattr(self.payment.order, order_id_field)
     return six.text_type(u'{}{}'.format(order_id, self.payment.pk))
Example #33
	def update_time(self):
		mode = getattr(self.totem_object,'SleepPluginMode')
		
		time_message = human_time(self.time)
		
		self.label_message.set_text( "This system will %s in %s." % (mode, time_message) )
Example #34
    def save_reduce(self, func, args, state=None,
                    listitems=None, dictitems=None, obj=None):
        # This API is called by some subclasses

        # Assert that args is a tuple or None
        if not isinstance(args, TupleType):
            raise PicklingError("args from reduce() should be a tuple")

        # Assert that func is callable
        if not callable(func):
            raise PicklingError("func from reduce should be callable")

        save = self.save
        write = self.write

        # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
        if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
            # A __reduce__ implementation can direct protocol 2 to
            # use the more efficient NEWOBJ opcode, while still
            # allowing protocol 0 and 1 to work normally.  For this to
            # work, the function returned by __reduce__ should be
            # called __newobj__, and its first argument should be a
            # new-style class.  The implementation for __newobj__
            # should be as follows, although pickle has no way to
            # verify this:
            #
            # def __newobj__(cls, *args):
            #     return cls.__new__(cls, *args)
            #
            # Protocols 0 and 1 will pickle a reference to __newobj__,
            # while protocol 2 (and above) will pickle a reference to
            # cls, the remaining args tuple, and the NEWOBJ code,
            # which calls cls.__new__(cls, *args) at unpickling time
            # (see load_newobj below).  If __reduce__ returns a
            # three-tuple, the state from the third tuple item will be
            # pickled regardless of the protocol, calling __setstate__
            # at unpickling time (see load_build below).
            #
            # Note that no standard __newobj__ implementation exists;
            # you have to provide your own.  This is to enforce
            # compatibility with Python 2.2 (pickles written using
            # protocol 0 or 1 in Python 2.3 should be unpicklable by
            # Python 2.2).
            cls = args[0]
            if not hasattr(cls, "__new__"):
                raise PicklingError(
                    "args[0] from __newobj__ args has no __new__")
            if obj is not None and cls is not obj.__class__:
                raise PicklingError(
                    "args[0] from __newobj__ args has the wrong class")
            args = args[1:]
            save(cls)
            save(args)
            write(NEWOBJ)
        else:
            save(func)
            save(args)
            write(REDUCE)

        if obj is not None:
            self.memoize(obj)

        # More new special cases (that work with older protocols as
        # well): when __reduce__ returns a tuple with 4 or 5 items,
        # the 4th and 5th item should be iterators that provide list
        # items and dict items (as (key, value) tuples), or None.

        if listitems is not None:
            self._batch_appends(listitems)

        if dictitems is not None:
            self._batch_setitems(dictitems)

        if state is not None:
            save(state)
            write(BUILD)
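A hedged sketch of the __newobj__ convention described in the comment above; the Point class is only illustrative. Under protocol 2, save_reduce recognizes the callable's name and emits NEWOBJ instead of REDUCE.

def __newobj__(cls, *args):
    return cls.__new__(cls, *args)

class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

    def __reduce__(self):
        # the first item is named __newobj__ and its first argument is the
        # class; the third item becomes the state restored by load_build()
        return __newobj__, (Point,), self.__dict__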
Example #35
    def save(self, obj):
        # Check for persistent id (defined by a subclass)
        pid = self.persistent_id(obj)
        if pid:
            self.save_pers(pid)
            return

        # Check the memo
        x = self.memo.get(id(obj))
        if x:
            self.write(self.get(x[0]))
            return

        # Check the type dispatch table
        t = type(obj)
        f = self.dispatch.get(t)
        if f:
            f(self, obj) # Call unbound method with explicit self
            return

        # Check for a class with a custom metaclass; treat as regular class
        try:
            issc = issubclass(t, TypeType)
        except TypeError: # t is not a class (old Boost; see SF #502085)
            issc = 0
        if issc:
            self.save_global(obj)
            return

        # Check copy_reg.dispatch_table
        reduce = dispatch_table.get(t)
        if reduce:
            rv = reduce(obj)
        else:
            # Check for a __reduce_ex__ method, fall back to __reduce__
            reduce = getattr(obj, "__reduce_ex__", None)
            if reduce:
                rv = reduce(self.proto)
            else:
                reduce = getattr(obj, "__reduce__", None)
                if reduce:
                    rv = reduce()
                else:
                    raise PicklingError("Can't pickle %r object: %r" %
                                        (t.__name__, obj))

        # Check for string returned by reduce(), meaning "save as global"
        if type(rv) is StringType:
            self.save_global(obj, rv)
            return

        # Assert that reduce() returned a tuple
        if type(rv) is not TupleType:
            raise PicklingError("%s must return string or tuple" % reduce)

        # Assert that it returned an appropriately sized tuple
        l = len(rv)
        if not (2 <= l <= 5):
            raise PicklingError("Tuple returned by %s must have "
                                "two to five elements" % reduce)

        # Save the reduce() output and finally memoize the object
        self.save_reduce(obj=obj, *rv)
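The copy_reg route checked near the top of save() can be exercised by registering a reducer. A sketch in Python 2 spelling, assuming dispatch_table refers to copy_reg.dispatch_table as in the standard pickle module; the Angle class is only illustrative.

import copy_reg

class Angle(object):
    def __init__(self, degrees):
        self.degrees = degrees

def reduce_angle(angle):
    # save() finds this via dispatch_table and pickles the returned 2-tuple
    return Angle, (angle.degrees,)

copy_reg.pickle(Angle, reduce_angle)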
Example #36
 def find_class(self, module, name):
     # Subclasses may override this
     __import__(module)
     mod = sys.modules[module]
     klass = getattr(mod, name)
     return klass
Example #37
    def save(self, obj):
        # Check for persistent id (defined by a subclass)
        pid = self.persistent_id(obj)
        if pid:
            self.save_pers(pid)
            return

        # Check the memo
        x = self.memo.get(id(obj))
        if x:
            self.write(self.get(x[0]))
            return

        # Check the type dispatch table
        t = type(obj)
        f = self.dispatch.get(t)
        if f:
            f(self, obj)  # Call unbound method with explicit self
            return

        # Check for a class with a custom metaclass; treat as regular class
        try:
            issc = issubclass(t, TypeType)
        except TypeError:  # t is not a class (old Boost; see SF #502085)
            issc = 0
        if issc:
            self.save_global(obj)
            return

        # Check copy_reg.dispatch_table
        reduce = dispatch_table.get(t)
        if reduce:
            rv = reduce(obj)
        else:
            # Check for a __reduce_ex__ method, fall back to __reduce__
            reduce = getattr(obj, "__reduce_ex__", None)
            if reduce:
                rv = reduce(self.proto)
            else:
                reduce = getattr(obj, "__reduce__", None)
                if reduce:
                    rv = reduce()
                else:
                    raise PicklingError("Can't pickle %r object: %r" %
                                        (t.__name__, obj))

        # Check for string returned by reduce(), meaning "save as global"
        if type(rv) is StringType:
            self.save_global(obj, rv)
            return

        # Assert that reduce() returned a tuple
        if type(rv) is not TupleType:
            raise PicklingError("%s must return string or tuple" % reduce)

        # Assert that it returned an appropriately sized tuple
        l = len(rv)
        if not (2 <= l <= 5):
            raise PicklingError("Tuple returned by %s must have "
                                "two to five elements" % reduce)

        # Save the reduce() output and finally memoize the object
        self.save_reduce(obj=obj, *rv)
Example #38
def _uno_struct__getattr__(self,name):
    return __builtin__.getattr(self.__dict__["value"],name)
Example #39
def getattr(attr, obj):
  return __builtin__.getattr(obj, attr)
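This variant only flips the argument order of the built-in; a usage sketch (assuming Python 2, where __builtin__ provides the original built-in):

getattr('real', 1 + 2j)      # -> 1.0
getattr('upper', 'abc')()    # returns the bound method, then call it -> 'ABC'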
Example #40
 def find_class(self, module, name):
     # Subclasses may override this
     __import__(module)
     mod = sys.modules[module]
     klass = getattr(mod, name)
     return klass
Example #41
    def save_reduce(self,
                    func,
                    args,
                    state=None,
                    listitems=None,
                    dictitems=None,
                    obj=None):
        # This API is called by some subclasses

        # Assert that args is a tuple or None
        if not isinstance(args, TupleType):
            raise PicklingError("args from reduce() should be a tuple")

        # Assert that func is callable
        if not callable(func):
            raise PicklingError("func from reduce should be callable")

        save = self.save
        write = self.write

        # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
        if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
            # A __reduce__ implementation can direct protocol 2 to
            # use the more efficient NEWOBJ opcode, while still
            # allowing protocol 0 and 1 to work normally.  For this to
            # work, the function returned by __reduce__ should be
            # called __newobj__, and its first argument should be a
            # new-style class.  The implementation for __newobj__
            # should be as follows, although pickle has no way to
            # verify this:
            #
            # def __newobj__(cls, *args):
            #     return cls.__new__(cls, *args)
            #
            # Protocols 0 and 1 will pickle a reference to __newobj__,
            # while protocol 2 (and above) will pickle a reference to
            # cls, the remaining args tuple, and the NEWOBJ code,
            # which calls cls.__new__(cls, *args) at unpickling time
            # (see load_newobj below).  If __reduce__ returns a
            # three-tuple, the state from the third tuple item will be
            # pickled regardless of the protocol, calling __setstate__
            # at unpickling time (see load_build below).
            #
            # Note that no standard __newobj__ implementation exists;
            # you have to provide your own.  This is to enforce
            # compatibility with Python 2.2 (pickles written using
            # protocol 0 or 1 in Python 2.3 should be unpicklable by
            # Python 2.2).
            cls = args[0]
            if not hasattr(cls, "__new__"):
                raise PicklingError(
                    "args[0] from __newobj__ args has no __new__")
            if obj is not None and cls is not obj.__class__:
                raise PicklingError(
                    "args[0] from __newobj__ args has the wrong class")
            args = args[1:]
            save(cls)
            save(args)
            write(NEWOBJ)
        else:
            save(func)
            save(args)
            write(REDUCE)

        if obj is not None:
            self.memoize(obj)

        # More new special cases (that work with older protocols as
        # well): when __reduce__ returns a tuple with 4 or 5 items,
        # the 4th and 5th item should be iterators that provide list
        # items and dict items (as (key, value) tuples), or None.

        if listitems is not None:
            self._batch_appends(listitems)

        if dictitems is not None:
            self._batch_setitems(dictitems)

        if state is not None:
            save(state)
            write(BUILD)
Example #42
 def generate_payment_id(self):
     order_id_field = PaymentProcessor.get_backend_setting('order_unique_id_field', 'id')
     order_id = getattr(self.payment.order, order_id_field)
     return six.text_type(u'{}{}'.format(order_id, self.payment.pk))
Example #43
class Pickler:
    def __init__(self, file, protocol=None):
        """This takes a file-like object for writing a pickle data stream.

        The optional protocol argument tells the pickler to use the
        given protocol; supported protocols are 0, 1, 2.  The default
        protocol is 0, to be backwards compatible.  (Protocol 0 is the
        only protocol that can be written to a file opened in text
        mode and read back successfully.  When using a protocol higher
        than 0, make sure the file is opened in binary mode, both when
        pickling and unpickling.)

        Protocol 1 is more efficient than protocol 0; protocol 2 is
        more efficient than protocol 1.

        Specifying a negative protocol version selects the highest
        protocol version supported.  The higher the protocol used, the
        more recent the version of Python needed to read the pickle
        produced.

        The file parameter must have a write() method that accepts a single
        string argument.  It can thus be an open file object, a StringIO
        object, or any other custom object that meets this interface.

        """
        if protocol is None:
            protocol = 0
        if protocol < 0:
            protocol = HIGHEST_PROTOCOL
        elif not 0 <= protocol <= HIGHEST_PROTOCOL:
            raise ValueError("pickle protocol must be <= %d" %
                             HIGHEST_PROTOCOL)
        self.write = file.write
        self.memo = {}
        self.proto = int(protocol)
        self.bin = protocol >= 1
        self.fast = 0

    def _pickle_moduledict(self, obj):
        try:
            modict = self.module_dict_ids
        except AttributeError:
            modict = {}
            from sys import modules
            for mod in modules.values():
                if isinstance(mod, ModuleType):
                    try:
                        modict[id(mod.__dict__)] = mod
                    except KeyboardInterrupt:
                        raise
                    except:  # obscure: the above can fail for
                        # arbitrary reasons, because of the py lib
                        pass
            self.module_dict_ids = modict

        thisid = id(obj)
        try:
            themodule = modict[thisid]
        except KeyError:
            return None
        from __builtin__ import getattr
        return getattr, (themodule, '__dict__')

    def clear_memo(self):
        """Clears the pickler's "memo".

        The memo is the data structure that remembers which objects the
        pickler has already seen, so that shared or recursive objects are
        pickled by reference and not by value.  This method is useful when
        re-using picklers.

        """
        self.memo.clear()

    def dump(self, obj):
        """Write a pickled representation of obj to the open file."""
        if self.proto >= 2:
            self.write(PROTO + chr(self.proto))
        self.save(obj)
        self.write(STOP)

    def memoize(self, obj):
        """Store an object in the memo."""

        # The Pickler memo is a dictionary mapping object ids to 2-tuples
        # that contain the Unpickler memo key and the object being memoized.
        # The memo key is written to the pickle and will become
        # the key in the Unpickler's memo.  The object is stored in the
        # Pickler memo so that transient objects are kept alive during
        # pickling.

        # The use of the Unpickler memo length as the memo key is just a
        # convention.  The only requirement is that the memo values be unique.
        # But there appears no advantage to any other scheme, and this
        # scheme allows the Unpickler memo to be implemented as a plain (but
        # growable) array, indexed by memo key.
        if self.fast:
            return
        assert id(obj) not in self.memo
        memo_len = len(self.memo)
        self.write(self.put(memo_len))
        self.memo[id(obj)] = memo_len, obj

    # Return a PUT (BINPUT, LONG_BINPUT) opcode string, with argument i.
    def put(self, i, pack=struct.pack):
        if self.bin:
            if i < 256:
                return BINPUT + chr(i)
            else:
                return LONG_BINPUT + pack("<i", i)

        return PUT + repr(i) + '\n'

    # Return a GET (BINGET, LONG_BINGET) opcode string, with argument i.
    def get(self, i, pack=struct.pack):
        if self.bin:
            if i < 256:
                return BINGET + chr(i)
            else:
                return LONG_BINGET + pack("<i", i)

        return GET + repr(i) + '\n'

    def save(self, obj):
        # Check for persistent id (defined by a subclass)
        pid = self.persistent_id(obj)
        if pid:
            self.save_pers(pid)
            return

        # Check the memo
        x = self.memo.get(id(obj))
        if x:
            self.write(self.get(x[0]))
            return

        # Check the type dispatch table
        t = type(obj)
        f = self.dispatch.get(t)
        if f:
            f(self, obj)  # Call unbound method with explicit self
            return

        # Check for a class with a custom metaclass; treat as regular class
        try:
            issc = issubclass(t, TypeType)
        except TypeError:  # t is not a class (old Boost; see SF #502085)
            issc = 0
        if issc:
            self.save_global(obj)
            return

        # Check copy_reg.dispatch_table
        reduce = dispatch_table.get(t)
        if reduce:
            rv = reduce(obj)
        else:
            # Check for a __reduce_ex__ method, fall back to __reduce__
            reduce = getattr(obj, "__reduce_ex__", None)
            if reduce:
                rv = reduce(self.proto)
            else:
                reduce = getattr(obj, "__reduce__", None)
                if reduce:
                    rv = reduce()
                else:
                    raise PicklingError("Can't pickle %r object: %r" %
                                        (t.__name__, obj))

        # Check for string returned by reduce(), meaning "save as global"
        if type(rv) is StringType:
            self.save_global(obj, rv)
            return

        # Assert that reduce() returned a tuple
        if type(rv) is not TupleType:
            raise PicklingError("%s must return string or tuple" % reduce)

        # Assert that it returned an appropriately sized tuple
        l = len(rv)
        if not (2 <= l <= 5):
            raise PicklingError("Tuple returned by %s must have "
                                "two to five elements" % reduce)

        # Save the reduce() output and finally memoize the object
        self.save_reduce(obj=obj, *rv)

    def persistent_id(self, obj):
        # This exists so a subclass can override it
        return None

    def save_pers(self, pid):
        # Save a persistent id reference
        if self.bin:
            self.save(pid)
            self.write(BINPERSID)
        else:
            self.write(PERSID + str(pid) + '\n')

    def save_reduce(self,
                    func,
                    args,
                    state=None,
                    listitems=None,
                    dictitems=None,
                    obj=None):
        # This API is called by some subclasses

        # Assert that args is a tuple or None
        if not isinstance(args, TupleType):
            raise PicklingError("args from reduce() should be a tuple")

        # Assert that func is callable
        if not callable(func):
            raise PicklingError("func from reduce should be callable")

        save = self.save
        write = self.write

        # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
        if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
            # A __reduce__ implementation can direct protocol 2 to
            # use the more efficient NEWOBJ opcode, while still
            # allowing protocol 0 and 1 to work normally.  For this to
            # work, the function returned by __reduce__ should be
            # called __newobj__, and its first argument should be a
            # new-style class.  The implementation for __newobj__
            # should be as follows, although pickle has no way to
            # verify this:
            #
            # def __newobj__(cls, *args):
            #     return cls.__new__(cls, *args)
            #
            # Protocols 0 and 1 will pickle a reference to __newobj__,
            # while protocol 2 (and above) will pickle a reference to
            # cls, the remaining args tuple, and the NEWOBJ code,
            # which calls cls.__new__(cls, *args) at unpickling time
            # (see load_newobj below).  If __reduce__ returns a
            # three-tuple, the state from the third tuple item will be
            # pickled regardless of the protocol, calling __setstate__
            # at unpickling time (see load_build below).
            #
            # Note that no standard __newobj__ implementation exists;
            # you have to provide your own.  This is to enforce
            # compatibility with Python 2.2 (pickles written using
            # protocol 0 or 1 in Python 2.3 should be unpicklable by
            # Python 2.2).
            cls = args[0]
            if not hasattr(cls, "__new__"):
                raise PicklingError(
                    "args[0] from __newobj__ args has no __new__")
            if obj is not None and cls is not obj.__class__:
                raise PicklingError(
                    "args[0] from __newobj__ args has the wrong class")
            args = args[1:]
            save(cls)
            save(args)
            write(NEWOBJ)
        else:
            save(func)
            save(args)
            write(REDUCE)

        if obj is not None:
            self.memoize(obj)

        # More new special cases (that work with older protocols as
        # well): when __reduce__ returns a tuple with 4 or 5 items,
        # the 4th and 5th item should be iterators that provide list
        # items and dict items (as (key, value) tuples), or None.

        if listitems is not None:
            self._batch_appends(listitems)

        if dictitems is not None:
            self._batch_setitems(dictitems)

        if state is not None:
            save(state)
            write(BUILD)

    # Methods below this point are dispatched through the dispatch table

    dispatch = {}

    def save_none(self, obj):
        self.write(NONE)

    dispatch[NoneType] = save_none

    def save_bool(self, obj):
        if self.proto >= 2:
            self.write(obj and NEWTRUE or NEWFALSE)
        else:
            self.write(obj and TRUE or FALSE)

    dispatch[bool] = save_bool

    def save_int(self, obj, pack=struct.pack):
        if self.bin:
            # If the int is small enough to fit in a signed 4-byte 2's-comp
            # format, we can store it more efficiently than the general
            # case.
            # First one- and two-byte unsigned ints:
            if obj >= 0:
                if obj <= 0xff:
                    self.write(BININT1 + chr(obj))
                    return
                if obj <= 0xffff:
                    self.write("%c%c%c" % (BININT2, obj & 0xff, obj >> 8))
                    return
            # Next check for 4-byte signed ints:
            high_bits = obj >> 31  # note that Python shift sign-extends
            if high_bits == 0 or high_bits == -1:
                # All high bits are copies of bit 2**31, so the value
                # fits in a 4-byte signed int.
                self.write(BININT + pack("<i", obj))
                return
        # Text pickle, or int too big to fit in signed 4-byte format.
        self.write(INT + repr(obj) + '\n')

    dispatch[IntType] = save_int

    def save_long(self, obj, pack=struct.pack):
        if self.proto >= 2:
            bytes = encode_long(obj)
            n = len(bytes)
            if n < 256:
                self.write(LONG1 + chr(n) + bytes)
            else:
                self.write(LONG4 + pack("<i", n) + bytes)
            return
        self.write(LONG + repr(obj) + '\n')

    dispatch[LongType] = save_long

    def save_float(self, obj, pack=struct.pack):
        if self.bin:
            self.write(BINFLOAT + pack('>d', obj))
        else:
            self.write(FLOAT + repr(obj) + '\n')

    dispatch[FloatType] = save_float

    def save_string(self, obj, pack=struct.pack):
        if self.bin:
            n = len(obj)
            if n < 256:
                self.write(SHORT_BINSTRING + chr(n) + obj)
            else:
                self.write(BINSTRING + pack("<i", n) + obj)
        else:
            self.write(STRING + repr(obj) + '\n')
        self.memoize(obj)

    dispatch[StringType] = save_string

    def save_unicode(self, obj, pack=struct.pack):
        if self.bin:
            encoding = obj.encode('utf-8')
            n = len(encoding)
            self.write(BINUNICODE + pack("<i", n) + encoding)
        else:
            obj = obj.replace("\\", "\\u005c")
            obj = obj.replace("\n", "\\u000a")
            self.write(UNICODE + obj.encode('raw-unicode-escape') + '\n')
        self.memoize(obj)

    dispatch[UnicodeType] = save_unicode

    if StringType == UnicodeType:
        # This is true for Jython
        def save_string(self, obj, pack=struct.pack):
            unicode = obj.isunicode()

            if self.bin:
                if unicode:
                    obj = obj.encode("utf-8")
                l = len(obj)
                if l < 256 and not unicode:
                    self.write(SHORT_BINSTRING + chr(l) + obj)
                else:
                    s = pack("<i", l)
                    if unicode:
                        self.write(BINUNICODE + s + obj)
                    else:
                        self.write(BINSTRING + s + obj)
            else:
                if unicode:
                    obj = obj.replace("\\", "\\u005c")
                    obj = obj.replace("\n", "\\u000a")
                    obj = obj.encode('raw-unicode-escape')
                    self.write(UNICODE + obj + '\n')
                else:
                    self.write(STRING + repr(obj) + '\n')
            self.memoize(obj)

        dispatch[StringType] = save_string

    def save_tuple(self, obj):
        write = self.write
        proto = self.proto

        n = len(obj)
        if n == 0:
            if proto:
                write(EMPTY_TUPLE)
            else:
                write(MARK + TUPLE)
            return

        save = self.save
        memo = self.memo
        if n <= 3 and proto >= 2:
            for element in obj:
                save(element)
            # Subtle.  Same as in the big comment below.
            if id(obj) in memo:
                get = self.get(memo[id(obj)][0])
                write(POP * n + get)
            else:
                write(_tuplesize2code[n])
                self.memoize(obj)
            return

        # proto 0 or proto 1 and tuple isn't empty, or proto > 1 and tuple
        # has more than 3 elements.
        write(MARK)
        for element in obj:
            save(element)

        if id(obj) in memo:
            # Subtle.  obj was not in memo when we entered save_tuple(), so
            # the process of saving the tuple's elements must have saved
            # the tuple itself:  the tuple is recursive.  The proper action
            # now is to throw away everything we put on the stack, and
            # simply GET the tuple (it's already constructed).  This check
            # could have been done in the "for element" loop instead, but
            # recursive tuples are a rare thing.
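            #
            # Illustration: a tuple only becomes recursive through a mutable
            # member, e.g.
            #     l = []; t = (l,); l.append(t)
            # Saving t's elements saves l, which saves t again; that inner
            # save constructs and memoizes t before control returns here.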
            get = self.get(memo[id(obj)][0])
            if proto:
                write(POP_MARK + get)
            else:  # proto 0 -- POP_MARK not available
                write(POP * (n + 1) + get)
            return

        # No recursion.
        self.write(TUPLE)
        self.memoize(obj)

    dispatch[TupleType] = save_tuple

    # save_empty_tuple() isn't used by anything in Python 2.3.  However, I
    # found a Pickler subclass in Zope3 that calls it, so it's not harmless
    # to remove it.
    def save_empty_tuple(self, obj):
        self.write(EMPTY_TUPLE)

    def save_list(self, obj):
        write = self.write

        if self.bin:
            write(EMPTY_LIST)
        else:  # proto 0 -- can't use EMPTY_LIST
            write(MARK + LIST)

        self.memoize(obj)
        self._batch_appends(iter(obj))

    dispatch[ListType] = save_list

    # Keep in synch with cPickle's BATCHSIZE.  Nothing will break if it gets
    # out of synch, though.
    _BATCHSIZE = 1000
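    # Illustration: in binary mode a 2500-element list is therefore written
    # as three MARK ... APPENDS groups (1000, 1000 and 500 items) instead of
    # one unbounded run, keeping the unpickler's stack usage bounded.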

    def _batch_appends(self, items):
        # Helper to batch up APPENDS sequences
        save = self.save
        write = self.write

        if not self.bin:
            for x in items:
                save(x)
                write(APPEND)
            return

        r = xrange(self._BATCHSIZE)
        while items is not None:
            tmp = []
            for i in r:
                try:
                    x = items.next()
                    tmp.append(x)
                except StopIteration:
                    items = None
                    break
            n = len(tmp)
            if n > 1:
                write(MARK)
                for x in tmp:
                    save(x)
                write(APPENDS)
            elif n:
                save(tmp[0])
                write(APPEND)
            # else tmp is empty, and we're done

    def save_dict(self, obj):
        ## Stackless addition BEGIN
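        # (_pickle_moduledict, defined elsewhere on this class, checks
        #  whether obj is the __dict__ of an importable module; if so the
        #  dict is pickled as a reference to that module rather than by
        #  value.)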
        modict_saver = self._pickle_moduledict(obj)
        if modict_saver is not None:
            return self.save_reduce(*modict_saver)
        ## Stackless addition END

        write = self.write

        if self.bin:
            write(EMPTY_DICT)
        else:  # proto 0 -- can't use EMPTY_DICT
            write(MARK + DICT)

        self.memoize(obj)
        self._batch_setitems(obj.iteritems())

    dispatch[DictionaryType] = save_dict
    if PyStringMap is not None:
        dispatch[PyStringMap] = save_dict

    def _batch_setitems(self, items):
        # Helper to batch up SETITEMS sequences; proto >= 1 only
        save = self.save
        write = self.write

        if not self.bin:
            for k, v in items:
                save(k)
                save(v)
                write(SETITEM)
            return

        r = xrange(self._BATCHSIZE)
        while items is not None:
            tmp = []
            for i in r:
                try:
                    tmp.append(items.next())
                except StopIteration:
                    items = None
                    break
            n = len(tmp)
            if n > 1:
                write(MARK)
                for k, v in tmp:
                    save(k)
                    save(v)
                write(SETITEMS)
            elif n:
                k, v = tmp[0]
                save(k)
                save(v)
                write(SETITEM)
            # else tmp is empty, and we're done

    def save_inst(self, obj):
        cls = obj.__class__

        memo = self.memo
        write = self.write
        save = self.save

        if hasattr(obj, '__getinitargs__'):
            args = obj.__getinitargs__()
            len(args)  # XXX Assert it's a sequence
            _keep_alive(args, memo)
        else:
            args = ()

        write(MARK)

        if self.bin:
            save(cls)
            for arg in args:
                save(arg)
            write(OBJ)
        else:
            for arg in args:
                save(arg)
            write(INST + cls.__module__ + '\n' + cls.__name__ + '\n')

        self.memoize(obj)

        try:
            getstate = obj.__getstate__
        except AttributeError:
            stuff = obj.__dict__
        else:
            stuff = getstate()
            _keep_alive(stuff, memo)
        save(stuff)
        write(BUILD)

    dispatch[InstanceType] = save_inst

    def save_global(self, obj, name=None, pack=struct.pack):
        write = self.write
        memo = self.memo

        if name is None:
            name = obj.__name__

        module = getattr(obj, "__module__", None)
        if module is None:
            module = whichmodule(obj, name)

        try:
            __import__(module)
            mod = sys.modules[module]
            klass = getattr(mod, name)
        except (ImportError, KeyError, AttributeError):
            raise PicklingError("Can't pickle %r: it's not found as %s.%s" %
                                (obj, module, name))
        else:
            if klass is not obj:
                raise PicklingError(
                    "Can't pickle %r: it's not the same object as %s.%s" %
                    (obj, module, name))

        if self.proto >= 2:
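            # Illustration: extension codes are registered with
            # copy_reg.add_extension(module, name, code); a registered
            # (module, name) pair is written as a compact EXT1/EXT2/EXT4
            # reference below instead of the full GLOBAL text form.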
            code = _extension_registry.get((module, name))
            if code:
                assert code > 0
                if code <= 0xff:
                    write(EXT1 + chr(code))
                elif code <= 0xffff:
                    write("%c%c%c" % (EXT2, code & 0xff, code >> 8))
                else:
                    write(EXT4 + pack("<i", code))
                return

        write(GLOBAL + module + '\n' + name + '\n')
        self.memoize(obj)

    def save_function(self, obj):
        try:
            return self.save_global(obj)
        except PicklingError, e:
            pass
        # Check copy_reg.dispatch_table
        reduce = dispatch_table.get(type(obj))
        if reduce:
            rv = reduce(obj)
        else:
            # Check for a __reduce_ex__ method, fall back to __reduce__
            reduce = getattr(obj, "__reduce_ex__", None)
            if reduce:
                rv = reduce(self.proto)
            else:
                reduce = getattr(obj, "__reduce__", None)
                if reduce:
                    rv = reduce()
                else:
                    raise e
        return self.save_reduce(obj=obj, *rv)
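
# A minimal usage sketch (not part of the Pickler code above): pickletools,
# from the standard library, disassembles a pickle so you can see which of
# the save_* handlers produced each opcode (PROTO, EMPTY_DICT, SHORT_BINSTRING,
# BININT1, EMPTY_LIST, APPENDS, SETITEMS, ...).
import pickle
import pickletools

blob = pickle.dumps({'answer': 42, 'items': [1, 2, 3]}, 2)
pickletools.dis(blob)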
Beispiel #44
0
def getattr(attr, obj):
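    # Argument-flipped wrapper around the builtin: the attribute name comes
    # first, which makes it easy to partially apply over a fixed name.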
    return __builtin__.getattr(obj, attr)
Beispiel #45
0
def _uno_struct__getattr__(self, name):
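    # Delegates attribute lookup to the wrapped value (a UNO struct, going by
    # the function name); reading self.__dict__ directly keeps the lookup from
    # re-entering __getattr__.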
    return __builtin__.getattr(self.__dict__["value"], name)