Example #1
    def loads(self, msg, encoding=None, raw=False):
        '''
        Run the correct loads serialization format

        :param encoding: Useful for Python 3 support. If the msgpack data
                         was encoded using "use_bin_type=True", this will
                         differentiate between the 'bytes' type and the
                         'str' type by decoding contents with 'str' type
                         to what the encoding was set as. Recommended
                         encoding is 'utf-8' when using Python 3.
                         If the msgpack data was not encoded using
                         "use_bin_type=True", it will try to decode
                         all 'bytes' and 'str' data (the distinction has
                         been lost in this case) to what the encoding is
                         set as. In this case, it will fail if any of
                         the contents cannot be converted.
        '''
        try:

            def ext_type_decoder(code, data):
                if code == 78:
                    data = salt.utils.stringutils.to_unicode(data)
                    return datetime.datetime.strptime(data,
                                                      '%Y%m%dT%H:%M:%S.%f')
                return data

            gc.disable()  # performance optimization for msgpack
            if msgpack.version >= (0, 4, 0):
                # msgpack only supports 'encoding' starting in 0.4.0.
                # Due to this, if we don't need it, don't pass it at all so
                # that under Python 2 we can still work with older versions
                # of msgpack.
                try:
                    ret = msgpack.loads(msg,
                                        use_list=True,
                                        ext_hook=ext_type_decoder,
                                        encoding=encoding)
                except UnicodeDecodeError:
                    # msg contains binary data
                    ret = msgpack.loads(msg,
                                        use_list=True,
                                        ext_hook=ext_type_decoder)
            else:
                ret = msgpack.loads(msg,
                                    use_list=True,
                                    ext_hook=ext_type_decoder)
            if six.PY3 and encoding is None and not raw:
                ret = salt.transport.frame.decode_embedded_strs(ret)
        except Exception as exc:
            log.critical(
                'Could not deserialize msgpack message. This often happens '
                'when trying to read a file not in binary mode. '
                'To see message payload, enable debug logging and retry. '
                'Exception: %s', exc)
            log.debug('Msgpack deserialization failure on message: %s', msg)
            gc.collect()
            raise
        finally:
            gc.enable()
        return ret
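As a hedged aside (not part of the Salt sources): the ext_type_decoder above implies a matching encoder on the dumps side. The sketch below round-trips a datetime through msgpack ExtType code 78 using only the public msgpack API; the helper names are invented here, and raw=False assumes a reasonably recent msgpack (0.5.2 or later).

import datetime
import msgpack

def dt_default(obj):
    # Hypothetical encoder counterpart: pack datetimes as ExtType code 78
    # carrying the same formatted string the decoder above expects.
    if isinstance(obj, datetime.datetime):
        return msgpack.ExtType(78, obj.strftime('%Y%m%dT%H:%M:%S.%f').encode('ascii'))
    raise TypeError('cannot serialize {0!r}'.format(obj))

def dt_ext_hook(code, data):
    # Mirror of ext_type_decoder above.
    if code == 78:
        return datetime.datetime.strptime(data.decode('ascii'), '%Y%m%dT%H:%M:%S.%f')
    return msgpack.ExtType(code, data)

packed = msgpack.packb({'when': datetime.datetime(2020, 1, 2, 3, 4, 5)},
                       default=dt_default, use_bin_type=True)
restored = msgpack.unpackb(packed, ext_hook=dt_ext_hook, raw=False)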
Example #2
    def dumps(self, msg):
        '''
        Run the correct dumps serialization format
        '''
        try:
            return msgpack.dumps(msg)
        except TypeError:
            if msgpack.version >= (0, 2, 0):
                # Should support OrderedDict serialization, so, let's
                # raise the exception
                raise

            # msgpack is < 0.2.0, let's make its life easier
            # Since OrderedDict is identified as a dictionary, we can't
            # make use of msgpack custom types, we will need to convert by
            # hand.
            # This means iterating through all elements of a dictionary or
            # list/tuple
            def odict_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
                        obj[key] = odict_encoder(value)
                    return dict(obj)
                elif isinstance(obj, (list, tuple)):
                    obj = list(obj)
                    for idx, entry in enumerate(obj):
                        obj[idx] = odict_encoder(entry)
                    return obj
                return obj
            return msgpack.dumps(odict_encoder(msg))
        except SystemError as exc:
            log.critical('Unable to serialize message! Consider upgrading msgpack. '
                         'Message which failed was %s '
                         'with exception %s', msg, exc)
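For illustration only, a standalone restatement of what the odict_encoder fallback achieves: flattening OrderedDicts (and tuples) into plain dicts and lists so an old msgpack release can serialize them. This is a sketch, not Salt's API; the helper name is invented here.

from collections import OrderedDict
import msgpack

def to_plain(obj):
    # Recursively replace mappings and tuples with plain dicts/lists,
    # mirroring the odict_encoder recursion above.
    if isinstance(obj, dict):
        return dict((key, to_plain(value)) for key, value in obj.items())
    if isinstance(obj, (list, tuple)):
        return [to_plain(entry) for entry in obj]
    return obj

msg = OrderedDict([('jid', '20170101'), ('return', OrderedDict([('ok', True)]))])
packed = msgpack.dumps(to_plain(msg))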
Example #4
    def loads(self, msg, encoding=None):
        '''
        Run the correct loads serialization format

        :param encoding: Useful for Python 3 support. If the msgpack data
                         was encoded using "use_bin_type=True", this will
                         differentiate between the 'bytes' type and the
                         'str' type by decoding contents with 'str' type
                         to what the encoding was set as. Recommended
                         encoding is 'utf-8' when using Python 3.
                         If the msgpack data was not encoded using
                         "use_bin_type=True", it will try to decode
                         all 'bytes' and 'str' data (the distinction has
                         been lost in this case) to what the encoding is
                         set as. In this case, it will fail if any of
                         the contents cannot be converted.
        '''
        try:
            gc.disable()  # performance optimization for msgpack
            if msgpack.version >= (0, 4, 0):
                # msgpack only supports 'encoding' starting in 0.4.0.
                # Due to this, if we don't need it, don't pass it at all so
                # that under Python 2 we can still work with older versions
                # of msgpack.
                return msgpack.loads(msg, use_list=True, encoding=encoding)
            else:
                return msgpack.loads(msg, use_list=True)
        except Exception as exc:
            log.critical('Could not deserialize msgpack message: {0}. '
                         'This often happens when trying to read a file not in binary mode. '
                         'Please open an issue and include the following error: {1}'.format(msg, exc))
            raise
        finally:
            gc.enable()
Example #5
    def _write_state(self):
        '''
        Writes the current status to the defined status-file.
        This includes the current pid, events received/handled
        and threads created/joined.
        '''
        try:
            # write the info to the specified log
            statf = open(self.state_file, 'w')
            statf.writelines(simplejson.dumps({'events_received':self.events_rec,
                                               'events_handled':self.events_han,
                                               'threads_created':self.threads_cre,
                                               'threads_joined':self.threads_join}
                                             ))
            # if we have the same pid as the pidfile, we are the running daemon
            # and also print the current counters to the logfile with 'info'
            if os.getpid() == self.pid:
                log.info("running with pid {0}".format(self.pid))
                log.info("events (han/recv): {0}/{1}".format(self.events_han,
                                                             self.events_rec))
                log.info("threads (cre/joi):{0}/{1}".format(self.threads_cre,
                                                            self.threads_join))


            statf.write("\n")
            statf.close()
            sys.stdout.flush()
        except IOError as ioerr:
            log.critical("Failed to write state to {0}".format(self.state_file))
            log.exception(ioerr)
        except OSError as oserr:
            log.critical("Failed to write state to {0}".format(self.state_file))
            log.exception(oserr)
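A hedged variant of the same status dump (not from the project): using json and a with block so the file handle is closed even if serialization or the write fails. The field names are carried over from the example above; the function and its signature are invented for illustration.

import json
import logging

log = logging.getLogger(__name__)

def write_state(state_file, counters):
    # counters: a dict with the same keys the example above writes out.
    try:
        with open(state_file, 'w') as statf:
            statf.write(json.dumps(counters) + '\n')
    except (IOError, OSError) as err:
        log.critical("Failed to write state to {0}".format(state_file))
        log.exception(err)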
Example #6
    def dumps(self, msg):
        '''
        Run the correct dumps serialization format
        '''
        try:
            return msgpack.dumps(msg)
        except OverflowError:
            # msgpack can't handle the very long Python longs for jids
            # Convert any very long longs to strings
            # We borrow the technique used by TypeError below
            def verylong_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
                        obj[key] = verylong_encoder(value)
                    return dict(obj)
                elif isinstance(obj, (list, tuple)):
                    obj = list(obj)
                    for idx, entry in enumerate(obj):
                        obj[idx] = verylong_encoder(entry)
                    return obj
                if six.PY2 and isinstance(obj, long) and obj > pow(2, 64):  # pylint: disable=incompatible-py3-code
                    return str(obj)
                elif six.PY3 and isinstance(obj, int) and obj > pow(2, 64):
                    return str(obj)
                else:
                    return obj

            return msgpack.dumps(verylong_encoder(msg))
        except TypeError:
            if msgpack.version >= (0, 2, 0):
                # Should support OrderedDict serialization, so, let's
                # raise the exception
                raise

            # msgpack is < 0.2.0, let's make its life easier
            # Since OrderedDict is identified as a dictionary, we can't
            # make use of msgpack custom types, we will need to convert by
            # hand.
            # This means iterating through all elements of a dictionary or
            # list/tuple
            def odict_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
                        obj[key] = odict_encoder(value)
                    return dict(obj)
                elif isinstance(obj, (list, tuple)):
                    obj = list(obj)
                    for idx, entry in enumerate(obj):
                        obj[idx] = odict_encoder(entry)
                    return obj
                return obj

            return msgpack.dumps(odict_encoder(msg))
        except SystemError as exc:
            log.critical(
                'Unable to serialize message! Consider upgrading msgpack. '
                'Message which failed was %s '
                'with exception %s', msg, exc)
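To see why verylong_encoder exists at all, a small hedged demonstration against plain msgpack: integers wider than the 64-bit wire types cannot be packed, so the packer raises, and converting the offending value to a string first avoids the failure. The exception class varies by msgpack version, hence the broad catch.

import msgpack

huge_jid = 2 ** 64 + 1  # wider than msgpack's uint64 wire type

try:
    packed = msgpack.dumps({'jid': huge_jid})
except (OverflowError, ValueError):
    # Older msgpack raises PackValueError (a ValueError subclass),
    # newer builds an OverflowError; fall back to a string either way.
    packed = msgpack.dumps({'jid': str(huge_jid)})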
Example #7
def zmq_version():
    '''ZeroMQ python bindings >= 2.1.9 are required'''
    import zmq
    ver = zmq.__version__
    ver_int = int(ver.replace('.', ''))
    if not ver_int >= 219:
        log.critical("ZeroMQ python bindings >= 2.1.9 are required")
        return False
    return True
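One caveat with the check above: int(ver.replace('.', '')) compares digit concatenations, so '2.1.10' becomes 2110 while '2.2.0' becomes 220 and the ordering is no longer meaningful. A hedged alternative sketch compares numeric tuples instead; it only assumes zmq.__version__ is a dotted version string.

import zmq

def zmq_version_ok(minimum=(2, 1, 9)):
    # Compare numeric components instead of a digit-concatenated integer.
    parts = []
    for part in zmq.__version__.split('.')[:3]:
        digits = ''.join(ch for ch in part if ch.isdigit())
        parts.append(int(digits) if digits else 0)
    return tuple(parts) >= minimum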
Example #8
File: verify.py Project: rubic/salt
def zmq_version():
    """ZeroMQ python bindings >= 2.1.9 are required"""
    import zmq

    ver = zmq.__version__
    ver_int = int(ver.replace(".", ""))
    if not ver_int >= 219:
        log.critical("ZeroMQ python bindings >= 2.1.9 are required")
        return False
    return True
Example #9
 def loads(self, msg):
     '''
     Run the correct loads serialization format
     '''
     try:
         return msgpack.loads(msg, use_list=True)
     except Exception as exc:
         log.critical('Could not deserialize msgpack message: {0}. '
                      'This often happens when trying to read a file not in binary mode. '
                      'Please open an issue and include the following error: {1}'.format(msg, exc))
         raise
Example #10
def check_root():
    '''
    Most of the salt scripts need to run as root; this function simply
    verifies that root is the user before the application discovers it.
    '''
    if 'os' in os.environ:
        if os.environ['os'].startswith('Windows'):
            return True
    msg = 'Sorry, the salt must run as root.  http://xkcd.com/838'
    if os.getuid():
        log.critical(msg)
        return False
    return True
Example #11
 def loads(self, msg):
     '''
     Run the correct loads serialization format
     '''
     try:
         return msgpack.loads(msg, use_list=True)
     except Exception as exc:
         log.critical(
             'Could not deserialize msgpack message: {0}. '
             'This often happens when trying to read a file not in binary mode. '
             'Please open an issue and include the following error: {1}'.format(msg, exc))
         raise
Example #12
File: verify.py Project: rubic/salt
def check_root():
    """
    Most of the salt scripts need to run as root; this function simply
    verifies that root is the user before the application discovers it.
    """
    if "os" in os.environ:
        if os.environ["os"].startswith("Windows"):
            return True
    msg = "Sorry, the salt must run as root.  http://xkcd.com/838"
    if os.getuid():
        log.critical(msg)
        return False
    return True
Example #13
 def loads(self, msg):
     '''
     Run the correct loads serialization format
     '''
     try:
         gc.disable()  # performance optimization for msgpack
         return msgpack.loads(msg, use_list=True)
     except Exception as exc:
         log.critical(
             'Could not deserialize msgpack message. '
             'This often happens when trying to read a file not in binary mode. '
             'To see message payload, enable debug logging and retry. Exception: {0}'
             .format(exc))
         log.debug(
             'Msgpack deserialization failure on message: {0}'.format(msg))
         raise
     finally:
         gc.enable()
Example #14
    def loads(self, msg, encoding=None, raw=False):
        '''
        Run the correct loads serialization format

        :param encoding: Useful for Python 3 support. If the msgpack data
                         was encoded using "use_bin_type=True", this will
                         differentiate between the 'bytes' type and the
                         'str' type by decoding contents with 'str' type
                         to what the encoding was set as. Recommended
                         encoding is 'utf-8' when using Python 3.
                         If the msgpack data was not encoded using
                         "use_bin_type=True", it will try to decode
                         all 'bytes' and 'str' data (the distinction has
                         been lost in this case) to what the encoding is
                         set as. In this case, it will fail if any of
                         the contents cannot be converted.
        '''
        try:
            gc.disable()  # performance optimization for msgpack
            if msgpack.version >= (0, 4, 0):
                # msgpack only supports 'encoding' starting in 0.4.0.
                # Due to this, if we don't need it, don't pass it at all so
                # that under Python 2 we can still work with older versions
                # of msgpack.
                ret = msgpack.loads(msg, use_list=True, encoding=encoding)
            else:
                ret = msgpack.loads(msg, use_list=True)
            if six.PY3 and encoding is None and not raw:
                ret = salt.transport.frame.decode_embedded_strs(ret)
        except Exception as exc:
            log.critical('Could not deserialize msgpack message. '
                         'This often happens when trying to read a file not in binary mode. '
                         'To see message payload, enable debug logging and retry. Exception: {0}'.format(exc))
            log.debug('Msgpack deserialization failure on message: {0}'.format(msg))
            gc.collect()
            raise
        finally:
            gc.enable()
        return ret
Example #15
    def _pre_startup(self,
                    opts):
        '''
        Does a startup check to verify that all needed parameters are
        present in the configfile. This is really important
        because we lose stdout in daemon mode and exceptions
        might not be seen by the user.
        '''
        required_general = [ 'sock_dir',
                             'node',
                             'max_workers',
                             'id',
                             'event_limit',
                             'pidfile',
                             'state_file',
                             'state_upd',
                             'dump_timer' ]

        for field in required_general:
            if field not in opts['general']:
                log.critical("Missing parameter " + 
                             "'{0}' in configfile".format(field))
                sys.exit(1)
Example #16
    def listen(self):
        '''
        the main event loop where we receive the events and
        start the workers that dump our data into the database
        '''
        # log on to saltstack's event-bus
        event = salt.utils.event.SaltEvent(self.node,
                                           self.sock_dir)

        # we store our events in a list; we don't really care about the order
        # or what kind of data is put in there. All of that is configured with
        # the sql-template in the configfile
        event_queue = []

        # start our dump_timer
        self.ev_timer.start()

        # this is for logline chronology so the timer-message always comes
        # _before_ the actual startup-message of the listening loop below :-)
        time.sleep(1)

        log.info("entering main event loop")
        log.info("listening on: {0}".format(event.puburi))

        # read everything we can get our hands on
        while True:

            # the zmq-socket does not like ^C very much, make the error
            # a little more graceful. alright, alright, ignore the damn thing,
            # we're exiting anyways...
            try:
                ret = event.get_event(full=True)
            except zmq.ZMQError:
                ret = None

            if ret is None:
                continue

            # if the timer has expired, we may have not received enough
            # events in the queue to reach event_limit, in that case we dump
            # the data anyway to have it in the database
            if self.ev_timer_ev:
                if (len(self.running_workers) < self.max_workers) and \
                   (len(event_queue) > 0):

                    self._init_worker(event_queue)

                    # reset our queue to prevent duplicate entries
                    del event_queue[:]

                    # we reset the timer's ev_timer_ev flag at the end of the loop
                    # so we can update the stats that are logged


            # filter only the events we're interested in. all events have a tag 
            # we can filter them by. we match with a precompiled regex
            if 'tag' in ret:

                # filter out events with an empty tag. those are special
                if ret['tag'] != '':
                       
                    # run through our configured events and try to match the 
                    # current events tag against the ones we're interested in
                    for key in self.event_map.keys():
                        if self.event_map[key]['tag'].match(ret['tag']):
                            log.debug("matching on {0}:{1}".format(key, 
                                                                   ret['tag']))

                            prio = self.event_map[key].get('prio', 0)

                            # push prio1-events directly into a worker
                            if prio > 0:
                                log.debug('Prio1 event found, pushing immediately!')
                                self.events_han += 1
                                self._init_worker([ret])
                            else:
                                event_queue.append(ret)
                                self.events_han += 1

            # once we reach the event_limit, start a worker that
            # writes that data in to the database
            if len(event_queue) >= self.event_limit:

                # only start a worker if not too many workers are running
                if len(self.running_workers) < self.max_workers:
                    self._init_worker(event_queue)
                    # reset the timer
                    self.ev_timer.reset()

                    # reset our queue to prevent duplicate entries
                    del event_queue[:]

                else:
                    # FIXME: we need to handle this situation somehow if
                    # too many workers are running. just flush the events?
                    # there really is no sane way except queueing more and more
                    # until some sort of limit is reached and we care more about
                    # our saltmaster than about the collected events!
                    log.critical("too many workers running, loosing data!!!")
                   
            # a list for the workers that are still running
            clean_workers = []

            # run through all the workers and join() the ones
            # that have finished dumping their data and keep
            # the running ones on our list
            for worker in self.running_workers:
                if worker.is_alive():
                    clean_workers.append(worker)
                else:
                    worker.join()
                    log.debug("joined worker #{0}".format(worker.getName()))
                    self.threads_join += 1

            # get rid of the old reference  and set a new one
            # FIXME: is this really necessary?
            del self.running_workers

            self.running_workers = clean_workers
            self.events_rec += 1

            # we update the stats whenever 'events received % state_upd == 0'
            # or if we received a timer event from our ResetTimer
            if (self.events_rec % self.state_upd) == 0:
                self._write_state()
            elif self.ev_timer_ev:
                self._write_state()
                self.ev_timer_ev = False

        log.info("listen loop ended...")                
Example #17
    def dumps(self, msg, use_bin_type=False):
        '''
        Run the correct dumps serialization format

        :param use_bin_type: Useful for Python 3 support. Tells msgpack to
                             differentiate between 'str' and 'bytes' types
                             by encoding them differently.
                             Since this changes the wire protocol, this
                             option should not be used outside of IPC.
        '''
        # Got this trick from msgpack.fallback._pack
        while True:
            try:
                if msgpack.version >= (0, 4, 0):
                    # msgpack only supports 'use_bin_type' starting in 0.4.0.
                    # Due to this, if we don't need it, don't pass it at all so
                    # that under Python 2 we can still work with older versions
                    # of msgpack.
                    return msgpack.dumps(msg, use_bin_type=use_bin_type)
                else:
                    return msgpack.dumps(msg)
            except (OverflowError, msgpack.exceptions.PackValueError):
                # msgpack can't handle the very long Python longs for jids
                # Convert any very long longs to strings
                # We borrow the technique used by TypeError below
                def verylong_encoder(obj):
                    if isinstance(obj, dict):
                        for key, value in six.iteritems(obj.copy()):
                            obj[key] = verylong_encoder(value)
                        return dict(obj)
                    elif isinstance(obj, (list, tuple)):
                        obj = list(obj)
                        for idx, entry in enumerate(obj):
                            obj[idx] = verylong_encoder(entry)
                        return obj
                    # This is a spurious lint failure as we are gating this check
                    # behind a check for six.PY2.
                    if six.PY2 and isinstance(obj, long) and obj > pow(2, 64):  # pylint: disable=incompatible-py3-code
                        return six.text_type(obj)
                    elif six.PY3 and isinstance(obj, int) and obj > pow(2, 64):
                        return six.text_type(obj)
                    else:
                        return obj
                if msgpack.version >= (0, 4, 0):
                    return msgpack.dumps(verylong_encoder(msg), use_bin_type=use_bin_type)
                else:
                    return msgpack.dumps(verylong_encoder(msg))
            except TypeError as exc:
                # msgpack doesn't support datetime.datetime or datetime.date datatype
                # So here we convert it to a string.
                # Note that if you want to be able to decode data, you will have to wrap
                # the object in a msgpack.ExtType-object. The typenumber is not predefined,
                # (I couldn't find a list anywhere), so keep a list somewhere!
                # Also, if you wrap objects as msgpack.ExtType, you HAVE TO provide
                # an ext_hook callable when unpacking (decoding) the msgpack.ExtType object.
                # See also: https://pypi.python.org/pypi/msgpack-python
                def datetime_encode(obj):
                    '''
                    Convert datetime.datetime object to formatted string
                    '''
                    return obj.strftime('%Y%m%dT%H:%M:%S.%f')

                def date_encode(obj):
                    '''
                    Convert datetime.date object to formatted string
                    '''
                    return obj.strftime('%Y%m%d')

                def recursive_encoder(obj, datatype, fn_encode):
                    '''
                    Recursively encodes every instance of datatype found in obj,
                    which can be a dict, list or value of type datatype.
                    Uses fn_encode to do the encoding.
                    '''
                    if datatype in [list, dict, tuple]:
                        raise TypeError('Recursive_encoder called with '
                                        'unsupported datatype: {}'
                                        .format(datatype))
                    if isinstance(obj, dict):
                        for key, value in six.iteritems(obj.copy()):
                            encodedkey = recursive_encoder(key, datatype, fn_encode)
                            if key != encodedkey:
                                del obj[key]
                                key = encodedkey
                            obj[key] = recursive_encoder(value, datatype, fn_encode)
                        return dict(obj)
                    elif isinstance(obj, (list, tuple)):
                        obj = list(obj)
                        for idx, entry in enumerate(obj):
                            obj[idx] = recursive_encoder(entry, datatype, fn_encode)
                        return obj
                    if isinstance(obj, datatype):
                        return fn_encode(obj)
                    else:
                        return obj

                def immutable_encoder(obj):
                    '''
                    Convert immutable dict,list,set to regular dict,list,set
                    '''
                    log.debug('IMMUTABLE OBJ: %s', obj)
                    if isinstance(obj, immutabletypes.ImmutableDict):
                        return dict(obj)
                    if isinstance(obj, immutabletypes.ImmutableList):
                        return list(obj)
                    if isinstance(obj, immutabletypes.ImmutableSet):
                        return set(obj)

                fixed_message_data = None

                if 'datetime.datetime' in six.text_type(exc):
                    fixed_message_data = recursive_encoder(msg, datetime.datetime, datetime_encode)
                elif 'datetime.date' in six.text_type(exc):
                    fixed_message_data = recursive_encoder(msg, datetime.date, date_encode)
                elif 'Immutable' in six.text_type(exc):
                    fixed_message_data = immutable_encoder(msg)

                if fixed_message_data is not None:
                    msg = fixed_message_data
                    # Retry packing the data, just in case it contains multiple
                    # unsupported types
                    continue

                if msgpack.version >= (0, 2, 0):
                    # Should support OrderedDict serialization, so, let's
                    # raise the exception
                    raise

                # msgpack is < 0.2.0, let's make its life easier
                # Since OrderedDict is identified as a dictionary, we can't
                # make use of msgpack custom types, we will need to convert by
                # hand.
                # This means iterating through all elements of a dictionary or
                # list/tuple
                def odict_encoder(obj):
                    if isinstance(obj, dict):
                        for key, value in six.iteritems(obj.copy()):
                            obj[key] = odict_encoder(value)
                        return dict(obj)
                    elif isinstance(obj, (list, tuple)):
                        obj = list(obj)
                        for idx, entry in enumerate(obj):
                            obj[idx] = odict_encoder(entry)
                        return obj
                    return obj
                # We're on < 0.2.0, so no use_bin_type is possible
                return msgpack.dumps(odict_encoder(msg))
            except (SystemError, TypeError) as exc:  # pylint: disable=W0705
                log.critical(
                    'Unable to serialize message! Consider upgrading msgpack. '
                    'Message which failed was %s, with exception %s', msg, exc
                )
                break
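A hedged usage sketch of the recursive_encoder pattern above, specialized to one datatype outside of Salt: walking a nested structure and string-encoding every datetime.date it contains. The helper name and the payload are illustrative only.

import datetime

def encode_dates(obj):
    # Same shape as recursive_encoder(obj, datetime.date, date_encode),
    # collapsed to a single datatype for brevity.
    if isinstance(obj, dict):
        return dict((key, encode_dates(value)) for key, value in obj.items())
    if isinstance(obj, (list, tuple)):
        return [encode_dates(entry) for entry in obj]
    if isinstance(obj, datetime.date):
        return obj.strftime('%Y%m%d')
    return obj

payload = {'start': datetime.date(2021, 6, 1), 'tags': ['a', 'b']}
encoded = encode_dates(payload)  # {'start': '20210601', 'tags': ['a', 'b']}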
Example #18
    def dumps(self, msg, use_bin_type=False):
        '''
        Run the correct dumps serialization format

        :param use_bin_type: Useful for Python 3 support. Tells msgpack to
                             differentiate between 'str' and 'bytes' types
                             by encoding them differently.
                             Since this changes the wire protocol, this
                             option should not be used outside of IPC.
        '''
        try:
            if msgpack.version >= (0, 4, 0):
                # msgpack only supports 'use_bin_type' starting in 0.4.0.
                # Due to this, if we don't need it, don't pass it at all so
                # that under Python 2 we can still work with older versions
                # of msgpack.
                return msgpack.dumps(msg, use_bin_type=use_bin_type)
            else:
                return msgpack.dumps(msg)
        except (OverflowError, msgpack.exceptions.PackValueError):
            # msgpack can't handle the very long Python longs for jids
            # Convert any very long longs to strings
            # We borrow the technique used by TypeError below
            def verylong_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
                        obj[key] = verylong_encoder(value)
                    return dict(obj)
                elif isinstance(obj, (list, tuple)):
                    obj = list(obj)
                    for idx, entry in enumerate(obj):
                        obj[idx] = verylong_encoder(entry)
                    return obj
                if six.PY2 and isinstance(obj, long) and obj > pow(2, 64):
                    return str(obj)
                elif six.PY3 and isinstance(obj, int) and obj > pow(2, 64):
                    return str(obj)
                else:
                    return obj
            if msgpack.version >= (0, 4, 0):
                return msgpack.dumps(verylong_encoder(msg), use_bin_type=use_bin_type)
            else:
                return msgpack.dumps(verylong_encoder(msg))
        except TypeError as e:
            # msgpack doesn't support datetime.datetime datatype
            # So here we have converted datetime.datetime to custom datatype
            # This is msgpack Extended types numbered 78
            def default(obj):
                return msgpack.ExtType(78, obj)

            def dt_encode(obj):
                datetime_str = obj.strftime("%Y%m%dT%H:%M:%S.%f")
                if msgpack.version >= (0, 4, 0):
                    return msgpack.packb(datetime_str, default=default, use_bin_type=use_bin_type)
                else:
                    return msgpack.packb(datetime_str, default=default)

            def datetime_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
                        obj[key] = datetime_encoder(value)
                    return dict(obj)
                elif isinstance(obj, (list, tuple)):
                    obj = list(obj)
                    for idx, entry in enumerate(obj):
                        obj[idx] = datetime_encoder(entry)
                    return obj
                if isinstance(obj, datetime.datetime):
                    return dt_encode(obj)
                else:
                    return obj

            if "datetime.datetime" in str(e):
                if msgpack.version >= (0, 4, 0):
                    return msgpack.dumps(datetime_encoder(msg), use_bin_type=use_bin_type)
                else:
                    return msgpack.dumps(datetime_encoder(msg))

            if msgpack.version >= (0, 2, 0):
                # Should support OrderedDict serialization, so, let's
                # raise the exception
                raise

            # msgpack is < 0.2.0, let's make its life easier
            # Since OrderedDict is identified as a dictionary, we can't
            # make use of msgpack custom types, we will need to convert by
            # hand.
            # This means iterating through all elements of a dictionary or
            # list/tuple
            def odict_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
                        obj[key] = odict_encoder(value)
                    return dict(obj)
                elif isinstance(obj, (list, tuple)):
                    obj = list(obj)
                    for idx, entry in enumerate(obj):
                        obj[idx] = odict_encoder(entry)
                    return obj
                return obj
            if msgpack.version >= (0, 4, 0):
                return msgpack.dumps(odict_encoder(msg), use_bin_type=use_bin_type)
            else:
                return msgpack.dumps(odict_encoder(msg))
        except (SystemError, TypeError) as exc:  # pylint: disable=W0705
            log.critical('Unable to serialize message! Consider upgrading msgpack. '
                         'Message which failed was %s '
                         'with exception %s', msg, exc)
Example #19
    def tune_in(self):
        '''
        Lock onto the publisher. This is the main event loop for the minion
        '''
        log.info(
            '{0} is starting as user \'{1}\''.format(
                self.__class__.__name__,
                getpass.getuser()
            )
        )
        log.debug('Minion "{0}" trying to tune in'.format(self.opts['id']))
        self.context = zmq.Context()

        # Prepare the minion event system
        #
        # Start with the publish socket
        id_hash = hashlib.md5(self.opts['id']).hexdigest()
        epub_sock_path = os.path.join(
            self.opts['sock_dir'],
            'minion_event_{0}_pub.ipc'.format(id_hash)
        )
        epull_sock_path = os.path.join(
            self.opts['sock_dir'],
            'minion_event_{0}_pull.ipc'.format(id_hash)
        )
        self.epub_sock = self.context.socket(zmq.PUB)
        if self.opts.get('ipc_mode', '') == 'tcp':
            epub_uri = 'tcp://127.0.0.1:{0}'.format(
                self.opts['tcp_pub_port']
            )
            epull_uri = 'tcp://127.0.0.1:{0}'.format(
                self.opts['tcp_pull_port']
            )
        else:
            epub_uri = 'ipc://{0}'.format(epub_sock_path)
            salt.utils.check_ipc_path_max_len(epub_uri)
            epull_uri = 'ipc://{0}'.format(epull_sock_path)
            salt.utils.check_ipc_path_max_len(epull_uri)
        log.debug(
            '{0} PUB socket URI: {1}'.format(
                self.__class__.__name__, epub_uri
            )
        )
        log.debug(
            '{0} PULL socket URI: {1}'.format(
                self.__class__.__name__, epull_uri
            )
        )

        # Create the pull socket
        self.epull_sock = self.context.socket(zmq.PULL)
        # Bind the event sockets
        self.epub_sock.bind(epub_uri)
        self.epull_sock.bind(epull_uri)
        # Restrict access to the sockets
        if not self.opts.get('ipc_mode', '') == 'tcp':
            os.chmod(
                epub_sock_path,
                448
            )
            os.chmod(
                epull_sock_path,
                448
            )

        self.poller = zmq.Poller()
        self.epoller = zmq.Poller()
        self.socket = self.context.socket(zmq.SUB)
        self.socket.setsockopt(zmq.SUBSCRIBE, '')
        self.socket.setsockopt(zmq.IDENTITY, self.opts['id'])
        if hasattr(zmq, 'RECONNECT_IVL_MAX'):
            self.socket.setsockopt(
                zmq.RECONNECT_IVL_MAX, self.opts['recon_max']
            )
        if hasattr(zmq, 'TCP_KEEPALIVE'):
            self.socket.setsockopt(
                zmq.TCP_KEEPALIVE, self.opts['tcp_keepalive']
            )
            self.socket.setsockopt(
                zmq.TCP_KEEPALIVE_IDLE, self.opts['tcp_keepalive_idle']
            )
            self.socket.setsockopt(
                zmq.TCP_KEEPALIVE_CNT, self.opts['tcp_keepalive_cnt']
            )
            self.socket.setsockopt(
                zmq.TCP_KEEPALIVE_INTVL, self.opts['tcp_keepalive_intvl']
            )
        if hasattr(zmq, 'IPV4ONLY'):
            self.socket.setsockopt(
                zmq.IPV4ONLY, int(not int(self.opts.get('ipv6_enable', False)))
            )
        self.socket.connect(self.master_pub)
        self.poller.register(self.socket, zmq.POLLIN)
        self.epoller.register(self.epull_sock, zmq.POLLIN)
        # Send an event to the master that the minion is live
        self._fire_master(
            'Minion {0} started at {1}'.format(
            self.opts['id'],
            time.asctime()
            ),
            'minion_start'
        )

        if self.opts['multiprocessing'] and not salt.utils.is_windows():
            signal.signal(signal.SIGCHLD, self.handle_sigchld)
        # Make sure to gracefully handle SIGUSR1
        enable_sigusr1_handler()

        # On first startup execute a state run if configured to do so
        self._state_run()

        while True:
            try:
                self.schedule.eval()
                socks = dict(self.poller.poll(
                    self.opts['loop_interval'] * 1000)
                )
                if self.socket in socks and socks[self.socket] == zmq.POLLIN:
                    payload = self.serial.loads(self.socket.recv())
                    self._handle_payload(payload)
                time.sleep(0.05)
                # Clean up the minion processes which have been executed and
                # have finished
                # Check if modules and grains need to be refreshed
                self.passive_refresh()
                # Check the event system
                if self.epoller.poll(1):
                    try:
                        package = self.epull_sock.recv(zmq.NOBLOCK)
                        self.epub_sock.send(package)
                    except Exception:
                        pass
            except zmq.ZMQError:
                # This is thrown by the interrupt caused by python handling the
                # SIGCHLD. This is a safe error and we just start the poll
                # again
                continue
            except Exception:
                log.critical(traceback.format_exc())
Example #20
    def dumps(self, msg):
        '''
        Run the correct dumps serialization format
        '''
        try:
            return msgpack.dumps(msg)
        except (OverflowError, msgpack.exceptions.PackValueError):
            # msgpack can't handle the very long Python longs for jids
            # Convert any very long longs to strings
            # We borrow the technique used by TypeError below
            def verylong_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
                        obj[key] = verylong_encoder(value)
                    return dict(obj)
                elif isinstance(obj, (list, tuple)):
                    obj = list(obj)
                    for idx, entry in enumerate(obj):
                        obj[idx] = verylong_encoder(entry)
                    return obj
                if six.PY2 and isinstance(obj, long) and obj > pow(2, 64):  # pylint: disable=incompatible-py3-code
                    return str(obj)
                elif six.PY3 and isinstance(obj, int) and obj > pow(2, 64):
                    return str(obj)
                else:
                    return obj
            return msgpack.dumps(verylong_encoder(msg))
        except TypeError as e:
            # msgpack doesn't support datetime.datetime datatype
            # So here we have converted datetime.datetime to custom datatype
            # This is msgpack Extended types numbered 78
            def default(obj):
                return msgpack.ExtType(78, obj)

            def dt_encode(obj):
                datetime_str = obj.strftime("%Y%m%dT%H:%M:%S.%f")
                return msgpack.packb(datetime_str, default=default)

            def datetime_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
                        obj[key] = datetime_encoder(value)
                    return dict(obj)
                elif isinstance(obj, (list, tuple)):
                    obj = list(obj)
                    for idx, entry in enumerate(obj):
                        obj[idx] = datetime_encoder(entry)
                    return obj
                if isinstance(obj, datetime.datetime):
                    return dt_encode(obj)
                else:
                    return obj

            if "datetime.datetime" in str(e):
                return msgpack.dumps(datetime_encoder(msg))

            if msgpack.version >= (0, 2, 0):
                # Should support OrderedDict serialization, so, let's
                # raise the exception
                raise

            # msgpack is < 0.2.0, let's make its life easier
            # Since OrderedDict is identified as a dictionary, we can't
            # make use of msgpack custom types, we will need to convert by
            # hand.
            # This means iterating through all elements of a dictionary or
            # list/tuple
            def odict_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
                        obj[key] = odict_encoder(value)
                    return dict(obj)
                elif isinstance(obj, (list, tuple)):
                    obj = list(obj)
                    for idx, entry in enumerate(obj):
                        obj[idx] = odict_encoder(entry)
                    return obj
                return obj
            return msgpack.dumps(odict_encoder(msg))
        except (SystemError, TypeError) as exc:
            log.critical('Unable to serialize message! Consider upgrading msgpack. '
                         'Message which failed was %s '
                         'with exception %s', msg, exc)
Example #21
    def _write_state(self):
        '''
        Writes a current status to the defined status-file
        this includes the current pid, events received/handled
        and threads created/joined
        '''
        ev_hdl_per_s = float((float(self.events_han - self.stat_hdl_count)) / float(self.state_timer_intrvl))
        ev_tot_per_s = float((float(self.events_rec - self.stat_rec_count)) / float(self.state_timer_intrvl))

        if self.config['stat_worker']:
            stat_data = {
                'events_rec': self.events_rec,
                'events_hdl': self.events_han,
                'events_hdl_sec': round(ev_hdl_per_s, 2),
                'events_tot_sec': round(ev_tot_per_s, 2),
                'threads_created': self.threads_cre,
                'threads_joined': self.threads_join
            }

            self.threads_cre += 1

            st_worker = SaltEventsdWorker(
                stat_data,
                self.threads_cre,
                None,
                self.backends,
                **self.opts
            )
            st_worker.start()

            try:
                self.running_workers.append(st_worker)
            except AttributeError:
                log.error('self is missing running_workers')
                try:
                    log.info(self)
                    log.info(dir(self))
                except Exception:
                    log.error('Failed to dump dir(self)')

        try:
            # write the info to the specified log
            statf = open(self.state_file, 'w')
            statf.writelines(
                json.dumps({
                    'events_rec': self.events_rec,
                    'events_hdl': self.events_han,
                    'events_hdl_sec': round(ev_hdl_per_s, 2),
                    'events_tot_sec': round(ev_tot_per_s, 2),
                    'threads_created': self.threads_cre,
                    'threads_joined': self.threads_join
                })
            )

            # if we have the same pid as the pidfile, we are the running daemon
            # and also print the current counters to the logfile with 'info'
            if os.getpid() == self.pid:
                log.info("Running with pid {0}".format(self.pid))
                log.info("Events (han/recv): {0}/{1}".format(
                    self.events_han,
                    self.events_rec,
                ))
                log.info("Threads (cre/joi):{0}/{1}".format(
                    self.threads_cre,
                    self.threads_join,
                ))

            statf.write("\n")
            statf.close()
            sys.stdout.flush()
        except IOError as ioerr:
            log.critical("Failed to write state to {0}".format(self.state_file))
            log.exception(ioerr)
        except OSError as oserr:
            log.critical("Failed to write state to {0}".format(self.state_file))
            log.exception(oserr)
        self.stat_rec_count = self.events_rec
        self.stat_hdl_count = self.events_han
Example #22
    def dumps(self, msg, use_bin_type=False):
        '''
        Run the correct dumps serialization format

        :param use_bin_type: Useful for Python 3 support. Tells msgpack to
                             differentiate between 'str' and 'bytes' types
                             by encoding them differently.
                             Since this changes the wire protocol, this
                             option should not be used outside of IPC.
        '''
        try:
            if msgpack.version >= (0, 4, 0):
                # msgpack only supports 'use_bin_type' starting in 0.4.0.
                # Due to this, if we don't need it, don't pass it at all so
                # that under Python 2 we can still work with older versions
                # of msgpack.
                return msgpack.dumps(msg, use_bin_type=use_bin_type)
            else:
                return msgpack.dumps(msg)
        except (OverflowError, msgpack.exceptions.PackValueError):
            # msgpack can't handle the very long Python longs for jids
            # Convert any very long longs to strings
            # We borrow the technique used by TypeError below
            def verylong_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
                        obj[key] = verylong_encoder(value)
                    return dict(obj)
                elif isinstance(obj, (list, tuple)):
                    obj = list(obj)
                    for idx, entry in enumerate(obj):
                        obj[idx] = verylong_encoder(entry)
                    return obj
                # This is a spurious lint failure as we are gating this check
                # behind a check for six.PY2.
                if six.PY2 and isinstance(obj, long) and obj > pow(2, 64):  # pylint: disable=incompatible-py3-code
                    return str(obj)
                elif six.PY3 and isinstance(obj, int) and obj > pow(2, 64):
                    return str(obj)
                else:
                    return obj
            if msgpack.version >= (0, 4, 0):
                return msgpack.dumps(verylong_encoder(msg), use_bin_type=use_bin_type)
            else:
                return msgpack.dumps(verylong_encoder(msg))
        except TypeError as e:
            # msgpack doesn't support datetime.datetime datatype
            # So here we have converted datetime.datetime to custom datatype
            # This is msgpack Extended types numbered 78
            def default(obj):
                return msgpack.ExtType(78, obj)

            def dt_encode(obj):
                datetime_str = obj.strftime("%Y%m%dT%H:%M:%S.%f")
                if msgpack.version >= (0, 4, 0):
                    return msgpack.packb(datetime_str, default=default, use_bin_type=use_bin_type)
                else:
                    return msgpack.packb(datetime_str, default=default)

            def datetime_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
                        obj[key] = datetime_encoder(value)
                    return dict(obj)
                elif isinstance(obj, (list, tuple)):
                    obj = list(obj)
                    for idx, entry in enumerate(obj):
                        obj[idx] = datetime_encoder(entry)
                    return obj
                if isinstance(obj, datetime.datetime):
                    return dt_encode(obj)
                else:
                    return obj

            def immutable_encoder(obj):
                log.debug('IMMUTABLE OBJ: {0}'.format(obj))
                if isinstance(obj, immutabletypes.ImmutableDict):
                    return dict(obj)
                if isinstance(obj, immutabletypes.ImmutableList):
                    return list(obj)
                if isinstance(obj, immutabletypes.ImmutableSet):
                    return set(obj)

            if "datetime.datetime" in str(e):
                if msgpack.version >= (0, 4, 0):
                    return msgpack.dumps(datetime_encoder(msg), use_bin_type=use_bin_type)
                else:
                    return msgpack.dumps(datetime_encoder(msg))
            elif "Immutable" in str(e):
                if msgpack.version >= (0, 4, 0):
                    return msgpack.dumps(msg, default=immutable_encoder, use_bin_type=use_bin_type)
                else:
                    return msgpack.dumps(msg, default=immutable_encoder)

            if msgpack.version >= (0, 2, 0):
                # Should support OrderedDict serialization, so, let's
                # raise the exception
                raise

            # msgpack is < 0.2.0, let's make its life easier
            # Since OrderedDict is identified as a dictionary, we can't
            # make use of msgpack custom types, we will need to convert by
            # hand.
            # This means iterating through all elements of a dictionary or
            # list/tuple
            def odict_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
                        obj[key] = odict_encoder(value)
                    return dict(obj)
                elif isinstance(obj, (list, tuple)):
                    obj = list(obj)
                    for idx, entry in enumerate(obj):
                        obj[idx] = odict_encoder(entry)
                    return obj
                return obj
            if msgpack.version >= (0, 4, 0):
                return msgpack.dumps(odict_encoder(msg), use_bin_type=use_bin_type)
            else:
                return msgpack.dumps(odict_encoder(msg))
        except (SystemError, TypeError) as exc:  # pylint: disable=W0705
            log.critical('Unable to serialize message! Consider upgrading msgpack. '
                         'Message which failed was %s '
                         'with exception %s', msg, exc)
Example #23
    def loads(self, msg, encoding=None, raw=False):
        """
        Run the correct loads serialization format

        :param encoding: Useful for Python 3 support. If the msgpack data
                         was encoded using "use_bin_type=True", this will
                         differentiate between the 'bytes' type and the
                         'str' type by decoding contents with 'str' type
                         to what the encoding was set as. Recommended
                         encoding is 'utf-8' when using Python 3.
                         If the msgpack data was not encoded using
                         "use_bin_type=True", it will try to decode
                         all 'bytes' and 'str' data (the distinction has
                         been lost in this case) to what the encoding is
                         set as. In this case, it will fail if any of
                         the contents cannot be converted.
        """
        try:

            def ext_type_decoder(code, data):
                if code == 78:
                    data = salt.utils.stringutils.to_unicode(data)
                    return datetime.datetime.strptime(data,
                                                      "%Y%m%dT%H:%M:%S.%f")
                return data

            gc.disable()  # performance optimization for msgpack
            loads_kwargs = {"use_list": True, "ext_hook": ext_type_decoder}
            if salt.utils.msgpack.version >= (0, 4, 0):
                # msgpack only supports 'encoding' starting in 0.4.0.
                # Due to this, if we don't need it, don't pass it at all so
                # that under Python 2 we can still work with older versions
                # of msgpack.
                if salt.utils.msgpack.version >= (0, 5, 2):
                    if encoding is None:
                        loads_kwargs["raw"] = True
                    else:
                        loads_kwargs["raw"] = False
                else:
                    loads_kwargs["encoding"] = encoding
                try:
                    ret = salt.utils.msgpack.unpackb(msg, **loads_kwargs)
                except UnicodeDecodeError:
                    # msg contains binary data
                    loads_kwargs.pop("raw", None)
                    loads_kwargs.pop("encoding", None)
                    ret = salt.utils.msgpack.loads(msg, **loads_kwargs)
            else:
                ret = salt.utils.msgpack.loads(msg, **loads_kwargs)
            if encoding is None and not raw:
                ret = salt.transport.frame.decode_embedded_strs(ret)
        except Exception as exc:  # pylint: disable=broad-except
            log.critical(
                "Could not deserialize msgpack message. This often happens "
                "when trying to read a file not in binary mode. "
                "To see message payload, enable debug logging and retry. "
                "Exception: %s",
                exc,
            )
            log.debug("Msgpack deserialization failure on message: %s", msg)
            gc.collect()

            exc_msg = "Could not deserialize msgpack message. See log for more info."
            raise SaltDeserializationError(exc_msg) from exc
        finally:
            gc.enable()
        return ret
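The ext_type_decoder above is the unpack side of Salt's custom ExtType code 78 for datetime values. A self-contained round-trip sketch in the same spirit, assuming a plain msgpack install (the helper names are illustrative and not the Salt serializer itself):

import datetime

import msgpack


def dt_default(obj):
    # Pack datetimes as ExtType 78 whose payload is a msgpack-encoded
    # timestamp string, in the same spirit as Salt's dumps() side.
    if isinstance(obj, datetime.datetime):
        return msgpack.ExtType(78, msgpack.packb(obj.strftime('%Y%m%dT%H:%M:%S.%f')))
    raise TypeError('cannot serialize {0!r}'.format(obj))


def dt_ext_hook(code, data):
    # Turn ExtType 78 back into a datetime; leave other codes untouched.
    if code == 78:
        stamp = msgpack.unpackb(data, raw=False)
        return datetime.datetime.strptime(stamp, '%Y%m%dT%H:%M:%S.%f')
    return msgpack.ExtType(code, data)


packed = msgpack.packb({'stamp': datetime.datetime(2024, 1, 1, 12, 30, 0, 5)}, default=dt_default)
print(msgpack.unpackb(packed, ext_hook=dt_ext_hook, raw=False))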
Example #24
    def tune_in(self):
        '''
        Lock onto the publisher. This is the main event loop for the minion
        '''
        log.info('{0} is starting as user \'{1}\''.format(
            self.__class__.__name__, getpass.getuser()))
        log.debug('Minion "{0}" trying to tune in'.format(self.opts['id']))
        self.context = zmq.Context()

        # Prepare the minion event system
        #
        # Start with the publish socket
        id_hash = hashlib.md5(self.opts['id']).hexdigest()
        epub_sock_path = os.path.join(
            self.opts['sock_dir'], 'minion_event_{0}_pub.ipc'.format(id_hash))
        epull_sock_path = os.path.join(
            self.opts['sock_dir'], 'minion_event_{0}_pull.ipc'.format(id_hash))
        self.epub_sock = self.context.socket(zmq.PUB)
        if self.opts.get('ipc_mode', '') == 'tcp':
            epub_uri = 'tcp://127.0.0.1:{0}'.format(self.opts['tcp_pub_port'])
            epull_uri = 'tcp://127.0.0.1:{0}'.format(
                self.opts['tcp_pull_port'])
        else:
            epub_uri = 'ipc://{0}'.format(epub_sock_path)
            salt.utils.check_ipc_path_max_len(epub_uri)
            epull_uri = 'ipc://{0}'.format(epull_sock_path)
            salt.utils.check_ipc_path_max_len(epull_uri)
        log.debug('{0} PUB socket URI: {1}'.format(self.__class__.__name__,
                                                   epub_uri))
        log.debug('{0} PULL socket URI: {1}'.format(self.__class__.__name__,
                                                    epull_uri))

        # Create the pull socket
        self.epull_sock = self.context.socket(zmq.PULL)
        # Bind the event sockets
        self.epub_sock.bind(epub_uri)
        self.epull_sock.bind(epull_uri)
        # Restrict access to the sockets
        if not self.opts.get('ipc_mode', '') == 'tcp':
            os.chmod(epub_sock_path, 0o700)
            os.chmod(epull_sock_path, 0o700)

        self.poller = zmq.Poller()
        self.epoller = zmq.Poller()
        self.socket = self.context.socket(zmq.SUB)
        self.socket.setsockopt(zmq.SUBSCRIBE, '')
        self.socket.setsockopt(zmq.IDENTITY, self.opts['id'])
        if hasattr(zmq, 'RECONNECT_IVL_MAX'):
            self.socket.setsockopt(zmq.RECONNECT_IVL_MAX,
                                   self.opts['recon_max'])
        if hasattr(zmq, 'TCP_KEEPALIVE'):
            self.socket.setsockopt(zmq.TCP_KEEPALIVE,
                                   self.opts['tcp_keepalive'])
            self.socket.setsockopt(zmq.TCP_KEEPALIVE_IDLE,
                                   self.opts['tcp_keepalive_idle'])
            self.socket.setsockopt(zmq.TCP_KEEPALIVE_CNT,
                                   self.opts['tcp_keepalive_cnt'])
            self.socket.setsockopt(zmq.TCP_KEEPALIVE_INTVL,
                                   self.opts['tcp_keepalive_intvl'])
        if hasattr(zmq, 'IPV4ONLY'):
            self.socket.setsockopt(
                zmq.IPV4ONLY,
                int(not int(self.opts.get('ipv6_enable', False))))
        self.socket.connect(self.master_pub)
        self.poller.register(self.socket, zmq.POLLIN)
        self.epoller.register(self.epull_sock, zmq.POLLIN)
        # Send an event to the master that the minion is live
        self._fire_master(
            'Minion {0} started at {1}'.format(self.opts['id'],
                                               time.asctime()), 'minion_start')

        if self.opts['multiprocessing'] and not salt.utils.is_windows():
            signal.signal(signal.SIGCHLD, self.handle_sigchld)
        # Make sure to gracefully handle SIGUSR1
        enable_sigusr1_handler()

        # On first startup execute a state run if configured to do so
        self._state_run()

        while True:
            try:
                self.schedule.eval()
                socks = dict(
                    self.poller.poll(self.opts['loop_interval'] * 1000))
                if self.socket in socks and socks[self.socket] == zmq.POLLIN:
                    payload = self.serial.loads(self.socket.recv())
                    self._handle_payload(payload)
                time.sleep(0.05)
                # Clean up the minion processes which have been executed and
                # have finished
                # Check if modules and grains need to be refreshed
                self.passive_refresh()
                # Check the event system
                if self.epoller.poll(1):
                    try:
                        package = self.epull_sock.recv(zmq.NOBLOCK)
                        self.epub_sock.send(package)
                    except Exception:
                        pass
            except zmq.ZMQError:
                # This is thrown by the interrupt caused by Python handling
                # the SIGCHLD. This is a safe error, so just restart the poll.
                continue
            except Exception:
                log.critical(traceback.format_exc())
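Most of tune_in() is socket setup; the event-bridge part of the loop simply forwards whatever arrives on the PULL socket out of the PUB socket. A minimal pyzmq sketch of that forwarding pattern, using illustrative inproc endpoints rather than Salt's ipc paths:

import zmq

context = zmq.Context()
epub_sock = context.socket(zmq.PUB)
epub_sock.bind('inproc://example_events_pub')
epull_sock = context.socket(zmq.PULL)
epull_sock.bind('inproc://example_events_pull')

epoller = zmq.Poller()
epoller.register(epull_sock, zmq.POLLIN)


def pump_events_once(timeout_ms=1):
    # Forward at most one pending event from PULL to all PUB subscribers,
    # mirroring the epoller/epull_sock/epub_sock body of the loop above.
    if epoller.poll(timeout_ms):
        try:
            epub_sock.send(epull_sock.recv(zmq.NOBLOCK))
        except zmq.ZMQError:
            pass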
Example #25
    def dumps(self, msg):
        '''
        Run the correct dumps serialization format
        '''
        try:
            return msgpack.dumps(msg)
        except (OverflowError, msgpack.exceptions.PackValueError):
            # msgpack can't handle the very long Python longs for jids
            # Convert any very long longs to strings
            # We borrow the technique used by TypeError below
            def verylong_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
                        obj[key] = verylong_encoder(value)
                    return dict(obj)
                elif isinstance(obj, (list, tuple)):
                    obj = list(obj)
                    for idx, entry in enumerate(obj):
                        obj[idx] = verylong_encoder(entry)
                    return obj
                # This is a spurious lint failure as we are gating this check
                # behind a check for six.PY2.
                if six.PY2 and isinstance(obj, long) and obj > pow(2, 64):  # pylint: disable=incompatible-py3-code
                    return str(obj)
                elif six.PY3 and isinstance(obj, int) and obj > pow(2, 64):
                    return str(obj)
                else:
                    return obj

            return msgpack.dumps(verylong_encoder(msg))
        except TypeError as e:
            # msgpack doesn't support datetime.datetime datatype
            # So here we have converted datetime.datetime to custom datatype
            # This is msgpack Extended types numbered 78
            def default(obj):
                return msgpack.ExtType(78, obj)

            def dt_encode(obj):
                datetime_str = obj.strftime("%Y%m%dT%H:%M:%S.%f")
                return msgpack.packb(datetime_str, default=default)

            def datetime_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
                        obj[key] = datetime_encoder(value)
                    return dict(obj)
                elif isinstance(obj, (list, tuple)):
                    obj = list(obj)
                    for idx, entry in enumerate(obj):
                        obj[idx] = datetime_encoder(entry)
                    return obj
                if isinstance(obj, datetime.datetime):
                    return dt_encode(obj)
                else:
                    return obj

            if "datetime.datetime" in str(e):
                return msgpack.dumps(datetime_encoder(msg))

            if msgpack.version >= (0, 2, 0):
                # Should support OrderedDict serialization, so, let's
                # raise the exception
                raise

            # msgpack is < 0.2.0, let's make its life easier
            # Since OrderedDict is identified as a dictionary, we can't
            # make use of msgpack custom types, we will need to convert by
            # hand.
            # This means iterating through all elements of a dictionary or
            # list/tuple
            def odict_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
                        obj[key] = odict_encoder(value)
                    return dict(obj)
                elif isinstance(obj, (list, tuple)):
                    obj = list(obj)
                    for idx, entry in enumerate(obj):
                        obj[idx] = odict_encoder(entry)
                    return obj
                return obj

            return msgpack.dumps(odict_encoder(msg))
        except (SystemError, TypeError) as exc:  # pylint: disable=W0705
            log.critical(
                'Unable to serialize message! Consider upgrading msgpack. '
                'Message which failed was %s with exception %s',
                msg, exc)
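The verylong_encoder path exists because msgpack cannot pack integers wider than 64 bits, which Salt job IDs can exceed. A small standalone sketch of the same conversion, assuming a plain msgpack install (the helper name and sample jid are illustrative):

import msgpack


def stringify_big_ints(obj):
    # Recursively convert integers that overflow msgpack's 64-bit range
    # into strings, the same idea as verylong_encoder above.
    if isinstance(obj, dict):
        return dict((key, stringify_big_ints(value)) for key, value in obj.items())
    if isinstance(obj, (list, tuple)):
        return [stringify_big_ints(entry) for entry in obj]
    if isinstance(obj, int) and obj > pow(2, 64):
        return str(obj)
    return obj


jid = 20240101123000123456789  # wider than 64 bits; packing it directly raises
packed = msgpack.dumps(stringify_big_ints({'jid': jid}))
print(msgpack.loads(packed, raw=False))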
Example #26
    def _write_state(self):
        '''
        Writes a current status to the defined status-file
        this includes the current pid, events received/handled
        and threads created/joined
        '''
        ev_hdl_per_s = float((float(self.events_han - self.stat_hdl_count)) /
                             float(self.state_timer_intrvl))
        ev_tot_per_s = float((float(self.events_rec - self.stat_rec_count)) /
                             float(self.state_timer_intrvl))

        if self.config['stat_worker']:
            stat_data = {
                'events_rec': self.events_rec,
                'events_hdl': self.events_han,
                'events_hdl_sec': round(ev_hdl_per_s, 2),
                'events_tot_sec': round(ev_tot_per_s, 2),
                'threads_created': self.threads_cre,
                'threads_joined': self.threads_join
            }

            self.threads_cre += 1

            st_worker = SaltEventsdWorker(stat_data, self.threads_cre, None,
                                          self.backends, **self.opts)
            st_worker.start()

            try:
                self.running_workers.append(st_worker)
            except AttributeError:
                log.error('self is missing running_workers')
                try:
                    log.info(self)
                    log.info(dir(self))
                except Exception:
                    log.error('Failed to dump dir(self)')

        try:
            # write the info to the specified log
            statf = open(self.state_file, 'w')
            statf.writelines(
                json.dumps({
                    'events_rec': self.events_rec,
                    'events_hdl': self.events_han,
                    'events_hdl_sec': round(ev_hdl_per_s, 2),
                    'events_tot_sec': round(ev_tot_per_s, 2),
                    'threads_created': self.threads_cre,
                    'threads_joined': self.threads_join
                }))

            # if we have the same pid as the pidfile, we are the running daemon
            # and also print the current counters to the logfile with 'info'
            if os.getpid() == self.pid:
                log.info("Running with pid {0}".format(self.pid))
                log.info("Events (han/recv): {0}/{1}".format(
                    self.events_han,
                    self.events_rec,
                ))
                log.info("Threads (cre/joi):{0}/{1}".format(
                    self.threads_cre,
                    self.threads_join,
                ))

            statf.write("\n")
            statf.close()
            sys.stdout.flush()
        except IOError as ioerr:
            log.critical("Failed to write state to {0}".format(
                self.state_file))
            log.exception(ioerr)
        except OSError as oserr:
            log.critical("Failed to write state to {0}".format(
                self.state_file))
            log.exception(oserr)
        self.stat_rec_count = self.events_rec
        self.stat_hdl_count = self.events_han
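The two rates written above are simple deltas against the counters recorded at the previous snapshot, divided by the snapshot interval. A tiny illustrative helper showing the same arithmetic (names and numbers are made up, not salt-eventsd API):

import json


def snapshot_rates(events_rec, events_han, prev_rec, prev_han, interval):
    # (counter now - counter at last snapshot) / interval length, rounded
    # the same way as ev_tot_per_s and ev_hdl_per_s above.
    return {
        'events_tot_sec': round((events_rec - prev_rec) / float(interval), 2),
        'events_hdl_sec': round((events_han - prev_han) / float(interval), 2),
    }


print(json.dumps(snapshot_rates(1200, 1150, 900, 880, 60)))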
Example #27
class Master(object):
    '''
    Creates a master server
    '''
    def __init__(self):
        self.cli = self.__parse_cli()
        # command line overrides config
        if self.cli['user']:
            self.opts['user'] = self.cli['user']

        # Send the pidfile location to the opts
        if self.cli['pidfile']:
            self.opts['pidfile'] = self.cli['pidfile']

    def __parse_cli(self):
        '''
        Parse the cli for options passed to a master daemon
        '''
        import salt.log
        parser = optparse.OptionParser(version="%%prog %s" % __version__)
        parser.add_option('-d',
                          '--daemon',
                          dest='daemon',
                          default=False,
                          action='store_true',
                          help='Run the master as a daemon')
        parser.add_option('-c',
                          '--config',
                          dest='config',
                          default='/etc/salt/master',
                          help='Pass in an alternative configuration file')
        parser.add_option('-u',
                          '--user',
                          dest='user',
                          help='Specify user to run master')
        parser.add_option('--pid-file',
                          dest='pidfile',
                          help=('Specify the location of the pidfile.'))
        parser.add_option(
            '-l',
            '--log-level',
            dest='log_level',
            choices=list(salt.log.LOG_LEVELS),
            help='Console log level. One of %s. For the logfile settings '
            'see the config file. Default: \'warning\'.' %
            ', '.join([repr(l) for l in salt.log.SORTED_LEVEL_NAMES]))

        options, args = parser.parse_args()

        self.opts = salt.config.master_config(options.config)

        if not options.log_level:
            options.log_level = self.opts['log_level']

        salt.log.setup_console_logger(options.log_level,
                                      log_format=self.opts['log_fmt_console'],
                                      date_format=self.opts['log_datefmt'])

        cli = {
            'daemon': options.daemon,
            'config': options.config,
            'user': options.user,
            'pidfile': options.pidfile
        }

        return cli

    def start(self):
        '''
        Run the sequence to start a salt master server
        '''
        try:
            verify_env([
                self.opts['pki_dir'],
                os.path.join(self.opts['pki_dir'], 'minions'),
                os.path.join(self.opts['pki_dir'], 'minions_pre'),
                os.path.join(self.opts['pki_dir'], 'minions_rejected'),
                self.opts['cachedir'],
                os.path.join(self.opts['cachedir'], 'jobs'),
                os.path.dirname(self.opts['log_file']),
                self.opts['sock_dir'],
            ],
                       self.opts['user'],
                       permissive=self.opts['permissive_pki_access'])
        except OSError as err:
            sys.exit(err.errno)

        import salt.log
        salt.log.setup_logfile_logger(self.opts['log_file'],
                                      self.opts['log_level_logfile']
                                      or self.opts['log_level'],
                                      log_format=self.opts['log_fmt_logfile'],
                                      date_format=self.opts['log_datefmt'])
        for name, level in self.opts['log_granular_levels'].items():
            salt.log.set_logger_level(name, level)

        import logging
        log = logging.getLogger(__name__)
        # Late import so logging works correctly
        if not verify_socket(self.opts['interface'], self.opts['publish_port'],
                             self.opts['ret_port']):
            log.critical('The ports are not available to bind')
            sys.exit(4)

        import salt.master
        master = salt.master.Master(self.opts)
        if self.cli['daemon']:
            # Late import so logging works correctly
            import salt.utils
            salt.utils.daemonize()
        set_pidfile(self.opts['pidfile'])
        if check_user(self.opts['user'], log):
            try:
                master.start()
            except salt.master.MasterExit:
                sys.exit()
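The constructor above applies command-line values on top of the loaded configuration only when they were actually supplied. A minimal optparse sketch of that precedence rule, using illustrative option names and stand-in config defaults rather than Salt's real config loader:

import optparse

parser = optparse.OptionParser()
parser.add_option('-u', '--user', dest='user', help='Specify user to run master')
parser.add_option('--pid-file', dest='pidfile', help='Specify the location of the pidfile.')
options, args = parser.parse_args(['-u', 'saltadmin'])

opts = {'user': 'root', 'pidfile': '/var/run/salt-master.pid'}  # stand-in config values
for name in ('user', 'pidfile'):
    value = getattr(options, name)
    if value:
        # command line overrides config only when the flag was given
        opts[name] = value
print(opts)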