def __setstate__(self, state):
    """
    Restore the worker in a child process after unpickling.

    Re-runs the signal-handling process initializer with the pickled
    log queue, then copies the remaining pickled attributes back onto
    the instance.

    :param dict state: Pickled state produced by ``__getstate__``;
        expected keys: ``log_queue``, ``opts``, ``req_channels``,
        ``k_mtime``.
    """
    self._is_child = True
    SignalHandlingMultiprocessingProcess.__init__(
        self, log_queue=state['log_queue'])
    # Restore the plain attributes straight from the pickled mapping.
    for attr in ('opts', 'req_channels', 'k_mtime'):
        setattr(self, attr, state[attr])
    def __init__(self, opts, req_channels, name, logger, **kwargs):
        """
        Create a salt minion inference learning worker process.

        :param dict opts: The salt options
        :param req_channels: Request channels the worker will service
            (presumably transport channel objects — confirm with caller)
        :param str name: Name assigned to the spawned process
        :param logger: Logger instance the worker logs through
        :param kwargs: Extra keyword arguments forwarded to the
            signal-handling process base initializer

        :rtype: InfLearngWorker
        :return: Inference Learning worker
        """
        # The process name travels to the base class through kwargs.
        kwargs['name'] = name
        SignalHandlingMultiprocessingProcess.__init__(self, **kwargs)

        self.log = logger
        self.opts = opts
        self.req_channels = req_channels
        # Key-file modification time; starts at 0 (never seen).
        self.k_mtime = 0
# Example #3
    def async(self, fun, low, user='******'):
        '''
        Execute the function in a multiprocess and return the event tag to use
        to watch for the return
        '''
        async_pub = self._gen_async_pub()

        proc = SignalHandlingMultiprocessingProcess(
                target=self._proc_function,
                args=(fun, low, user, async_pub['tag'], async_pub['jid']))
        with default_signals(signal.SIGINT, signal.SIGTERM):
            # Reset current signals before starting the process in
            # order not to inherit the current signal handlers
            proc.start()
        proc.join()  # MUST join, otherwise we leave zombies all over
        return async_pub
# Example #4
def handle_decoded_payload(self, data):
    '''
    Override this method if you wish to handle the decoded data
    differently.

    :param dict data: Decoded job payload. Expected keys include
        ``fun`` and ``jid``; ``user`` is optional.
    '''
    # Ensure payload is unicode. Disregard failure to decode binary blobs.
    if six.PY2:
        data = salt.utils.data.decode(data, keep=True)
    if 'user' in data:
        log.info('User %s Executing command %s with jid %s', data['user'],
                 data['fun'], data['jid'])
    else:
        log.info('Executing command %s with jid %s', data['fun'], data['jid'])
    log.debug('Command details %s', data)

    # Don't duplicate jobs
    log.trace('Started JIDs: %s', self.jid_queue)
    if self.jid_queue is not None:
        if data['jid'] in self.jid_queue:
            # Already running this jid; drop the duplicate silently.
            return
        else:
            self.jid_queue.append(data['jid'])
            # Bound the queue: evict the oldest jid past the high-water mark.
            if len(self.jid_queue) > self.opts['minion_jid_queue_hwm']:
                self.jid_queue.pop(0)

    if isinstance(data['fun'], six.string_types):
        if data['fun'] == 'sys.reload_modules':
            # Reload the execution environment and keep the scheduler's
            # references in sync with the fresh module tables.
            self.functions, self.returners, self.function_errors, self.executors = self._load_modules(
            )
            self.schedule.functions = self.functions
            self.schedule.returners = self.returners

    # Default to 0 (throttling disabled) so a missing option does not
    # raise a TypeError from comparing None > 0 below.
    process_count_max = self.opts.get('process_count_max', 0)
    if process_count_max > 0:
        process_count = len(salt.utils.minion.running(self.opts))
        while process_count >= process_count_max:
            # Lazy %-args keep this consistent with the other log calls
            # and skip formatting when the level is filtered out.
            log.warning(
                'Maximum number of processes reached while executing '
                'jid %s, waiting...', data['jid'])
            yield tornado.gen.sleep(10)
            process_count = len(salt.utils.minion.running(self.opts))

    # We stash an instance references to allow for the socket
    # communication in Windows. You can't pickle functions, and thus
    # python needs to be able to reconstruct the reference on the other
    # side.
    instance = self
    multiprocessing_enabled = self.opts.get('multiprocessing', True)
    if multiprocessing_enabled:
        if sys.platform.startswith('win'):
            # let python reconstruct the minion on the other side if we're
            # running on windows
            instance = None
        with default_signals(signal.SIGINT, signal.SIGTERM):
            process = SignalHandlingMultiprocessingProcess(
                target=self._target,
                args=(instance, self.opts, data, self.connected))
    else:
        process = threading.Thread(target=self._target,
                                   args=(instance, self.opts, data,
                                         self.connected),
                                   name=data['jid'])

    if multiprocessing_enabled:
        with default_signals(signal.SIGINT, signal.SIGTERM):
            # Reset current signals before starting the process in
            # order not to inherit the current signal handlers
            process.start()
    else:
        process.start()
    process.name = '{}-Job-{}'.format(process.name, data['jid'])
    self.subprocess_list.add(process)
# Example #5
# File: mixins.py  Project: mahak/salt
    def _gen_async_pub(self, jid=None):
        # Fall back to a freshly generated jid when none was supplied.
        job_id = salt.utils.jid.gen_jid() if jid is None else jid
        return {
            'tag': tagify(job_id, prefix=self.tag_prefix),
            'jid': job_id,
        }

    def async(self, fun, low, user='******'):
        '''
        Execute the function in a multiprocess and return the event tag to use
        to watch for the return
        '''
        async_pub = self._gen_async_pub()

        proc = SignalHandlingMultiprocessingProcess(
                target=self._proc_function,
                args=(fun, low, user, async_pub['tag'], async_pub['jid']))
        with default_signals(signal.SIGINT, signal.SIGTERM):
            # Reset current signals before starting the process in
            # order not to inherit the current signal handlers
            proc.start()
        proc.join()  # MUST join, otherwise we leave zombies all over
        return async_pub

    def print_async_event(self, suffix, event):
        '''
        Print all of the events with the prefix 'tag'
        '''
        if not isinstance(event, dict):
            return