def __init__(self):
    """ MultiJob constructor """
    self._jobs = {}
    self.client = APIClient(opts=MASTER_OPTIONS)
    self.handler = Handler()
def subprocess_runner(msg, q):
    from salt.client.api import APIClient
    import json

    SaltClient = APIClient()
    u = SaltClient.verify_token(msg['token'])
    if not u:
        q.put({"error": "Invalid token"})
        return
    resp = SaltClient.runnerClient.cmd(msg['fun'], msg['arg'])
    q.put(json.dumps(resp))
def subprocess_auth(msg, q):
    from salt.client.api import APIClient
    import json

    SaltClient = APIClient()
    try:
        token = SaltClient.create_token(msg)
    except Exception:
        token = {
            'error': 'Invalid credentials',
            'details': 'Authentication failed with provided credentials.'
        }
    q.put(json.dumps(token))
def runner_manage_present_async(self):
    '''
    Make a call to runner.manage.present and test against returned SSE data
    '''
    self.app.post_json('/run',
                       dict(client='master', fun='runner.manage.present', kwarg={}),
                       headers=self.headers)
    client = APIClient()
    # Wait up to 5 seconds for any salt/ event published as a result of the run
    sse = client.get_event(wait=5, tag='salt/', full=True)
    self.assertNotEqual(sse, None)
def subprocess_cmd(msg, q):
    from salt.client.api import APIClient
    from copy import deepcopy
    import json

    SaltClient = APIClient()
    u = SaltClient.verify_token(msg['token'])
    if not u:
        q.put({"error": "Invalid token"})
        return
    retval = SaltClient.run(msg)
    echodict = deepcopy(msg)
    echodict.pop('token')
    if msg.get('mode', 'async') == 'async':
        echodict['minions'] = retval['minions']
        echodict['jid'] = retval['jid']
    else:
        echodict['result'] = retval
    echodict['username'] = u['name']
    q.put(json.dumps(echodict))
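# A minimal sketch (an assumption, not part of the original code) showing how the
# (msg, q) worker functions above could be driven from a child process so the Salt
# client's ZeroMQ sockets never live in the parent. run_in_subprocess is a
# hypothetical helper; the 'pam' credentials are placeholders.
from multiprocessing import Process, Queue
import json

def run_in_subprocess(worker, msg):
    q = Queue()
    p = Process(target=worker, args=(msg, q))
    p.start()
    result = q.get()          # blocks until the worker puts its payload
    p.join()
    return json.loads(result) if isinstance(result, str) else result

token = run_in_subprocess(subprocess_auth,
                          {'username': 'saltuser', 'password': 'secret', 'eauth': 'pam'})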
def runner_manage_present_datastructure(self):
    '''
    Make a call to runner.manage.present and test data structure integrity
    '''
    resp = self.app.post_json('/run',
                              dict(client='master', fun='runner.manage.present', kwarg={}),
                              headers=self.headers)
    tag = resp.json_body['return'][0]['tag']
    client = APIClient()
    data = None
    while not data:
        sse = client.get_event(wait=0.01, tag='salt/', full=True)
        if sse is None:
            continue
        if sse['tag'] == '%s/ret' % tag:
            data = sse['data']['return']
    self.assertNotEqual(data, None)
class ClientWrapper(object):
    EventFeedListeners = []

    def __init__(self):
        self.SaltClient = APIClient()

    def auth(self, username, password, eauth='pam'):
        '''Authenticates a user against external auth and returns a token.'''
        try:
            token = self.SaltClient.create_token({
                'username': username,
                'password': password,
                'eauth': eauth
            })
        except Exception:
            token = {
                'error': 'Invalid credentials',
                'details': 'Authentication failed with provided credentials.'
            }
        return token

    def get_event(self, tag=''):
        return self.SaltClient.get_event(tag)

    def cmd(self, cmdmesg):
        cdict = {'mode': 'async'}  # TODO: async?
        cdict['fun'] = cmdmesg['method']
        cdict['tgt'] = cmdmesg['pattern']
        cdict['expr_form'] = cmdmesg.get('pattern_type', 'glob')
        cdict['kwarg'] = cmdmesg.get('kwargs', {})
        cdict['arg'] = cmdmesg.get('args', [])
        cdict['token'] = cmdmesg['token']
        retval = self.SaltClient.run(cdict)
        return retval
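# A hypothetical usage sketch of ClientWrapper; the credentials and target
# pattern are placeholders, not taken from the original code.
wrapper = ClientWrapper()
token = wrapper.auth('saltuser', 'secret', eauth='pam')
if 'error' not in token:
    pub = wrapper.cmd({
        'method': 'test.ping',        # maps to cdict['fun']
        'pattern': '*',               # maps to cdict['tgt']
        'pattern_type': 'glob',
        'token': token['token'],
    })
    print pub.get('jid')              # async mode returns the jid and targeted minions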
def __init__(self):
    self.SaltClient = APIClient()
class MultiJob(object):
    def __init__(self):
        """ MultiJob constructor """
        self._jobs = {}
        self.client = APIClient(opts=MASTER_OPTIONS)
        self.handler = Handler()

    def add(self, job):
        """
        Adds a job to be tracked. The job is published with the salt
        apiclient. The resulting dict containing the job id and the minions
        associated with the job id is stored for later use.

        @param salt_job - SaltCommand object containing a dictionary defining
            parameters of the salt job to be published
        @return - Boolean True for successful publish, Boolean False otherwise
        """
        pub_data = self.client.run(job.kwargs)
        job.set_pub_data(pub_data)
        self._jobs[job.jid] = job

    def is_finished(self):
        """
        Checks to see if all jobs are finished.

        @return - Boolean true for finished, Boolean false otherwise
        """
        return all([job.is_finished() for job in self._jobs.itervalues()])

    def should_process_event(self, event):
        """
        Checks whether or not we need to process an event. Events should have
        a jid and a return, the jid should belong to a job tracked by this
        MultiJob, and that job should not be finished yet.

        @param event - Dictionary representing an event.
        @return Boolean True for yes, False otherwise.
        """
        jid = event.get('jid')
        ret = event.get('return')
        if jid is None or ret is None:
            return False
        if jid not in self._jobs:
            return False
        job = self._jobs[jid]
        if job.is_finished():
            return False
        return True

    def wait(self, timeout):
        """
        Waits for all jobs so far to be finished. If a job finishes that is
        part of a sequence of jobs, the next job in the sequence is published.

        @param timeout - Float or int describing number of seconds to wait in
            total before returning.
        @return dict - Dictionary of responses
        """
        start = time.time()
        timeout_at = start + timeout
        while True:
            # Break on timeout
            if time.time() > timeout_at:
                break

            # Listen for all events with tag set to ''.
            # Need to be able to listen for multiple jobs.
            event = self.client.get_event(tag='', wait=0.25)

            # Check for no event received
            if event is None:
                continue

            if self.should_process_event(event):
                job = self._jobs[event.get('jid')]
                job.add_minion_return(event)
                if job.is_finished():
                    self.handler.handle_finish(job)
                    if job.chain:
                        self.add(job.chain)

            # Break on all jobs finished
            if self.is_finished():
                break

        errors = []
        # Validate our jobs
        for jid, job in self._jobs.iteritems():
            try:
                job.validate()
            except (UnfinishedException, UnsuccessfulException,
                    RetcodeException, FailedStateSlsException) as e:
                errors.append(e)

        if errors:
            raise MultiJobException(errors)

        resp = {jid: job.ret for jid, job in self._jobs.iteritems()}
        return resp
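# A hypothetical usage sketch of MultiJob. It assumes SaltCommand-like job
# objects exposing .kwargs, .jid, .set_pub_data(), .is_finished(), .chain,
# .validate() and .ret, as the class above expects; ping_job and state_job
# are placeholders.
multi = MultiJob()
multi.add(ping_job)
multi.add(state_job)
try:
    results = multi.wait(timeout=60)   # dict of minion returns keyed by jid
except MultiJobException as e:
    results = None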
def __init__(self, *args, **kwargs):
    self.SaltClient = APIClient()
    self.event_listener_proc = Process(target=subprocess_read_events,
                                       args=(self.event_queue,))
    self.event_listener_proc.start()
    self.event_processor = Greenlet.spawn(process_events, self)
    super(ZeroServer, self).__init__(*args, **kwargs)
class ZeroServer(object):
    event_listeners = set()
    event_queue = Queue()

    def __init__(self, *args, **kwargs):
        self.SaltClient = APIClient()
        self.event_listener_proc = Process(target=subprocess_read_events,
                                           args=(self.event_queue,))
        self.event_listener_proc.start()
        self.event_processor = Greenlet.spawn(process_events, self)
        super(ZeroServer, self).__init__(*args, **kwargs)

    def __del__(self, *args, **kwargs):
        self.event_listener_proc.kill()
        self.event_processor.kill()
        super(ZeroServer, self).__del__(*args, **kwargs)

    def validate_token(self, token):
        r = self.SaltClient.verify_token(token)
        if not r:
            r = {"start": '', "token": token, "expire": '',
                 "name": '', "eauth": '', "valid": False}
        else:
            r['valid'] = True
        return r

    @zerorpc.stream
    def event_stream(self, token):
        v = self.validate_token(token)
        if v.get('valid', False):
            try:
                q = GQueue()
                self.event_listeners.add(q)
                for msg in q:
                    yield msg
            finally:
                self.event_listeners.remove(q)

    def auth(self, username, password, eauth='pam'):
        '''Authenticates a user against external auth and returns a token.'''
        def subprocess_auth(msg, q):
            from salt.client.api import APIClient
            import json

            SaltClient = APIClient()
            try:
                token = SaltClient.create_token(msg)
            except Exception:
                token = {
                    'error': 'Invalid credentials',
                    'details': 'Authentication failed with provided credentials.'
                }
            q.put(json.dumps(token))

        q = GQueue()
        msg = {
            'username': username,
            'password': password,
            'eauth': eauth
        }
        subprocess_auth(msg, q)
        token = q.get()
        return json.loads(token)

    def cmd(self, cmdmesg):
        def subprocess_cmd(msg, q):
            from salt.client.api import APIClient
            from copy import deepcopy
            import json

            SaltClient = APIClient()
            u = SaltClient.verify_token(msg['token'])
            if not u:
                q.put({"error": "Invalid token"})
                return
            retval = SaltClient.run(msg)
            echodict = deepcopy(msg)
            echodict.pop('token')
            if msg.get('mode', 'async') == 'async':
                echodict['minions'] = retval['minions']
                echodict['jid'] = retval['jid']
            else:
                echodict['result'] = retval
            echodict['username'] = u['name']
            q.put(json.dumps(echodict))

        q = GQueue()
        subprocess_cmd(cmdmesg, q)
        retval = q.get()
        return json.loads(retval)

    def runner_sync(self, cmdmesg):
        def subprocess_runner(msg, q):
            from salt.client.api import APIClient
            import json

            SaltClient = APIClient()
            u = SaltClient.verify_token(msg['token'])
            if not u:
                q.put({"error": "Invalid token"})
                return
            resp = SaltClient.runnerClient.cmd(msg['fun'], msg['arg'])
            q.put(json.dumps(resp))

        q = GQueue()
        subprocess_runner(cmdmesg, q)
        retval = q.get()
        return json.loads(retval)

    def signature(self, tgt, module, token):
        cdict = {}
        cdict['tgt'] = tgt
        cdict['module'] = module
        cdict['token'] = token
        j = self.SaltClient.signature(cdict)
        resp = self.get_job(j['jid'])
        while len(resp) == 0:
            sleep(1)
            resp = self.get_job(j['jid'])
        return resp

    def get_minions(self, mid='*'):
        def subprocess_minon(mid, q):
            from salt.client.api import APIClient
            import json

            SaltClient = APIClient()
            resp = SaltClient.runnerClient.cmd('cache.grains', mid)
            q.put(json.dumps(resp))

        q = GQueue()
        subprocess_minon(mid, q)
        retval = q.get()
        return json.loads(retval)

    def get_job(self, jid):
        def subprocess_job(jid, q):
            from salt.client.api import APIClient
            import json

            SaltClient = APIClient()
            resp = SaltClient.runnerClient.cmd('jobs.lookup_jid', jid)
            q.put(json.dumps(resp))

        q = GQueue()
        subprocess_job(jid, q)
        retval = q.get()
        return json.loads(retval)

    def get_active(self):
        def subprocess_job(q):
            from salt.client.api import APIClient
            import json

            SaltClient = APIClient()
            resp = SaltClient.runnerClient.cmd('jobs.active')
            q.put(json.dumps(resp))

        q = GQueue()
        subprocess_job(q)
        retval = q.get()
        return json.loads(retval)

    def broadcast_event(self, e):
        for q in self.event_listeners:
            q.put_nowait(json.loads(e))
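# A hypothetical sketch of exposing ZeroServer over zerorpc; the bind endpoint
# is a placeholder and the original code does not show how the server is run.
import zerorpc

server = zerorpc.Server(ZeroServer())
server.bind('tcp://0.0.0.0:4242')   # clients call auth/cmd/event_stream over this socket
server.run()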
from salt.client.api import APIClient

client = APIClient()
mine_args = ('roles:ambari', 'host', 'grain')
resp = client.localClient.cmd('roles:ambari-server', 'mine.get',
                              arg=mine_args, expr_form='grain')
hosts = resp.values()[0].values()
for host in hosts:
    print host