class Component:
    """A description of a running crux component"""

    # description
    cruxfile = None
    address = None

    # zmq stuff
    __socket = None
    __context = None

    # housekeeping
    __log = None

    def __init__(self, address, socket=None, context=None, timeout=None):
        """Initialize the component

        :param address: the address of the component
        :param socket: the socket to use for communication, if we're sharing
            (one will be created if not provided)
        :param timeout: timeout for resolving the component
        :param context: the context to use to create a socket (if no socket is
            provided); one will be created if none is given
        """
        # logging?
        self.__log = Logger(logging=True)

        # set up the ZMQ stuff
        if context is None:
            if socket is None:
                self.__log.warn('creating new zmq context for component! '
                                'something is probably wrong')
            self.__context = zmq.Context()
        else:
            self.__context = context

        # remember where this component lives
        self.address = address

        # if we haven't got a socket yet, get one
        self.__socket = ManagedSocket(self.__context, zmq.REQ)

        # connect the socket
        self.__log('connecting to {}'.format(address))
        self.__socket.connect(address)

        # get the cruxfile
        self.cruxfile = self.request(Message(name='get_cruxfile'),
                                     timeout=timeout).payload

    def request(self, msg, timeout=None):
        """Do a request on this component

        :param msg: the message to post to the component
        :param timeout: timeout in ms
        :returns: the reply
        """
        return self.__socket.call(msg, timeout=timeout)
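# Usage sketch (illustrative, not part of the original source): resolve a
# component that is already listening and issue a request against it. The
# address, timeout, and payload below are assumptions.
def _example_component_usage():
    ctx = zmq.Context()
    comp = Component('tcp://localhost:30030', context=ctx, timeout=2500)
    print('resolved component: {}'.format(comp.cruxfile.get('name')))
    reply = comp.request(
        Message(name='execute', payload={'parameters': {}, 'inputs': {}}),
        timeout=2500)
    print('success: {}'.format(reply.success))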
class CruxREPL(cmd.Cmd):
    # zmq crap
    __context = None
    __socket = None
    __addr = None

    # message saving and stuff
    last_msg = None
    current_msg = None
    saved_msgs = {}

    # logging
    __log = None

    def __init__(self, script=None):
        super().__init__()
        self.__log = Logger(logging=True, name='dispatch')

        # set up zmq stuff
        self.__context = zmq.Context()
        self.__socket = self.__context.socket(zmq.REQ)

        self.current_msg = Message()
        self.__set_prompt()

        if script is not None:
            with open(script, 'r') as cmds:
                self.load(cmds.read().splitlines())

    def __set_prompt(self):
        if self.__addr is None:
            state = colored('disconnected', 'red')
        else:
            state = colored(self.__addr, 'green')
        self.prompt = '({})> '.format(state)

    def exit(self):
        if self.__addr is not None:
            self.do_disconnect(None)

    def load(self, cmds):
        stripped = []
        for c in cmds:
            c = c.strip()
            if c != '':
                stripped.append(c)
        self.__log('preloading {}'.format(stripped))
        self.cmdqueue.extend(stripped)

    def message_show(self, msg):
        if msg is None:
            print(colored('no message', 'yellow'))
            return
        print('{}: {}'.format(
            colored('name', 'blue'),
            json.dumps(msg.name) if msg.name is not None else colored(
                'None', 'red')))
        print('{}: {}'.format(
            colored('payload', 'blue'),
            json.dumps(msg.payload) if msg.payload is not None else colored(
                'None', 'red')))
        print('{}: {}'.format(
            colored('success', 'blue'),
            json.dumps(msg.success) if msg.success is not None else colored(
                'None', 'red')))

    def do_EOF(self, arg):
        """End the program"""
        self.exit()
        print("")
        return True

    def do_exit(self, arg):
        """End the program"""
        self.exit()
        return True

    def do_connect(self, addr):
        """Connect to an address"""
        if self.__addr is not None:
            self.__log.warn(
                'socket was connected to {}, disconnecting...'.format(
                    self.__addr))
            self.__socket.disconnect(self.__addr)
        self.__addr = addr
        self.__socket.connect(self.__addr)
        self.__log.info('connected to {}'.format(self.__addr))
        self.__set_prompt()

    def do_disconnect(self, args):
        """Disconnect"""
        if self.__addr is not None:
            self.__socket.disconnect(self.__addr)
            self.__log.info('disconnected from {}'.format(self.__addr))
        else:
            self.__log.warn('socket was not connected, ignoring...')
        self.__addr = None
        self.__set_prompt()

    def do_cset(self, args):
        """Set a field (name/payload/success) of the currently edited message.

        cset [field] [value]"""
        try:
            field, data = args.split(' ', 1)
        except ValueError:
            self.__log.error('error parsing arguments')
            return
        if field not in ['name', 'payload', 'success']:
            self.__log.error('invalid field \'{}\''.format(field))
            return
        try:
            setattr(self.current_msg, field, json.loads(data))
        except json.decoder.JSONDecodeError:
            self.__log.error('invalid data \'{}\''.format(data))
        else:
            self.__log.info('set field {} to {}'.format(field, data))

    def do_cshow(self, _):
        """Show the currently edited message"""
        self.message_show(self.current_msg)

    def do_creset(self, _):
        """Reset the current message"""
        self.current_msg = Message()

    def do_lshow(self, _):
        """Show the last received message"""
        self.message_show(self.last_msg)

    def do_lreset(self, _):
        """Reset the last received message"""
        self.last_msg = None

    def do_echo(self, txt):
        """Echo some text"""
        print('[{}]: {}'.format(colored('echo ', 'magenta'), txt))

    def do_send(self, _):
        """Send the currently edited message to the connected server"""
        if self.__addr is None:
            self.__log.error('not connected!')
            return
        try:
            packed = self.current_msg.pack()
        except MessageException as me:
            self.__log.error(me.msg)
        else:
            self.__log('sending...')
            self.__socket.send(packed)
            self.last_msg = Message(data=self.__socket.recv())

    def do_assert(self, args):
        """Make an assertion that the args are equal.

        assert [field] [value]"""
        try:
            field, data = args.split(' ', 1)
            data = json.loads(data)
        except (ValueError, json.decoder.JSONDecodeError):
            self.__log.error('error parsing arguments')
            return

        # resolve a dotted field path into the last received message
        def resolve_path(obj, fieldpath):
            if len(fieldpath) == 0:
                return obj
            if type(obj) is list:
                fieldpath[0] = int(fieldpath[0])
            if len(fieldpath) == 1:
                return obj[fieldpath[0]]
            return resolve_path(obj[fieldpath[0]], fieldpath[1:])

        try:
            field = field.split('.')
            compare = resolve_path(getattr(self.last_msg, field[0]), field[1:])
        except Exception:
            self.__log.warn(
                'assertion warning: field path expansion failed for {}'.format(
                    field))
            compare = None
        if compare == data:
            self.__log.info('assertion passed: {}'.format(data))
        else:
            self.__log.error('assertion failed!: {} != {}'.format(
                json.dumps(data), json.dumps(compare)))
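# Usage sketch (illustrative, not part of the original source): drive the REPL
# with a preloaded command sequence instead of a script file. The address and
# commands are assumptions; drop the preload to use it interactively.
def _example_repl_usage():
    repl = CruxREPL()
    repl.cmdqueue.extend([
        'connect tcp://localhost:30030',
        'cset name "get_cruxfile"',
        'send',
        'lshow',
        'exit',
    ])
    repl.cmdloop()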
class PipelineAgent:
    """Execute a pipeline using the current backend context

    Note that this can be run independently of the backend by specifying your
    own process pool rather than a daemon address. If a daemon address is
    specified, it takes precedence over a supplied pool.
    """

    # process pool
    __pool = None
    __daemon_addr = None

    # zmq stuff
    __socket = None
    __context = None

    # daemon api
    __dapi = None

    # components used
    __cpool = None

    # logger
    __log = None

    def __init__(self, daemon_addr=None, context=None, pool=None):
        """Initialize the pipeline agent

        :param daemon_addr: address of the daemon
        :param context: zmq context to use
        :param pool: pool to use if no running daemon
        :raises PipelineAgentInitError: if no process launching mech. exists
        """
        self.__log = Logger(logging=True, name='execpipe')

        # set up context
        if context is None:
            self.__log.warn(
                'initializing a zmq context to connect to {}, this may mean '
                'something is wrong!'.format(daemon_addr))
            self.__context = zmq.Context()
        else:
            self.__context = context

        # set up pooler
        if daemon_addr is not None:
            self.__dapi = DaemonAPI(daemon_addr, context=self.__context)
        elif pool is None:
            raise PipelineAgentInitError(
                'no process launching mechanism provided!')
        else:
            self.__pool = pool

        self.__cpool = {}

    def __process_start(self, path):
        """Start a component using the preferred mechanism

        :param path: path of the component to start
        """
        if self.__dapi is not None:
            return self.__dapi.process_start(path).payload
        return self.__pool.launch(path)

    def __remap_input(self, inp, remap):
        newinp = {}
        # copy in the remapped keys under their new names
        for key in remap:
            newinp[remap[key]] = inp[key]
        # copy in the rest of the input without clobbering the remapped keys
        for key in inp:
            if key not in newinp and key not in remap:
                newinp[key] = inp[key]
        return newinp

    def run(self, pipeline):
        """Run the supplied pipeline

        This function yields every intermediate computational step.

        :param pipeline: a dictionary object representing a pipeline
        """
        # first set up all required components
        for depname in pipeline['components']:
            dep = pipeline['components'][depname]

            # launch the process, and bind a component handle to it
            addr = self.__process_start(dep['src'])
            self.__cpool[depname] = Component(addr, context=self.__context)

            # check that the dependency satisfies the version requirements
            if not version_check(self.__cpool[depname].cruxfile['version'],
                                 dep['version']):
                raise UnmetDependencyError(
                    'dependency {}@{} does not match requirement {}'.format(
                        depname, self.__cpool[depname].cruxfile['version'],
                        dep['version']))

        # kick off the pipeline
        inp = {}
        count = 0
        for step in pipeline['pipeline']:
            # perform a request to the specified component
            result = self.__cpool[step['component']].request(
                Message(name='execute',
                        payload={
                            'parameters': step['parameters'],
                            'inputs': inp
                        }))

            # handle results
            if not result.success:
                # log & fail
                self.__log.error('{}: {}'.format(step['component'],
                                                 result.payload))
                raise BrokenPipelineError(result.payload)

            # generators are magic
            yield (count, step, result)

            if 'remap' in step:
                inp = self.__remap_input(result.payload, step['remap'])
            else:
                inp = result.payload
            count += 1
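# Usage sketch (illustrative, not part of the original source): run a one-step
# pipeline through a daemon that is assumed to be listening on the default
# address. The component source path, version constraint, and parameters are
# assumptions.
def _example_pipeline_agent_usage():
    agent = PipelineAgent(daemon_addr='tcp://localhost:30020',
                          context=zmq.Context())
    pipeline = {
        'components': {
            'adder': {'src': '/path/to/adder', 'version': '1.0.0'}
        },
        'pipeline': [
            {'component': 'adder', 'parameters': {'increment': 1}}
        ]
    }
    for count, step, result in agent.run(pipeline):
        print('step {} ({}): {}'.format(count, step['component'],
                                        result.payload))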
class DaemonAPI:
    # zmq stuff
    __socket = None
    __context = None
    __daemon_addr = None
    __timeout = None

    # logger
    __log = None

    def __init__(self, daemon_addr, context=None, timeout=None):
        """Initialize the daemon api pointing at a remote API

        :param daemon_addr: the daemon to connect to
        :param context: zmq context to use
        :param timeout: request timeout in ms
        """
        # init logger
        self.__log = Logger(logging=True, name='api_daemon')

        # set up context
        if context is None:
            self.__log.warn(
                'initializing a zmq context to connect to {}, this may mean '
                'something is wrong!'.format(daemon_addr))
            self.__context = zmq.Context()
        else:
            self.__context = context

        # create socket and connect
        self.__timeout = timeout
        self.__daemon_addr = daemon_addr
        self.__socket = ManagedSocket(self.__context, zmq.REQ)
        self.__socket.connect(self.__daemon_addr)

    def get_addr(self):
        """Get the currently connected daemon's address

        :returns: address
        """
        return self.__daemon_addr

    def disconnect(self):
        """Disconnect from the daemon"""
        self.__socket.disconnect()

    def connect(self, daemon_addr):
        """Connect to a daemon

        :param daemon_addr: Address to connect to
        """
        self.__daemon_addr = daemon_addr
        self.__socket.connect(self.__daemon_addr)

    def __call(self, msg):
        """Perform a remote call

        :param msg: message to send
        """
        return self.__socket.call(msg, timeout=self.__timeout)

    def process_start(self, path):
        return self.__call(Message(name='process_start', payload=path))

    def process_killall(self):
        return self.__call(Message(name='process_killall'))

    def process_list(self):
        return self.__call(Message(name='process_list'))

    def shutdown(self):
        return self.__call(Message(name='daemon_shutdown'))
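# Usage sketch (illustrative, not part of the original source): point the API
# at a running daemon, start a component, and list the managed processes. The
# daemon address and component path are assumptions.
def _example_daemon_api_usage():
    api = DaemonAPI('tcp://localhost:30020',
                    context=zmq.Context(),
                    timeout=2500)
    started = api.process_start('/path/to/component')
    print('component bound at: {}'.format(started.payload))
    print('all managed addresses: {}'.format(api.process_list().payload))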
class CruxAPIServer:
    # housekeeping
    log = None

    # zmq stuff
    __context = None
    REQ_TIMEOUT = 2500  # milliseconds

    # daemon api
    dapi = None

    # currently editing pipeline
    pline = None
    agent = None

    def __init__(self):
        self.log = Logger(name='webapi', logging=True)
        self.__context = zmq.Context()

    def attach_routes(self, app):
        """Attach webapi routes

        :param app: aiohttp web app
        """
        app.router.add_get('/api/daemon/connect', self.connect_daemon)
        app.router.add_get('/api/daemon/get', self.get_daemons)
        app.router.add_get('/api/components/list', self.get_all_components)
        app.router.add_get('/api/components/get', self.get_component)
        app.router.add_get('/api/components/load', self.load_component)
        app.router.add_post('/api/components/send', self.send_to_component)

    # --- helper methods ---

    def create_error(self, msg=None):
        resp = {'success': False}
        if msg is not None:
            resp['message'] = msg
            self.log.warn('failing request with "{}"!'.format(msg))
        else:
            self.log.warn('failing request!')
        return web.json_response(resp)

    # --- daemon methods ---

    async def connect_daemon(self, req):
        """Connect to a daemon

        HTTP method: GET
        Query params: daemon

        :param req: request from webserver
        """
        if 'daemon' not in req.query:
            return self.create_error('"daemon" is a required parameter!')
        if not validate_uri(req.query['daemon']):
            return self.create_error(
                '"{}" is not a valid address!'.format(req.query['daemon']))
        self.log.info('connecting daemon {}'.format(req.query['daemon']))
        if self.dapi is None:
            self.dapi = DaemonAPI(req.query['daemon'], context=self.__context)
        else:
            self.dapi.disconnect()
            self.dapi.connect(req.query['daemon'])
        return web.json_response({'success': True})

    async def get_daemons(self, req):
        if self.dapi is None:
            return web.json_response({'address': None, 'success': True})
        return web.json_response({
            'address': self.dapi.get_addr(),
            'success': True
        })

    # --- component methods ---

    def resolve_component(self, addr, timeout=None):
        return Component(
            addr,
            context=self.__context,
            timeout=timeout if timeout is not None else self.REQ_TIMEOUT)

    async def get_all_components(self, req):
        """Get all components running on the connected daemon

        HTTP method: GET

        :param req: request from webserver
        """
        self.log.info('getting all components')
        if self.dapi is None:
            return self.create_error('no daemon connected')

        # enumerate all component addresses
        processes = self.dapi.process_list()

        # fail if enumeration fails
        if not processes.success:
            self.log.warn('failed to get processes')
            return self.create_error('failed to get processes')
        processes = processes.payload

        # extract cruxfile from all components
        out = {}
        for address in processes:
            try:
                out[address] = self.resolve_component(
                    address, timeout=self.REQ_TIMEOUT).cruxfile
            except RequestTimeoutException:
                pass

        # pass back
        return web.json_response({'components': out, 'success': True})

    async def get_component(self, req):
        """Get a running component

        HTTP method: GET
        Query params: address

        :param req: request from webserver
        """
        if 'address' not in req.query:
            return self.create_error('"address" is a required parameter!')
        self.log.info('getting component {}'.format(req.query['address']))
        try:
            component = self.resolve_component(req.query['address']).cruxfile
        except RequestTimeoutException:
            return self.create_error(
                'Request to {} timed out!'.format(req.query['address']))
        return web.json_response({'component': component, 'success': True})

    async def load_component(self, req):
        """Load a component onto the daemon

        HTTP method: GET
        Query params: path

        :param req: request from webserver
        """
        if 'path' not in req.query:
            return self.create_error('"path" is a required parameter!')
        if self.dapi is None:
            return self.create_error('no daemon connected')
        self.log.info('loading {} onto daemon'.format(req.query['path']))
        resp = self.dapi.process_start(req.query['path'])
        if not resp.success:
            return self.create_error(
                'process loading failed: {}'.format(resp.payload))
        return web.json_response({'component': resp.payload, 'success': True})

    async def send_to_component(self, req):
        """Send a query to a component

        HTTP method: POST
        Query params: address

        :param req: request from webserver
        """
        if 'address' not in req.query:
            return self.create_error('"address" is a required parameter!')
        msg = await req.json()
        # debug: dump the raw incoming message
        print(msg)
        if 'name' not in msg:
            return self.create_error('name not in message!')
        try:
            component = self.resolve_component(req.query['address'])
        except RequestTimeoutException:
            return self.create_error(
                'request to {} timed out!'.format(req.query['address']))
        self.log('component resolved!')

        # craft the request
        request = Message(name=msg['name'])
        if 'payload' in msg:
            request.payload = msg['payload']

        # make the request
        try:
            response = component.request(request, timeout=self.REQ_TIMEOUT)
        except RequestTimeoutException:
            return self.create_error(
                'request to {} timed out!'.format(req.query['address']))

        # prepare the JSON response
        resp = {
            'name': response.name,
            'success': response.success,
            'payload': response.payload
        }

        # respond
        return web.json_response({'response': resp, 'success': True})
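# Usage sketch (illustrative, not part of the original source): mount the web
# API on an aiohttp application and serve it. The port is an assumption.
def _example_webapi_usage():
    server = CruxAPIServer()
    app = web.Application()
    server.attach_routes(app)
    web.run_app(app, port=8080)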
class Pipeline:
    """Pipeline manager class

    This class is intended to provide a programmatic way to manipulate
    pipeline definitions, not as a direct execution agent
    """

    # housekeeping
    __log = None

    # pipeline information
    pipeline = []
    components = {}

    def __init__(self, context=None):
        """Initialize the pipeline

        :param context: the zmq context to use. one will be created if one is
            not provided.
        """
        # logger name 'pipeline'
        self.__log = Logger(logging=True, name='pipeline')

        # init context
        if context is None:
            self.__log.warn(
                'pipeline is making its own zmq context, something is '
                'probably wrong')
            self.__context = zmq.Context()
        else:
            self.__context = context

        # use per-instance containers so pipelines don't share state
        self.pipeline = []
        self.components = {}

    def get_order(self):
        return range(len(self.pipeline))

    def set_order(self, order):
        """Set the order of components to execute

        The order is a permutation of the indices 0..len(pipeline)-1.

        :param order: an array of integers
        """
        # make sure every element of the order set is unique
        if len(set(order)) != len(order) or set(range(len(
                self.pipeline))) != set(order):
            raise PipelineError('invalid order array!')

        # rebuild the pipeline with the new order (illuminati?)
        pipeline = []
        for idx in order:
            pipeline.append(self.pipeline[idx])
        self.pipeline = pipeline
        self.__log('re-ordered pipeline to {}'.format(order))

    def step_add(self, component, idx=None):
        """Add a step to this pipeline

        :param component: name of the component to load
        :param idx: (optional) index to insert into
        :raises PipelineError: on unloaded component
        """
        self.__log('adding {} to pipeline position {}'.format(
            component, idx if idx is not None else 'end'))

        # throw if we don't have that component
        if component not in self.components:
            raise PipelineError(
                'component "{}" is not loaded!'.format(component))

        # insert at the end
        if idx is None:
            idx = len(self.pipeline)

        self.pipeline.insert(idx, {"component": component, "parameters": {}})

    def step_remove(self, idx):
        """Remove a step from the pipeline

        :param idx: index to remove
        """
        del self.pipeline[idx]

    def step_config(self, idx, cfg):
        """Configure a step of the pipeline

        :param idx: index to target
        :param cfg: values to replace with
        """
        self.pipeline[idx] = cfg

    def component_load(self, name, src, version):
        """Load a component into this pipeline

        :param name: name of the component
        :param src: src (URI, filepath, git)
        :param version: semver version constraint string
        :raises PipelineError: if component is already loaded
        """
        if name in self.components:
            raise PipelineError('component "{}" already loaded!'.format(name))
        self.components[name] = {'src': src, 'version': version}

    def component_unload(self, name):
        """Unload a component from the pipeline

        This takes into account whether any steps in the pipeline depend on
        the component. If so, the component will not be unloaded and the
        dependent step must be removed first.

        :param name: name of the component
        :raises PipelineError: if the component is not loaded or still in use
        """
        if name not in self.components:
            raise PipelineError(
                'component "{}" not loaded, cannot remove!'.format(name))
        for i in range(len(self.pipeline)):
            step = self.pipeline[i]
            if step['component'] == name:
                raise PipelineError('component "{}" in use by step {}!'.format(
                    name, i))
        del self.components[name]

    def load(self, data):
        """Import a pipeline configuration for editing

        :param data: data to import from (json string or dict)
        """
        # coerce from json string if it's not a dict
        if type(data) is not dict:
            data = json.loads(data)
        self.components = data['components']
        self.pipeline = data['pipeline']

    def save(self):
        """Export the pipeline configuration from this manager

        :returns: pipeline config (deepcopy)
        """
        return deepcopy({
            'components': self.components,
            'pipeline': self.pipeline
        })
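# Usage sketch (illustrative, not part of the original source): build a
# two-step pipeline definition and round-trip it through save()/load(). The
# component names, sources, and version constraints are assumptions.
def _example_pipeline_usage():
    pline = Pipeline(context=zmq.Context())
    pline.component_load('loader', '/path/to/loader', '1.0.0')
    pline.component_load('model', '/path/to/model', '2.1.0')
    pline.step_add('loader')
    pline.step_add('model')
    saved = pline.save()

    clone = Pipeline(context=zmq.Context())
    clone.load(saved)
    print('steps: {}'.format([step['component'] for step in clone.pipeline]))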
class CruxClient:
    # zeromq stuff
    __context = None
    __socket = None
    __dirty_socket = False  # ;)

    # description stuff
    inputs = None
    outputs = None
    parameters = None
    cruxfile = None

    # misc housekeeping
    __log = None

    def __init__(self,
                 description='crux.json',
                 bind=None,
                 context=None,
                 logging=True):
        """Creates the CruxClient instance

        :param description: where to find the crux description file.
            defaults to 'crux.json'
        :param bind: the address to bind to. if launched by the crux server,
            this will be automatically set
        :param context: advanced; specifies a ZMQ context to use (for
            intra-process comms). if one is not specified, one will be created
        :param logging: if true, the crux client will log to stdout
            (on by default)
        :raises InstantiationException: can fail, error msg will have detail
        """
        # create the log object
        self.__log = Logger(logging, name='client')

        # load in all the pieces of the crux description
        self.__log('loading cruxfile {}...'.format(description))
        with open(description, 'r') as cfile:
            self.cruxfile = json.load(cfile)
            self.inputs = self.__open_all(self.cruxfile['inputs'])
            self.cruxfile['inputs'] = self.inputs
            self.outputs = self.__open_all(self.cruxfile['outputs'])
            self.cruxfile['outputs'] = self.outputs
            self.parameters = self.__open_all(self.cruxfile['parameters'])
            self.cruxfile['parameters'] = self.parameters

        # change the log name
        self.__log.set_name(self.cruxfile['name'])
        self.__log('loaded cruxfile (and subfiles) successfully!')

        # if the bind address is none, assume we're being run by the crux
        # command line client and extract the bind address from the environment
        if bind is None:
            if 'CRUX_BIND' not in os.environ:
                raise InstantiationException(
                    'no bind address provided and CRUX_BIND unset!')
            bind = os.environ['CRUX_BIND']
            self.__log('interpreted bind address as {}'.format(bind))

        # if the zeromq context is not provided, we'll make our own
        if context is not None:
            self.__context = context
        else:
            self.__context = zmq.Context()

        # make the socket and bind it
        self.__socket = self.__context.socket(zmq.REP)
        self.__socket.bind(bind)
        self.__log.info('component listening on {}!'.format(bind))

    def wait(self):
        """Waits for a client to ask something

        :returns: a triple (run inputs, run config, done status)
        """
        while True:
            if self.__dirty_socket:
                raise NoResultError()
            msg = Message(data=self.__socket.recv())
            reply = Message(name='ack')
            self.__log('received {}...'.format(msg.name))

            # route our reply
            if msg.name == 'execute':
                # first check if the request is valid
                if msg.payload is not None and 'parameters' in msg.payload \
                        and 'inputs' in msg.payload:
                    # this is an execution, pass control back to the main loop
                    # (but needing closure)
                    self.__log('passing execution back...')
                    self.__dirty_socket = True
                    return (packing.unpack_io_object(
                        msg.payload['inputs'], defs=self.cruxfile['inputs']),
                            self.__defaultify(msg.payload['parameters']),
                            False)
                else:
                    reply.name = 'malformed'
                    reply.success = False
                    self.__log.error('received malformed execution request')
            elif msg.name == 'get_cruxfile':
                reply.payload = self.cruxfile
            elif msg.name == 'shutdown':
                break
            else:
                # method called has not yet been implemented/is unknown
                reply.name = 'nyi'
                reply.success = False

            # skipped if there was a shutdown or execute instruction
            self.__socket.send(reply.pack())
            self.__dirty_socket = False

        # if we've broken out this side of the loop, assume we're shutting down
        self.__log('shutting down loop...')
        self.__socket.send(reply.pack())
        self.__dirty_socket = False
        return (None, None, True)

    def output(self, output):
        """Returns data to the client

        :param output: the data to send back
        """
        # create the return message
        reply = Message(name='return', payload=output, success=True)

        # pack & send off
        self.__socket.send(reply.pack(defs=self.cruxfile['outputs']))
        self.__dirty_socket = False
        self.__log('returned output')

    def fail(self, msg=None):
        """Fail the current operation

        :param msg: An optional message to tell the client what's up
        """
        # create the return message
        reply = Message(name='return', payload=msg, success=False)

        # pack & send off
        self.__socket.send(reply.pack())
        self.__dirty_socket = False
        self.__log('returned error message')

    def __defaultify(self, parameters):
        """Fill in missing parameters with the defaults

        :param parameters: Parameters recvd. from the requestor
        :returns: filled in parameters
        """
        for param in self.cruxfile['parameters']:
            if 'default' in self.cruxfile['parameters'][param] and (
                    param not in parameters):
                parameters[param] = self.cruxfile['parameters'][param][
                    'default']
        return parameters

    def __combine(self, objs):
        """Combine a number of objects, ordered from least to most important

        :param objs: an array of objects
        """
        final = {}
        for obj in objs:
            for key in obj:
                final[key] = obj[key]
        return final

    def __open_all(self, filearr):
        """Open all files specified, combining according to
        CruxClient#__combine()

        :param filearr: Array of files (or single file as str)
        :returns: a combined object
        """
        if type(filearr) is str:
            filearr = [filearr]
        pool = []
        for f in filearr:
            with open(f, 'r') as fp:
                pool.append(json.load(fp))
        return self.__combine(pool)
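# Usage sketch (illustrative, not part of the original source): the typical
# component main loop around wait()/output()/fail(). Assumes a crux.json (and
# any files it references) exists next to this script and that either a bind
# address is passed or CRUX_BIND is set in the environment.
def _example_client_usage():
    client = CruxClient(description='crux.json', bind='tcp://*:30030')
    while True:
        inputs, parameters, done = client.wait()
        if done:
            break
        try:
            # hypothetical work step; replace with the component's real logic
            client.output({'echo': inputs})
        except Exception as e:
            client.fail(str(e))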
class Daemon:
    # zmq stuff
    __context = None
    __apisock = None
    __pubsock = None

    # housekeeping
    __debug = False
    __log = None
    __should_stop = False

    # addresses
    __apisock_addr = None
    __pubsock_addr = None

    # process pool
    __pool = None

    def __init__(self,
                 logging=True,
                 debug=False,
                 bind_addr='tcp://*:30020',
                 pub_addr='tcp://*:30021',
                 context=None,
                 install_loc=None):
        # logging!
        self.__log = Logger(logging=logging, name='daemon')

        # debug mode?
        self.__debug = debug

        # set up the zmq context, re-using if we've got one
        if context is not None:
            self.__context = context
        else:
            self.__context = zmq.Context()

        # set the addresses
        self.__apisock_addr = bind_addr
        self.__pubsock_addr = pub_addr

        # create the API socket
        self.__apisock = self.__context.socket(zmq.REP)
        self.__apisock.bind(self.__apisock_addr)

        # create the publishing socket
        self.__pubsock = self.__context.socket(zmq.PUB)
        self.__pubsock.bind(self.__pubsock_addr)

        # initialize the process pool
        self.__pool = ProcessPool()

        self.__log('initialized daemon')

    def listen(self):
        self.__log.info('daemon listening on {}'.format(self.__apisock_addr))

        # loop until we stop
        while not self.__should_stop:
            message = Message(data=self.__apisock.recv())
            try:
                reply = self.__route(message)
            except Exception as e:
                reply = Message(name='failure',
                                payload='internal error',
                                success=False)
                if self.__debug:
                    self.__log.error(e)
            self.__apisock.send(reply.pack())

        self.__log.warn('stopping daemon!')
        self.__pool.terminate_all()

    def __route(self, msg):
        if msg.name == 'process_start':
            return self.__process_start(msg)
        elif msg.name == 'process_list':
            return self.__process_list(msg)
        elif msg.name == 'process_killall':
            return self.__process_killall(msg)
        elif msg.name == 'daemon_shutdown':
            return self.__shutdown()
        return Message(name='nyi', success=False)

    def __process_start(self, msg):
        if msg.payload is None:
            return Message(name='malformed', success=False)
        path = msg.payload
        try:
            addr = self.__pool.launch(path)
            return Message(name='return', payload=addr)
        except ProcessLoadError as ple:
            return Message(name='failure', success=False, payload=ple.msg)

    def __process_list(self, msg):
        return Message(name='return', payload=self.__pool.get_all_addrs())

    def __process_killall(self, msg):
        self.__log.info('killing all managed processes...')
        self.__pool.terminate_all()
        return Message(name='return')

    def __shutdown(self):
        self.__should_stop = True
        return Message(name='return')

    def __enter__(self):
        return self

    def __exit__(self, e, val, tb):
        """Flushes the pool if shut down uncleanly"""
        print('')
        self.__log.warn('uncleanly flushing pool on shutdown')
        self.__pool.terminate_all()
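# Usage sketch (illustrative, not part of the original source): run a daemon
# on its default bind addresses and rely on the context manager to flush the
# process pool if listen() exits uncleanly (e.g. on Ctrl-C).
def _example_daemon_usage():
    with Daemon(logging=True, debug=True) as daemon:
        daemon.listen()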