def on_put(self, req, res):
    """
    PUT: /status?token={None}
    """
    if not Definition.get_str_token() in req.params:
        res.body = "Token is required."
        res.content_type = "String"
        res.status = falcon.HTTP_401
        return

    if req.params[Definition.get_str_token()] == Setting.get_token():
        # parse the request body (sent as a Python dict literal)
        raw = str(req.stream.read(), 'UTF-8')
        data = eval(raw)

        LookUpTable.update_worker(data)
        SysOut.debug_string("Update worker status ({0})".format(data[Definition.get_str_node_name()]))

        res.body = "Okay"
        res.content_type = "String"
        res.status = falcon.HTTP_200
    else:
        res.body = "Invalid token ID."
        res.content_type = "String"
        res.status = falcon.HTTP_401
def on_get(self, req, res):
    """
    GET: /status?token={None}
    """
    if not Definition.get_str_token() in req.params:
        format_response_string(res, falcon.HTTP_401, "Token is required")
        return

    if req.params[Definition.get_str_token()] == Setting.get_token():
        result = LService.get_machine_status(Setting, CRole.MASTER)
        format_response_string(res, falcon.HTTP_200, str(result))
    else:
        format_response_string(res, falcon.HTTP_401, "Invalid token ID")
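# Example (sketch): querying the master's /status endpoint from a client.
# The host, port and token value below are placeholders and not part of the
# handler above; only the endpoint path and the "token" parameter mirror it.
# Assumes the third-party `requests` library is installed.
import requests

def query_master_status(host="localhost", port=8080, token="None"):
    # GET /status?token=<token> returns the machine status string on HTTP 200
    resp = requests.get("http://{0}:{1}/status".format(host, port), params={"token": token})
    if resp.status_code == 200:
        return resp.text
    raise RuntimeError("Status request rejected: {0}".format(resp.text))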
def on_get(self, req, res):
    # check that a token and a request type are provided
    if not Definition.get_str_token() in req.params:
        format_response_string(res, falcon.HTTP_401, "Token required.")
        return

    if not "type" in req.params:
        format_response_string(res, falcon.HTTP_406, "Command not specified.")
        return

    # user wants to know whether containers are ready for the provided job ID
    if req.params['type'] == "poll_job":
        id = req.params.get('job_id')
        if not id in LookUpTable.Jobs.verbose():
            format_response_string(res, falcon.HTTP_404, "Specified job not available.")
            return

        jobs = LookUpTable.Jobs.verbose()
        stat = str(jobs[id].get('job_status'))
        format_response_string(res, falcon.HTTP_200, "Job status: " + stat)
        return
def on_post(self, req, res):
    # check that a token and a request type are provided
    req_raw = str(req.stream.read(req.content_length or 0), 'utf-8')
    # create a dict of the body data if any was provided
    req_data = json.loads(req_raw)

    if not Definition.get_str_token() in req.params:
        res.body = "Token is required."
        res.content_type = "String"
        res.status = falcon.HTTP_401
        return

    if not "type" in req.params:
        res.body = "No command specified."
        res.content_type = "String"
        res.status = falcon.HTTP_406
        return

    # request to create a new job - create an ID for the job, add it to the lookup table and queue creation of the job
    if req.params['type'] == 'new_job':
        # attempt to create a new job from the provided parameters
        job = new_job(req_data)
        if not job:
            SysOut.err_string("New job could not be added!")
            format_response_string(res, falcon.HTTP_500, "Could not create job.")
            return

        job_status = job.get('job_status')
        format_response_string(
            res, falcon.HTTP_200,
            "Job request received, container status: {}\nJob ID: {}".format(job_status, job.get('job_id')))
        return
    return
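# Example (sketch): driving the job API from a client. The endpoint path
# ("/jobRequest"), host, port, token and the body keys other than job_id and
# job_status are assumptions for illustration; only the query parameters
# (token, type, job_id) mirror the handlers above.
import json
import requests

def submit_and_poll_job(master="http://localhost:8080", token="None"):
    body = {"job_id": "abc123", "job_status": "INIT", "c_name": "my_image", "volatile": True}

    # POST ...?token=<token>&type=new_job with a JSON body describing the job
    resp = requests.post(master + "/jobRequest",
                         params={"token": token, "type": "new_job"},
                         data=json.dumps(body))
    print(resp.status_code, resp.text)

    # GET ...?token=<token>&type=poll_job&job_id=<id> to check the job status
    resp = requests.get(master + "/jobRequest",
                        params={"token": token, "type": "poll_job", "job_id": "abc123"})
    print(resp.status_code, resp.text)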
def on_put(self, req, res):
    """
    PUT: /status?token={None}
    """
    if not Definition.get_str_token() in req.params:
        res.body = "Token is required."
        res.content_type = "String"
        res.status = falcon.HTTP_401
        return

    if Definition.Docker.get_str_finished() in req.params:
        # a container is shutting down, update containers
        # TODO: add some kind of safety mechanism to really make sure no new requests
        # have been sent to this container before acknowledging removal?
        if LookUpTable.remove_container(
                req.params.get(Definition.Container.get_str_con_image_name()),
                req.params.get(Definition.Docker.get_str_finished())):
            # NOTE: container will terminate as soon as it reads this response!
            format_response_string(res, falcon.HTTP_200, "Container successfully removed")
        else:
            # NOTE: container will continue as before when it reads this response!
            format_response_string(res, falcon.HTTP_400, "Could not remove container from table!")
        return

    if req.params[Definition.get_str_token()] == Setting.get_token():
        data = json.loads(str(req.stream.read(req.content_length or 0), 'utf-8'))
        LookUpTable.update_worker(data)
        SysOut.debug_string("Update worker status ({0})".format(data[Definition.get_str_node_name()]))

        res.body = "Okay"
        res.content_type = "String"
        res.status = falcon.HTTP_200
    else:
        res.body = "Invalid token ID."
        res.content_type = "String"
        res.status = falcon.HTTP_401
    return
def on_get(self, req, res):
    """
    GET: /status?token={None}
    """
    if not Definition.get_str_token() in req.params:
        res.body = "Token is required."
        res.content_type = "String"
        res.status = falcon.HTTP_401
        return

    if req.params[Definition.get_str_token()] == Setting.get_token():
        result = LService.get_machine_status(Setting, CRole.MASTER)
        res.body = str(result)
        res.content_type = "String"
        res.status = falcon.HTTP_200
    else:
        res.body = "Invalid token ID."
        res.content_type = "String"
        res.status = falcon.HTTP_401
def find_available_worker(self, container):
    candidates = []
    workers = LookUpTable.Workers.verbose()
    SysOut.debug_string("Found workers: " + str(workers))
    if not workers:
        return None

    # loop through workers and make tuples of worker (IP, port), load and whether the requested container is available locally
    for worker in workers:
        curr_worker = workers[worker]
        if container in curr_worker[Definition.REST.get_str_local_imgs()]:
            candidates.append(
                ((curr_worker[Definition.get_str_node_addr()], curr_worker[Definition.get_str_node_port()]),
                 curr_worker[Definition.get_str_load5()], True))
        else:
            candidates.append(
                ((curr_worker[Definition.get_str_node_addr()], curr_worker[Definition.get_str_node_port()]),
                 curr_worker[Definition.get_str_load5()], False))

    # sort candidate workers first on availability of the image, then on load (average load over the last 5 minutes)
    candidates.sort(key=lambda x: (-x[2], x[1]))

    # remove candidates with higher than 50% CPU load
    for candidate in list(candidates):
        if not float(candidate[1]) < 0.5:
            candidates.remove(candidate)

    return candidates
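# Quick illustration (made-up data) of the candidate ordering used in
# find_available_worker above: entries are ((addr, port), load5, has_image).
# Workers that already hold the image sort first (-x[2]), ties break on the
# lowest 5-minute load, and anything at or above 0.5 load is dropped.
def _demo_candidate_ordering():
    demo = [(("10.0.0.1", 8081), 0.10, False),
            (("10.0.0.2", 8081), 0.40, True),
            (("10.0.0.3", 8081), 0.05, True),
            (("10.0.0.4", 8081), 0.90, True)]
    demo.sort(key=lambda x: (-x[2], x[1]))
    demo = [c for c in demo if float(c[1]) < 0.5]
    # result: 10.0.0.3 (has image, lowest load), then 10.0.0.2, then 10.0.0.1
    return demo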
def on_get(self, req, res):
    """
    GET: /messagesQuery?token=None&command=queueLength
    Report the number of messages in the queue. This is used when deciding
    whether a new instance should be created.
    """
    if not Definition.get_str_token() in req.params:
        res.body = "Token is required."
        res.content_type = "String"
        res.status = falcon.HTTP_401
        return

    if not Definition.MessagesQueue.get_str_command() in req.params:
        res.body = "No command specified."
        res.content_type = "String"
        res.status = falcon.HTTP_406
        return

    if req.params[Definition.MessagesQueue.get_str_command()] == Definition.MessagesQueue.get_str_queue_length():
        res.body = str(MessagesQueue.get_queues_all())
        res.content_type = "String"
        res.status = falcon.HTTP_200
        return

    if req.params[Definition.MessagesQueue.get_str_command()] == Definition.MessagesQueue.get_str_current_id():
        res.body = "None"
        res.content_type = "String"
        res.status = falcon.HTTP_200
        return

    if req.params[Definition.MessagesQueue.get_str_command()] == "verbose":
        data = LookUpTable.verbose()
        data['MSG'] = MessagesQueue.verbose()
        if req.params.get('format') == 'JSON':
            data = json.dumps(data)
        res.body = str(data)
        res.content_type = "String"
        res.status = falcon.HTTP_200

    if req.params[Definition.MessagesQueue.get_str_command()] == "verbose_html":
        data = LookUpTable.verbose()
        data['MSG'] = MessagesQueue.verbose()
        res.body = get_html_form(data['WORKERS'], data['MSG'], data['CONTAINERS'], data['TUPLES'])
        res.content_type = "String"
        res.status = falcon.HTTP_200
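# Example (sketch): inspecting the master's queues via /messagesQuery.
# Host, port, token and the exact parameter keys ("command", "format") are
# placeholders/assumptions; the command values mirror the handler above
# (queueLength, verbose, verbose_html).
import requests

def query_queue_length(master="http://localhost:8080", token="None"):
    resp = requests.get(master + "/messagesQuery",
                        params={"token": token, "command": "queueLength"})
    return resp.text  # string form of the per-image queue lengths

def query_verbose_json(master="http://localhost:8080", token="None"):
    resp = requests.get(master + "/messagesQuery",
                        params={"token": token, "command": "verbose", "format": "JSON"})
    return resp.text  # WORKERS/CONTAINERS/TUPLES/MSG tables serialized as JSON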
def get_container_object(req):
    ret = dict()
    ret[Definition.REST.Batch.get_str_batch_addr()] = req.params[Definition.REST.Batch.get_str_batch_addr()].strip()
    ret[Definition.REST.Batch.get_str_batch_port()] = int(req.params[Definition.REST.Batch.get_str_batch_port()])
    ret[Definition.REST.Batch.get_str_batch_status()] = int(req.params[Definition.REST.Batch.get_str_batch_status()])
    ret[Definition.Container.get_str_con_image_name()] = req.params[Definition.Container.get_str_con_image_name()].strip()
    ret[Definition.get_str_last_update()] = Services.get_current_timestamp()
    return ret
def on_get(self, req, res):
    """
    GET: /status?token={None}
    """
    if not Definition.get_str_token() in req.params:
        res.body = "Token is required."
        res.content_type = "String"
        res.status = falcon.HTTP_401
        return

    if req.params[Definition.get_str_token()] == Setting.get_token():
        s_content = Services.get_machine_status(Setting, CRole.WORKER)
        s_content[Definition.REST.get_str_docker()] = DockerService.get_containers_status()

        res.body = str(s_content)
        res.content_type = "String"
        res.status = falcon.HTTP_200
    else:
        res.body = "Invalid token ID."
        res.content_type = "String"
        res.status = falcon.HTTP_401
def update_job(request):
    job_id = request.get('job_id')
    if not job_id in LookUpTable.Jobs.__jobs:
        SysOut.warn_string("Couldn't update job, no existing job matching ID!")
        return False

    tkn = request.get(Definition.get_str_token())
    if not tkn == LookUpTable.Jobs.__jobs[job_id]['user_token']:
        SysOut.warn_string("Incorrect token, refusing update.")
        return False

    old_job = LookUpTable.Jobs.__jobs[job_id]
    old_job['job_status'] = request.get('job_status')
    return True
def get_tuple_object(req):
    # parameters
    ret = dict()
    ret[Definition.Container.get_str_data_digest()] = req.params[Definition.Container.get_str_data_digest()].strip()
    ret[Definition.Container.get_str_con_image_name()] = req.params[Definition.Container.get_str_con_image_name()].strip()
    ret[Definition.Container.get_str_container_os()] = req.params[Definition.Container.get_str_container_os()].strip()
    ret[Definition.Container.get_str_data_source()] = req.params[Definition.Container.get_str_data_source()].strip()
    ret[Definition.Container.get_str_container_priority()] = 0
    ret[Definition.REST.get_str_status()] = CTuple.SC
    ret[Definition.get_str_last_update()] = Services.get_current_timestamp()
    return ret
def on_post(self, req, res):
    """
    POST: docker?token=None&command={command}
    """
    if not Definition.get_str_token() in req.params:
        res.body = "Token is required."
        res.content_type = "String"
        res.status = falcon.HTTP_401
        return

    if not Definition.Docker.get_str_command() in req.params:
        res.body = "Command is required."
        res.content_type = "String"
        res.status = falcon.HTTP_401
        return

    # POST: docker?token=None&command=create
    if req.params[Definition.Docker.get_str_command()] == Definition.Docker.get_str_create():
        # Unpack the posted data (sent as a Python dict literal)
        raw = str(req.stream.read(), 'UTF-8')
        data = eval(raw)

        if not data[Definition.Container.get_str_con_image_name()]:
            res.body = "Required parameters are not supplied!"
            res.content_type = "String"
            res.status = falcon.HTTP_401
            return

        result = DockerService.create_container(data[Definition.Container.get_str_con_image_name()])
        if result:
            res.body = "Okay"
            res.content_type = "String"
            res.status = falcon.HTTP_200
            return
        else:
            res.body = "Create container error!"
            res.content_type = "String"
            res.status = falcon.HTTP_400
def new_job(request):
    new_item = {}
    new_id = request.get('job_id')
    if not new_id:
        SysOut.warn_string("Couldn't create job, no ID provided!")
        return False

    if new_id in LookUpTable.Jobs.__jobs:
        SysOut.warn_string("Job already exists in system, can't create!")
        return False

    new_item['job_id'] = new_id
    new_item['job_status'] = request.get('job_status')
    new_item[Definition.Container.get_str_con_image_name()] = request.get(Definition.Container.get_str_con_image_name())
    new_item['user_token'] = request.get(Definition.get_str_token())
    new_item['volatile'] = request.get('volatile')

    LookUpTable.Jobs.__jobs[new_id] = new_item
    return True
def on_get(self, req, res):
    """
    GET: docker?token=None&command={command}
    """
    if not Definition.get_str_token() in req.params:
        res.body = "Token is required."
        res.content_type = "String"
        res.status = falcon.HTTP_401
        return

    if not Definition.Docker.get_str_command() in req.params:
        res.body = "Command is required."
        res.content_type = "String"
        res.status = falcon.HTTP_401
        return

    # Check for status command
    if req.params[Definition.Docker.get_str_command()] == Definition.Docker.get_str_status():
        body = DockerService.get_containers_status()
        res.body = str(body)
        res.content_type = "String"
        res.status = falcon.HTTP_200
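# Example (sketch): talking to a worker's docker endpoint. The port, token,
# literal command strings ("status", "create") and the body key "c_name" are
# assumptions for illustration; the paths mirror the GET/POST handlers above.
import requests

def docker_status(worker="http://localhost:8081", token="None"):
    # GET docker?token=<token>&command=status returns the container status table
    return requests.get(worker + "/docker", params={"token": token, "command": "status"}).text

def docker_create(worker="http://localhost:8081", token="None", image="ubuntu:16.04"):
    # the create handler above parses the body with eval(), so send a dict literal
    resp = requests.post(worker + "/docker",
                         params={"token": token, "command": "create"},
                         data=str({"c_name": image}))
    return resp.status_code == 200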
def update_container(dict_input):
    def cont_in_table(dict_input):
        conts = LookUpTable.Containers.__containers[dict_input[Definition.Container.get_str_con_image_name()]]
        for cont in conts:
            if dict_input.get(Definition.Container.Status.get_str_sid()) == cont.get(Definition.Container.Status.get_str_sid()):
                return cont
        return None

    if dict_input[Definition.Container.get_str_con_image_name()] not in LookUpTable.Containers.__containers:
        # no containers for this image exist
        new_cont = [dict_input]
        LookUpTable.Containers.__containers[dict_input[Definition.Container.get_str_con_image_name()]] = new_cont
    else:
        cont = cont_in_table(dict_input)
        if not cont:
            # this specific container is not already in the table
            LookUpTable.Containers.__containers[dict_input[Definition.Container.get_str_con_image_name()]].append(dict_input)
        else:
            # container was already in the table, update its timestamp
            cont[Definition.get_str_last_update()] = Services.get_current_timestamp()
def send_data(self, container_name, container_os, data, priority=None):
    # The data must be a byte array
    if not isinstance(data, bytearray):
        LocalError.err_invalid_data_container_type()

    if len(data) == 0:
        SysOut.err_string("No content in byte array.")
        return None

    digest = hashlib.md5(data).hexdigest()

    # Ask for a streaming end-point, retrying up to __max_try times
    end_point = None
    counter = self.__max_try
    while not end_point:
        end_point = self.__get_stream_end_point(container_name, container_os, priority, digest)
        counter -= 1
        if counter == 0:
            SysOut.err_string("Cannot contact server. Exceeded maximum retry count ({0})!".format(self.__max_try))
            return False

    # Send data to a worker for processing directly
    counter = self.__max_try
    if end_point[Definition.get_str_node_role()] == CRole.WORKER:
        while not self.__push_stream_end_point(end_point[Definition.get_str_node_addr()],
                                               end_point[Definition.get_str_node_port()], data):
            time.sleep(self.__std_idle_time)
            counter -= 1
            if counter == 0:
                SysOut.err_string("Cannot contact server. Exceeded maximum retry count ({0})!".format(self.__max_try))
                return False

    # Send data to the master for queuing
    elif end_point[Definition.get_str_node_role()] == CRole.MESSAGING_SYSTEM:
        while not self.__push_stream_end_point_MS(end_point[Definition.get_str_node_addr()],
                                                  end_point[Definition.get_str_node_port()], data, container_name):
            time.sleep(self.__std_idle_time)
            counter -= 1
            if counter == 0:
                SysOut.err_string("Cannot contact server. Exceeded maximum retry count ({0})!".format(self.__max_try))
                return False
    else:
        return False

    if end_point[Definition.get_str_node_role()] == CRole.WORKER:
        SysOut.out_string("Push data to worker ({0}:{1}>{2}) successful.".format(
            end_point[Definition.get_str_node_addr()], end_point[Definition.get_str_node_port()], container_name))
    elif end_point[Definition.get_str_node_role()] == CRole.MESSAGING_SYSTEM:
        SysOut.out_string("Push data to messaging system ({0}:{1}>{2}) successful.".format(
            end_point[Definition.get_str_node_addr()], end_point[Definition.get_str_node_port()], container_name))
    else:
        SysOut.out_string("Push data to unknown ({0}:{1}>{2}) successful.".format(
            end_point[Definition.get_str_node_addr()], end_point[Definition.get_str_node_port()], container_name))
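# Example (sketch): how a data source might use send_data. The `connector`
# object and the image/OS names are assumptions for illustration; the essential
# contract shown by the method above is that the payload must be a bytearray
# and that False signals exhausted retries.
def push_example(connector):
    payload = bytearray(b"example payload for the processing container")
    ok = connector.send_data("my_image", "ubuntu", payload, priority=0)
    if not ok:
        print("Pushing data failed after the maximum number of retries.")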
def read_cfg_from_file():
    from harmonicIO.general.services import Services
    if not Services.is_file_exist('harmonicIO/worker/configuration.json'):
        SysOut.terminate_string('harmonicIO/worker/configuration.json does not exist')
    else:
        with open('harmonicIO/worker/configuration.json', 'rt') as t:
            import json
            cfg = json.loads(t.read())

            try:
                from harmonicIO.general.definition import Definition
                # Check for the json structure
                if Definition.get_str_node_name() in cfg and \
                   Definition.get_str_node_port() in cfg and \
                   Definition.get_str_data_port_range() in cfg and \
                   Definition.get_str_idle_time() in cfg and \
                   Definition.get_str_master_addr() in cfg and \
                   Definition.get_str_master_port() in cfg and \
                   Definition.get_str_node_external_addr() in cfg and \
                   Definition.get_str_node_internal_addr() in cfg:

                    # Check that the port numbers and ranges have the right types
                    if not isinstance(cfg[Definition.get_str_node_port()], int):
                        SysOut.terminate_string("Node port must be an integer.")
                    elif not isinstance(cfg[Definition.get_str_data_port_range()], list):
                        SysOut.terminate_string("Port range must be a list.")
                    elif not (isinstance(cfg[Definition.get_str_data_port_range()][0], int) and
                              isinstance(cfg[Definition.get_str_data_port_range()][1], int)):
                        SysOut.terminate_string("Port range bounds must be integers.")
                    elif not isinstance(cfg[Definition.get_str_master_port()], int):
                        SysOut.terminate_string("Master port must be an integer.")
                    elif len(cfg[Definition.get_str_data_port_range()]) != 2:
                        SysOut.terminate_string("Port range must consist of two elements: start, stop.")
                    elif not isinstance(cfg[Definition.get_str_idle_time()], int):
                        SysOut.terminate_string("Idle time must be an integer.")
                    elif cfg[Definition.get_str_data_port_range()][0] > \
                         cfg[Definition.get_str_data_port_range()][1]:
                        SysOut.terminate_string("Start of port range must not be greater than its end.")
                    else:
                        Setting.set_node_addr()

                        import multiprocessing
                        Setting.__node_name = cfg[Definition.get_str_node_name()].strip()
                        Setting.__node_port = cfg[Definition.get_str_node_port()]
                        Setting.__node_data_port_start = cfg[Definition.get_str_data_port_range()][0]
                        Setting.__node_data_port_stop = cfg[Definition.get_str_data_port_range()][1]
                        Setting.__std_idle_time = cfg[Definition.get_str_idle_time()]
                        Setting.__master_addr = cfg[Definition.get_str_master_addr()].strip()
                        Setting.__master_port = cfg[Definition.get_str_master_port()]
                        Setting.__node_external_addr = cfg[Definition.get_str_node_external_addr()].strip().lower()

                        # Check for automatic node naming
                        if Setting.__node_name.lower() == "auto":
                            # Get the node name from the host name
                            import socket
                            Setting.__node_name = socket.gethostname()

                        # Check for an overriding internal node address
                        if cfg[Definition.get_str_node_internal_addr()] and \
                           cfg[Definition.get_str_node_internal_addr()] != "auto":
                            from harmonicIO.general.services import Services
                            if Services.is_valid_ipv4(cfg[Definition.get_str_node_internal_addr()]) or \
                               Services.is_valid_ipv6(cfg[Definition.get_str_node_internal_addr()]):
                                Setting.__node_internal_addr = cfg[Definition.get_str_node_internal_addr()]

                        # Check the external node address for validity
                        if Setting.get_node_external_addr() != "none":
                            from harmonicIO.general.services import Services
                            if Services.is_valid_ipv4(Setting.get_node_external_addr()) or \
                               Services.is_valid_ipv6(Setting.get_node_external_addr()):
                                SysOut.out_string("Bypassing request with external address.")
                            else:
                                SysOut.terminate_string("Invalid external IP address!")
                        else:
                            Setting.__node_external_addr = None

                        SysOut.out_string("Load setting successful.")
                else:
                    SysOut.terminate_string("Required parameters are not present.")
            except Exception as e:
                print(e)
                SysOut.terminate_string("Invalid data in configuration file.")
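# Example (sketch): generating a worker configuration file that would pass the
# checks above. The key names are illustrative guesses at the strings returned
# by the Definition getters, and all addresses/ports are placeholders.
import json

worker_cfg = {
    "node_name": "auto",                      # "auto" -> use the hostname
    "node_port": 8081,                        # must be an int
    "node_data_port_range": [10000, 10500],   # [start, stop], start <= stop
    "std_idle_time": 5,                       # must be an int
    "master_addr": "192.168.1.10",
    "master_port": 8080,
    "node_external_addr": "none",
    "node_internal_addr": "auto"
}

with open("harmonicIO/worker/configuration.json", "wt") as f:
    json.dump(worker_cfg, f, indent=4)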
def on_get(self, req, res):
    """
    GET: /streamRequest?token=None
    Query string built by the client:
        "&c_name=" + container_name + "&c_os=" + container_os + "&priority=" + str(priority)
    This function mainly responds with an available channel for streaming from the data source.
    """
    if not Definition.get_str_token() in req.params:
        res.body = "Token is required."
        res.content_type = "String"
        res.status = falcon.HTTP_401
        return

    # Check for the required parameters.
    if not Definition.Container.get_str_con_image_name() in req.params:
        res.body = "Container name is required."
        res.content_type = "String"
        res.status = falcon.HTTP_401
        return

    if not Definition.Container.get_str_container_os() in req.params:
        res.body = "Container OS is required."
        res.content_type = "String"
        res.status = falcon.HTTP_401
        return

    if not Definition.Container.get_str_data_source() in req.params:
        res.body = "Data source is required."
        res.content_type = "String"
        res.status = falcon.HTTP_401
        return

    # Parse the request into a dict object
    ret = LookUpTable.Tuples.get_tuple_object(req)

    if Definition.Container.get_str_container_priority() in req.params:
        if LService.is_str_is_digit(req.params[Definition.Container.get_str_container_priority()]):
            ret[Definition.Container.get_str_container_priority()] = int(
                req.params[Definition.Container.get_str_container_priority()])
        else:
            res.body = "Container priority is not a digit."
            res.content_type = "String"
            res.status = falcon.HTTP_401
            return

    # Register the item in the tuple table
    LookUpTable.Tuples.add_tuple_info(ret)

    # Check for the availability of the container
    ret = LookUpTable.get_candidate_container(ret[Definition.Container.get_str_con_image_name()])

    if ret:
        res.body = Definition.Master.get_str_end_point(ret)
        res.content_type = "String"
        res.status = falcon.HTTP_200
        return
    else:
        # No streaming end-point available, fall back to the messaging system
        res.body = Definition.Master.get_str_end_point_MS(Setting)
        res.content_type = "String"
        res.status = falcon.HTTP_200
        return
def on_post(self, req, res):
    """
    POST: /streamRequest?token=None
    This function is invoked by the driver of the micro-batch in the container.
    It responds by arranging a stream either from the data source or from the messaging system.
    """
    if not Definition.get_str_token() in req.params:
        res.body = "Token is required."
        res.content_type = "String"
        res.status = falcon.HTTP_401
        return

    # Check whether the PE already exists; if not, insert it and respond
    if Definition.REST.Batch.get_str_batch_addr() in req.params and \
       Definition.REST.Batch.get_str_batch_port() in req.params and \
       Definition.REST.Batch.get_str_batch_status() in req.params and \
       Definition.Container.get_str_con_image_name() in req.params:

        # Check the data types
        if req.params[Definition.REST.Batch.get_str_batch_port()].isdigit() and \
           req.params[Definition.REST.Batch.get_str_batch_status()].isdigit():

            ret = LookUpTable.Containers.get_container_object(req)

            # If the queue contains data, ignore the update and stream from the queue
            length = MessagesQueue.get_queues_length(ret[Definition.Container.get_str_con_image_name()])

            if not length:
                LookUpTable.Containers.update_container(ret)
                SysOut.debug_string("No item in queue!")
                res.body = "No item in queue"
                res.content_type = "String"
                res.status = falcon.HTTP_200
                return

            if length > 0 and ret[Definition.REST.Batch.get_str_batch_status()] == CStatus.AVAILABLE:
                # ret[Definition.REST.Batch.get_str_batch_status()] = CStatus.BUSY
                # LookUpTable.Containers.update_container(ret)
                res.data = bytes(MessagesQueue.pop_queue(ret[Definition.Container.get_str_con_image_name()]))
                res.content_type = "Bytes"
                res.status = falcon.HTTP_203
                return
            else:
                # Register a new channel
                LookUpTable.Containers.update_container(ret)
                res.body = "OK"
                res.content_type = "String"
                res.status = falcon.HTTP_200
                return
        else:
            res.body = "Invalid data type!"
            res.content_type = "String"
            res.status = falcon.HTTP_406
            return
    else:
        res.body = "Invalid parameters!"
        res.content_type = "String"
        res.status = falcon.HTTP_406
        return
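# Example (sketch): the kind of poll a processing container's driver might make
# against /streamRequest. The host, port, token and parameter names
# (batch_addr, batch_port, batch_status, c_name) are assumptions for
# illustration; what mirrors the handler above is the status semantics:
# HTTP 200 means "channel registered / nothing queued", HTTP 203 carries a
# queued payload in the response body.
import requests

def poll_for_work(master="http://localhost:8080", token="None"):
    resp = requests.post(master + "/streamRequest",
                         params={"token": token,
                                 "batch_addr": "10.0.0.5",
                                 "batch_port": "10001",
                                 "batch_status": "0",    # e.g. AVAILABLE
                                 "c_name": "my_image"})
    if resp.status_code == 203:
        return resp.content   # bytes popped from the message queue
    return None               # nothing queued; channel (re)registered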
def read_cfg_from_file():
    from harmonicIO.general.services import Services, SysOut
    if not Services.is_file_exist('harmonicIO/master/configuration.json'):
        SysOut.terminate_string('harmonicIO/master/configuration.json does not exist!')
    else:
        with open('harmonicIO/master/configuration.json', 'rt') as t:
            import json
            cfg = json.loads(t.read())

            try:
                from harmonicIO.general.definition import Definition
                # Check for the json structure
                if Definition.get_str_node_name() in cfg and \
                   Definition.get_str_node_port() in cfg and \
                   Definition.get_str_master_addr() in cfg and \
                   Definition.get_str_data_port_range() in cfg and \
                   Definition.get_str_idle_time() in cfg:

                    # Check that the port numbers and ranges have the right types
                    if not isinstance(cfg[Definition.get_str_node_port()], int):
                        SysOut.terminate_string("Node port must be an integer!")
                    if not isinstance(cfg[Definition.get_str_master_addr()], str):
                        SysOut.terminate_string("Master address must be a string!")
                    elif not isinstance(cfg[Definition.get_str_data_port_range()], list):
                        SysOut.terminate_string("Port range must be a list!")
                    elif not (isinstance(cfg[Definition.get_str_data_port_range()][0], int) and
                              isinstance(cfg[Definition.get_str_data_port_range()][1], int)):
                        SysOut.terminate_string("Port range bounds must be integers!")
                    elif len(cfg[Definition.get_str_data_port_range()]) != 2:
                        SysOut.terminate_string("Port range must consist of two elements: start, stop!")
                    elif not isinstance(cfg[Definition.get_str_idle_time()], int):
                        SysOut.terminate_string("Idle time must be an integer!")
                    elif cfg[Definition.get_str_data_port_range()][0] > \
                         cfg[Definition.get_str_data_port_range()][1]:
                        SysOut.terminate_string("Start of port range must not be greater than its end!")
                    else:
                        Setting.__node_name = cfg[Definition.get_str_node_name()].strip()
                        Setting.__node_port = cfg[Definition.get_str_node_port()]
                        Setting.__node_data_port_start = cfg[Definition.get_str_data_port_range()][0]
                        Setting.__node_data_port_stop = cfg[Definition.get_str_data_port_range()][1]
                        Setting.__std_idle_time = cfg[Definition.get_str_idle_time()]
                        SysOut.out_string("Load setting successful.")

                        try:
                            if cfg[Definition.get_str_master_addr()].lower() == "auto":
                                Setting.__node_addr = Services.get_host_name_i()
                                SysOut.out_string("Assigning master ip address automatically.")
                            elif Services.is_valid_ipv4(cfg[Definition.get_str_master_addr()]) or \
                                 Services.is_valid_ipv6(cfg[Definition.get_str_master_addr()]):
                                Setting.set_node_addr(cfg[Definition.get_str_master_addr()])
                            else:
                                SysOut.terminate_string("Invalid master IP address format!")
                        except:
                            SysOut.terminate_string("Cannot assign IP address to the master!")
                else:
                    SysOut.terminate_string("Invalid data in configuration file.")
            except:
                SysOut.terminate_string("Invalid data in configuration file.")
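# Example (sketch): a master configuration that would pass the checks above. As
# with the worker example, the key names are illustrative guesses at the
# Definition getter strings and the values are placeholders.
import json

master_cfg = {
    "node_name": "master",
    "node_port": 8080,                        # must be an int
    "master_addr": "auto",                    # "auto" -> resolve the master's own address
    "node_data_port_range": [20000, 20500],   # [start, stop], start <= stop
    "std_idle_time": 5                        # must be an int
}

with open("harmonicIO/master/configuration.json", "wt") as f:
    json.dump(master_cfg, f, indent=4)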
def get_tuple_id(tuple_info):
    return tuple_info[Definition.Container.get_str_data_digest()][0:12] + ":" + \
           str(tuple_info[Definition.get_str_last_update()])
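# Illustration (made-up values): a tuple ID is the first 12 characters of the
# data digest joined with the last-update timestamp by a colon.
_example_digest = "9e107d9d372bb6826bd81d3542a419d6"
_example_tuple_id = _example_digest[0:12] + ":" + str(1520000000)  # -> "9e107d9d372b:1520000000"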
def add_worker(dict_input):
    dict_input[Definition.get_str_last_update()] = Services.get_current_timestamp()
    LookUpTable.Workers.__workers[dict_input[Definition.get_str_node_addr()]] = dict_input