def _invoke(self, timeout) -> bool:
    """Invokes the command on the remote server.

    Returns
    -------
    bool:
        True if the command ended up gracefully. False otherwise

    Raises
    ------
    TimeoutError:
        raised when the timeout is reached while waiting for the response back from the remote server
    """
    ctx = None
    if not has_app_context():
        ctx = self._app.app_context()
        ctx.push()
    try:
        self.__dict__['_server'] = Server.query.get(self._server)
        # set a timeout if none to avoid infinite wait in event
        if timeout is None:
            timeout = defaults.TIMEOUT_REMOTE_COMMAND
        if not self._command._cp:
            auth = HTTPBearerAuth(
                create_access_token(self._command.var_context.env['executor_id'],
                                    datetime.timedelta(seconds=15)))
            start = time.time()
            data = dict(operation=base64.b64encode(pickle.dumps(self._command.implementation)).decode('ascii'),
                        var_context=base64.b64encode(pickle.dumps(self._command.var_context)).decode('ascii'),
                        params=base64.b64encode(pickle.dumps(self._command.params)).decode('ascii'),
                        timeout=timeout,
                        step_id=str(self.id[1]),
                        orch_execution=self._command.register.json_orch_execution,
                        event_id=str(uuid.uuid4()))
            resp = post(server=self.server, view_or_url='api_1_0.launch_operation', json=data, auth=auth,
                        timeout=timeout)
            if resp.code == 204:
                # operation accepted: wait for the completion event, discounting the time already spent
                current_app.events.register(data['event_id'], self.callback_completion_event)
                event = self._completion_event.wait(timeout=timeout - (time.time() - start))
                if event is not True:
                    self._command._cp = CompletedProcess(success=False, stdout='',
                                                         stderr=f'Timeout of {timeout} reached waiting '
                                                                f'server operation completion')
            elif resp.code == 200:
                # operation completed synchronously: feed the response straight to the callback
                self.callback_completion_event(Event(None, data=resp.msg))
            elif resp.code:
                if isinstance(resp.msg, dict):
                    msg = json.dumps(resp.msg)
                else:
                    msg = str(resp.msg)
                self._command._cp = CompletedProcess(success=False, stdout='', stderr=msg, rc=resp.code)
    finally:
        if ctx:
            ctx.pop()
    return self.success
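
# Hedged sketch (not part of the project sources): the launch_operation payload above ships the
# operation, its variable context and its params as pickled, base64-encoded ASCII strings. The
# snippet below shows that encoding round-trip in isolation, assuming the receiving endpoint simply
# reverses it before executing. The helper names are illustrative only.
import base64
import pickle


def _pack(obj) -> str:
    """Serialize any picklable object into an ASCII-safe string."""
    return base64.b64encode(pickle.dumps(obj)).decode('ascii')


def _unpack(payload: str):
    """Reverse of _pack: base64-decode and unpickle."""
    return pickle.loads(base64.b64decode(payload.encode('ascii')))


if __name__ == '__main__':
    params = {'retries': 3, 'paths': ['/tmp/a', '/tmp/b']}
    assert _unpack(_pack(params)) == params
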
def test_post(self, m):
    status = 200
    msg = {'new': 'data'}
    data = {'data': 'some data'}

    def callback(request, **kwargs):
        # 'responses' invokes the callback with a PreparedRequest; 'aioresponses' passes the URL
        # positionally and the request body through kwargs
        if isinstance(request, PreparedRequest):
            self.assertDictEqual(data, unpack_msg(json.loads(request.body)))
            return status, {}, json.dumps(pack_msg(msg))
        else:
            self.assertDictEqual(data, unpack_msg(kwargs['json']))
            return CallbackResult(status=status, payload=pack_msg(msg))

    responses.add_callback(responses.POST, self.url, callback=callback)
    m.post(self.url, callback=callback)

    resp = post(self.server, 'home', json=data)
    self.assertEqual(status, resp.code)
    self.assertDictEqual(msg, resp.msg)

    resp = run(async_post(self.server, 'home', json=data))
    self.assertEqual(status, resp.code)
    self.assertDictEqual(msg, resp.msg)
def test_post_no_content_in_response(self, m):
    msg = ''
    status = 204
    responses.add(responses.POST, self.url, status=204)
    m.post(self.url, status=204)

    # unpack into fresh names so the expected status is not shadowed by the response
    resp_data, resp_status = post(self.server, 'home')
    self.assertEqual(status, resp_status)
    self.assertEqual(msg, resp_data)

    resp_data, resp_status = run(async_post(self.server, 'home'))
    self.assertEqual(status, resp_status)
    self.assertEqual(msg, resp_data)
def run_command_and_callback(operation: 'IOperationEncapsulation', params, context: Context, source: Server,
                             step_execution: StepExecution, event_id, identity, timeout=None):
    execution: StepExecution = db.session.merge(step_execution)
    exec_id = execution.id
    source = db.session.merge(source)
    start = get_now()
    try:
        cp = operation.execute(params, timeout=timeout, context=context)
    except Exception as e:
        cp = CompletedProcess(success=False,
                              stderr=f"Error while executing operation. {format_exception(e)}",
                              start_time=start,
                              end_time=get_now())
    finally:
        execution.load_completed_result(cp)

    data = dict(step_execution=execution.to_json())
    if execution.child_orch_execution:
        data['step_execution'].update(
            orch_execution=execution.child_orch_execution.to_json(add_step_exec=True))

    # commit after data is dumped
    try:
        db.session.commit()
    except Exception:
        current_app.logger.exception(f"Error on commit for execution {exec_id}")

    resp, code = ntwrk.post(server=source, view_or_url='api_1_0.events',
                            view_data={'event_id': event_id}, json=data, identity=identity)
    if code != 202:
        current_app.logger.error(f"Error while sending result for execution {exec_id}: {code}, {resp}")
    return data
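
# Hedged sketch: `format_exception` above is a project helper whose implementation is not shown
# here. A minimal stand-in, assuming it only needs to flatten an exception (with its traceback,
# when available) into one string for CompletedProcess.stderr, could look like this:
import traceback


def format_exception_sketch(exc: BaseException) -> str:
    """Return the exception type, message and traceback as a single string."""
    return ''.join(traceback.format_exception(type(exc), exc, exc.__traceback__)).strip()


if __name__ == '__main__':
    try:
        1 / 0
    except ZeroDivisionError as e:
        print(format_exception_sketch(e))
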
def send():
    def search_cost(ssa, route_list):
        cost = [route['cost'] for route in route_list if str(ssa.server.id) == route['destination_id']]
        if cost:
            if cost[0] is None:
                cost = 999999
            else:
                cost = cost[0]
        else:
            cost = 999999
        return cost

    # Validate Data
    json_data = request.get_json()

    dest_server = Server.query.get_or_raise(json_data['dest_server_id'])

    if 'software_id' in json_data:
        software = Software.query.get_or_raise(json_data['software_id'])

        ssa = SoftwareServerAssociation.query.filter_by(server=g.server, software=software).one_or_none()
        # if current server does not have the software, forward request to the closest server who has it
        if not ssa:
            resp = ntwrk.get(dest_server, 'api_1_0.routes', timeout=5)
            if resp.code == 200:
                ssas = copy.copy(software.ssas)
                ssas.sort(key=functools.partial(search_cost, route_list=resp.msg['route_list']))
            else:
                # unable to get route cost, we take the first option we have
                ssas = list(software.ssas)
                random.shuffle(ssas)  # shuffle works in place and returns None
            if not ssas:
                raise errors.NoSoftwareServer(software_id=str(software.id))
            server = ssas[0].server  # closest server from dest_server who has the software
            resp = ntwrk.post(server, 'api_1_0.send', json=json_data)
            resp.raise_if_not_ok()
            return resp.msg, resp.code
        else:
            file = os.path.join(ssa.path, software.filename)
            if not os.path.exists(file):
                raise errors.FileNotFound(file)
            size = ssa.software.size
    else:
        file = json_data['file']
        if os.path.exists(file):
            size = os.path.getsize(file)
            checksum = md5(json_data.get('file'))
        else:
            raise errors.FileNotFound(file)

    chunk_size = d.CHUNK_SIZE * 1024 * 1024
    max_senders = min(json_data.get('max_senders', d.MAX_SENDERS), d.MAX_SENDERS)
    chunks = math.ceil(size / chunk_size)

    if 'software_id' in json_data:
        json_msg = dict(software_id=str(software.id), num_chunks=chunks)
        if 'dest_path' in json_data:
            json_msg['dest_path'] = json_data.get('dest_path')
    else:
        json_msg = dict(dest_path=json_data['dest_path'],
                        filename=os.path.basename(json_data.get('file')),
                        size=size,
                        checksum=checksum,
                        num_chunks=chunks)
    # if dest_path not set, file will be sent to
    if 'force' in json_data:
        json_msg['force'] = json_data['force']

    resp = ntwrk.post(dest_server, 'api_1_0.transferlist', json=json_msg)
    resp.raise_if_not_ok()
    transfer_id = resp.msg.get('id')

    current_app.logger.debug(
        f"Transfer {transfer_id} created. Sending {file} to {dest_server}:{json_data.get('dest_path')}.")

    if json_data.get('background', True):
        executor.submit(asyncio.run,
                        async_send_file(dest_server=dest_server, transfer_id=transfer_id, file=file,
                                        chunk_size=chunk_size, max_senders=max_senders,
                                        identity=get_jwt_identity()))
    else:
        asyncio.run(async_send_file(dest_server=dest_server, transfer_id=transfer_id, file=file,
                                    chunk_size=chunk_size, max_senders=max_senders,
                                    identity=get_jwt_identity()))

    if json_data.get('include_transfer_data', False):
        resp = ntwrk.get(dest_server, "api_1_0.transferresource", view_data=dict(transfer_id=transfer_id))
        if resp.code == 200:
            msg = resp.msg
        else:
            resp.raise_if_not_ok()
    else:
        msg = {'transfer_id': transfer_id}

    return msg, 202 if json_data.get('background', True) else 201
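
# Hedged sketch: the `search_cost` helper above ranks software/server associations by the route
# cost reported by the destination server, pushing unknown or unreachable routes to the end with a
# sentinel of 999999. The same pattern in isolation, with plain dicts standing in for SSA objects
# and the routes payload (illustrative data only):
import functools


def search_cost_sketch(ssa: dict, route_list: list) -> int:
    cost = [r['cost'] for r in route_list if ssa['server_id'] == r['destination_id']]
    if cost and cost[0] is not None:
        return cost[0]
    return 999999  # unreachable or unknown route: sort it last


if __name__ == '__main__':
    route_list = [{'destination_id': 's1', 'cost': 2}, {'destination_id': 's2', 'cost': None}]
    ssas = [{'server_id': 's2'}, {'server_id': 's3'}, {'server_id': 's1'}]
    ssas.sort(key=functools.partial(search_cost_sketch, route_list=route_list))
    assert ssas[0]['server_id'] == 's1'  # cheapest reachable server comes first
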
def _notify_cluster_in(self):
    from dimensigon.domain.entities import Server
    import dimensigon.web.network as ntwrk
    from dimensigon.domain.entities import Parameter

    try:
        signaled = self._route_initiated.wait(timeout=120)
    except Exception:
        return
    if not signaled:
        self.logger.warning("Route Event not fired.")
    self.logger.debug("Notify Cluster")
    with self.dm.flask_app.app_context():
        not_notify = set()
        me = Server.get_current()
        msg = [r.to_json() for r in Route.query.options(orm.lazyload(Route.destination),
                                                        orm.lazyload(Route.gate),
                                                        orm.lazyload(Route.proxy_server)).all()]
        neighbours = Server.get_neighbours()
        if Parameter.get('join_server', None):
            join_server = Server.query.get(Parameter.get('join_server'))
        else:
            join_server = None

        now = get_now()
        msg = dict(keepalive=now.strftime(defaults.DATEMARK_FORMAT), routes=msg)
        if neighbours:
            random.shuffle(neighbours)
            # give priority to the server that provided the new gates, otherwise to the join server
            first = [s for s in neighbours if s.id == Parameter.get('new_gates_server', None)]
            if first:
                neighbours.pop(neighbours.index(first[0]))
                neighbours = first + neighbours
            elif join_server in neighbours:
                neighbours.pop(neighbours.index(join_server))
                neighbours = [join_server] + neighbours

            for s in neighbours:
                if s.id not in not_notify:
                    self.logger.debug(f"Sending 'Cluster IN' message to {s}")
                    resp = ntwrk.post(s, 'api_1_0.cluster_in', view_data=dict(server_id=str(me.id)),
                                      json=msg, timeout=10, auth=get_root_auth())
                    if resp.ok:
                        converted = []
                        for ident, str_keepalive, death in resp.msg['cluster']:
                            try:
                                keepalive = dt.datetime.strptime(str_keepalive, defaults.DATEMARK_FORMAT)
                            except ValueError:
                                continue
                            converted.append((ident, keepalive, death))
                        self.put_many(converted)
                        not_notify.update(resp.msg.get('neighbours', []))
                    else:
                        self.logger.debug(f"Unable to send 'Cluster IN' message to {s}. Response: {resp}")
                else:
                    self.logger.debug(f"Skipping server {s} from sending 'Cluster IN' message")
            # alive = [(getattr(Server.query.get(s_id), 'name', None) or s_id) for s_id in
            #          self.get_alive()]
            # self.logger.info(f"Alive servers: {', '.join(alive)}")
        else:
            self.logger.debug("No neighbour to send 'Cluster IN'")
    self.logger.debug("Notify Cluster ended")
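
# Hedged sketch: the 'Cluster IN' exchange above serializes keepalive timestamps with
# defaults.DATEMARK_FORMAT and silently skips entries that fail to parse. The round-trip in
# isolation looks like the snippet below; the format string here is an assumption for
# illustration only, the real value is defined in dimensigon's defaults module.
import datetime as dt

DATEMARK_FORMAT_SKETCH = '%Y-%m-%dT%H:%M:%S.%f'  # assumed format, not the project's actual constant


def parse_keepalive(raw: str):
    """Return a datetime, or None when the string does not match the expected format."""
    try:
        return dt.datetime.strptime(raw, DATEMARK_FORMAT_SKETCH)
    except ValueError:
        return None


if __name__ == '__main__':
    now = dt.datetime.utcnow()
    assert parse_keepalive(now.strftime(DATEMARK_FORMAT_SKETCH)) == now
    assert parse_keepalive('garbage') is None
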
def _execute(self, params: Kwargs, timeout=None, context: Context = None) -> CompletedProcess:
    input_params = params['input']
    cp = CompletedProcess()
    cp.set_start_time()

    # common parameters
    kwargs = dict(self.system_kwargs)  # copy so the shared defaults are not mutated
    kwargs['timeout'] = timeout or kwargs.get('timeout')
    kwargs['identity'] = context.env.get('executor_id')

    resp, exception = None, None

    def search_cost(ssa, route_list):
        cost = [route['cost'] for route in route_list if str(ssa.server.id) == route['destination_id']]
        if cost:
            if cost[0] is None:
                cost = 999999
            else:
                cost = cost[0]
        else:
            cost = 999999
        return cost

    # Software validation
    software = input_params.get('software', None)
    if is_valid_uuid(software):
        soft = Software.query.get(software)
        if not soft:
            cp.stderr = f"software id '{software}' not found"
            cp.success = False
            cp.set_end_time()
            return cp
    else:
        version = input_params.get('version', None)
        if version:
            parsed_ver = parse(str(version))
            soft_list = [s for s in Software.query.filter_by(name=software).all()
                         if s.parsed_version == parsed_ver]
        else:
            soft_list = sorted(Software.query.filter_by(name=software).all(),
                               key=lambda x: x.parsed_version)
        if soft_list:
            soft = soft_list[-1]
        else:
            cp.stderr = f"No software found for '{software}'" + (f" and version '{version}'" if version else "")
            cp.success = False
            cp.set_end_time()
            return cp

    if not soft.ssas:
        cp.stderr = f"{soft.id} has no server association"
        cp.success = False
        cp.set_end_time()
        return cp

    # Server validation
    server = input_params.get('server', None)
    if is_valid_uuid(server):
        dest_server = Server.query.get(server)
    else:
        dest_server = Server.query.filter_by(name=server).one_or_none()
    if not dest_server:
        cp.stderr = f"destination server {'id ' if is_valid_uuid(server) else ''}'{server}' not found"
        cp.success = False
        cp.set_end_time()
        return cp

    # decide best server source
    resp = ntwrk.get(dest_server, 'api_1_0.routes', timeout=10)
    if resp.code == 200:
        ssas = copy.copy(soft.ssas)
        ssas.sort(key=functools.partial(search_cost, route_list=resp.msg['route_list']))
    else:
        ssas = soft.ssas
    server = ssas[0].server

    # Process kwargs
    data = {'software_id': soft.id, 'dest_server_id': dest_server.id, "background": False,
            "include_transfer_data": True, "force": True}
    if input_params.get('dest_path', None):
        data.update(dest_path=input_params.get('dest_path', None))
    if input_params.get('chunk_size', None):
        data.update(chunk_size=input_params.get('chunk_size', None))
    if input_params.get('max_senders', None):
        data.update(max_senders=input_params.get('max_senders', None))

    # run request
    resp = ntwrk.post(server, 'api_1_0.send', json=data, **kwargs)

    cp.stdout = flask.json.dumps(resp.msg) if isinstance(resp.msg, dict) else resp.msg
    if resp.exception is not None:
        # guard against a None exception, which would otherwise set stderr to the string 'None'
        cp.stderr = str(resp.exception) or resp.exception.__class__.__name__
    cp.rc = resp.code
    if resp.exception is None:
        self.evaluate_result(cp)

    cp.set_end_time()
    return cp
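
# Hedged sketch: when the 'software' input is a name rather than a UUID, the operation above picks
# either the exact requested version or the highest available one by comparing parsed versions.
# The selection logic in isolation, with (name, version) tuples standing in for Software rows and
# assuming `parse` is packaging.version.parse (the project may wrap it differently):
from packaging.version import parse as parse_version


def pick_software(available: list, version: str = None):
    """available: list of (name, version_string) tuples; return the chosen tuple or None."""
    if version:
        wanted = parse_version(str(version))
        matches = [s for s in available if parse_version(s[1]) == wanted]
        return matches[0] if matches else None
    ordered = sorted(available, key=lambda s: parse_version(s[1]))
    return ordered[-1] if ordered else None


if __name__ == '__main__':
    rows = [('dimensigon', '0.1.0'), ('dimensigon', '0.2.1'), ('dimensigon', '0.2.0')]
    assert pick_software(rows) == ('dimensigon', '0.2.1')          # latest version wins by default
    assert pick_software(rows, '0.1.0') == ('dimensigon', '0.1.0')  # exact version when requested
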