def send_request(self, request, root_certificate=None):
    generic_request = ProtoUtil.compose_generic_request(request=request)
    if self.RESPONSE_MESSAGE_TYPE:
        generic_request.message_type = ProtoUtil.infer_str_from_message_type(
            message_type=self.RESPONSE_MESSAGE_TYPE)
    self._logger.info("Client getting request of uuid [" + generic_request.uuid +
                      '] in client [' + self.get_client_name() + '].')
    self.sys_log("Client getting request of uuid [" + generic_request.uuid +
                 '] in client [' + self.get_client_name() + '].')
    try:
        options = [
            ('grpc.max_receive_message_length',
             int(EnvUtil.get_pslx_env_variable(var='PSLX_GRPC_MAX_MESSAGE_LENGTH'))),
            ('grpc.max_send_message_length',
             int(EnvUtil.get_pslx_env_variable(var='PSLX_GRPC_MAX_MESSAGE_LENGTH'))),
        ]
        timeout = int(EnvUtil.get_pslx_env_variable(var='PSLX_GRPC_TIMEOUT'))
        if not root_certificate:
            self._logger.info("Start with insecure channel in client [" +
                              self.get_client_name() + '].')
            with grpc.insecure_channel(self._server_url, options=options) as channel:
                stub = GenericRPCServiceStub(channel=channel)
                response = stub.SendRequest(request=generic_request, timeout=timeout)
        else:
            self._logger.info("Start with secure channel in client [" +
                              self.get_client_name() + '].')
            channel_credential = grpc.ssl_channel_credentials(root_certificate)
            with grpc.secure_channel(self._server_url, channel_credential,
                                     options=options) as channel:
                stub = GenericRPCServiceStub(channel=channel)
                response = stub.SendRequest(request=generic_request, timeout=timeout)
        if not self.RESPONSE_MESSAGE_TYPE:
            self.sys_log("Response message type unset, return None instead.")
            return None
        else:
            return ProtoUtil.any_to_message(
                message_type=self.RESPONSE_MESSAGE_TYPE,
                any_message=response.response_data)
    except Exception as err:
        self._logger.error("send request with error " + str(err) +
                           ' in client [' + self.get_client_name() + '].')
        self.sys_log("send request with error " + str(err) +
                     ' in client [' + self.get_client_name() + '].')
        return None
def __init__(self, client_name, server_url):
    super().__init__(client_name=client_name, server_url=server_url)
    self._logger = glogging.get_logger(
        log_name=self.get_client_name(),
        log_dir=EnvUtil.get_pslx_env_variable(var='PSLX_DEFAULT_LOG_DIR') +
        'PSLX_INTERNAL/container_backend_client')
    self._backend_folder = FileUtil.join_paths_to_dir(
        root_dir=EnvUtil.get_pslx_env_variable('PSLX_INTERNAL_METADATA_DIR'),
        base_name='PSLX_CONTAINER_BACKEND_TABLE')
@classmethod
def convert_local_to_cell_path(cls, path, cell=''):
    if not path:
        return ''
    if cls.is_local_path(path) and EnvUtil.get_other_env_variable(var='GALAXY_fs_cell'):
        cell_name = cell if cell else EnvUtil.get_other_env_variable(var='GALAXY_fs_cell')
        path = path.replace('/LOCAL', '/galaxy/' + cell_name + '-d')
    return path
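A minimal usage sketch of the conversion above. Only the '/LOCAL' to '/galaxy/<cell>-d' rewrite comes from the code itself; the class name PathUtil and the cell names are hypothetical placeholders.

# Assuming GALAXY_fs_cell resolves to 'ab' (hypothetical cell name):
PathUtil.convert_local_to_cell_path(path='/LOCAL/data/table.pb')             # -> '/galaxy/ab-d/data/table.pb'
# An explicit cell argument takes precedence over the environment variable:
PathUtil.convert_local_to_cell_path(path='/LOCAL/data/table.pb', cell='cd')  # -> '/galaxy/cd-d/data/table.pb'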
def __init__(self, rpc_storage):
    super().__init__(service_name=self.get_class_name(), rpc_storage=rpc_storage)
    self._logger = LoggingTool(
        name='PSLX_CONTAINER_BACKEND_RPC',
        ttl=EnvUtil.get_pslx_env_variable(var='PSLX_INTERNAL_TTL'))
    self._lru_cache_tool = LRUCacheTool(
        max_capacity=EnvUtil.get_pslx_env_variable(var='PSLX_INTERNAL_CACHE'))
    self._backend_folder = FileUtil.join_paths_to_dir(
        root_dir=EnvUtil.get_pslx_env_variable('PSLX_DATABASE'),
        base_name='PSLX_CONTAINER_BACKEND_TABLE')
def __init__(self, rpc_storage):
    super().__init__(service_name=self.get_class_name(), rpc_storage=rpc_storage)
    self._logger = glogging.get_logger(
        log_name='PSLX_CONTAINER_BACKEND_RPC',
        log_dir=EnvUtil.get_pslx_env_variable(var='PSLX_DEFAULT_LOG_DIR') +
        'PSLX_INTERNAL/container_backend_rpc')
    self._lru_cache_tool = LRUCacheTool(
        max_capacity=int(EnvUtil.get_pslx_env_variable(var='PSLX_INTERNAL_CACHE')))
    self._backend_folder = FileUtil.join_paths_to_dir(
        root_dir=EnvUtil.get_pslx_env_variable('PSLX_INTERNAL_METADATA_DIR'),
        base_name='PSLX_CONTAINER_BACKEND_TABLE')
def __init__(self, rpc_storage):
    super().__init__(service_name=self.get_class_name(), rpc_storage=rpc_storage)
    self._lru_cache_tool = LRUCacheTool(
        max_capacity=EnvUtil.get_pslx_env_variable(var='PSLX_INTERNAL_CACHE'))
    self._storage_type_to_impl_func = {
        StorageType.DEFAULT_STORAGE: self._default_storage_impl,
        StorageType.FIXED_SIZE_STORAGE: self._fixed_size_storage_impl,
        StorageType.PROTO_TABLE_STORAGE: self._proto_table_storage_impl,
        StorageType.PARTITIONER_STORAGE: self._partitioner_storage_impl,
    }
    self._logger = LoggingTool(
        name='PSLX_RPC_IO_RPC',
        ttl=EnvUtil.get_pslx_env_variable(var='PSLX_INTERNAL_TTL'))
def __init__(self, container_name):
    super().__init__(container_name)
    self._logger = glogging.get_logger(
        log_name=(ProtoUtil.get_name_by_value(enum_type=DataModelType, value=self.DATA_MODEL) +
                  '__' + self.get_class_name() + '__' + container_name),
        log_dir=EnvUtil.get_pslx_env_variable('PSLX_DEFAULT_LOG_DIR'))
@classmethod
def get_cell_path_local_path(cls, path, cell=''):
    if cls.is_local_path(path) or '/galaxy/' not in path:
        return path
    try:
        this_cell = cell if cell else EnvUtil.get_other_env_variable(var='GALAXY_fs_cell')
        with open(EnvUtil.get_other_env_variable('GALAXY_fs_global_config'), 'r') as infile:
            config = json.load(infile)
        root = config[this_cell]['fs_root']
        if root and root[-1] != '/':
            root += '/'
        return root + '/'.join(path.split('/')[3:])
    except Exception as _:
        return ''
def __init__(self, connection_str):
    super().__init__()
    self._logger = LoggingTool(
        name=self.get_class_name(),
        ttl=EnvUtil.get_pslx_env_variable('PSLX_INTERNAL_TTL'))
    self._connection_str = connection_str
    self._queue_consumers = []
def get_response_impl(backend_folder, request, lru_cache=None):
    storage_value = ContainerBackendValue()
    storage_value.container_name = request.container_name
    storage_value.container_status = request.status
    for operator_name, operator_snapshot in dict(request.operator_snapshot_map).items():
        operator_info = ContainerBackendValue.OperatorInfo()
        operator_info.status = operator_snapshot.status
        for parent in operator_snapshot.node_snapshot.parents_names:
            operator_info.parents.append(parent)
        operator_info.start_time = operator_snapshot.start_time
        operator_info.end_time = operator_snapshot.end_time
        operator_info.log_file = operator_snapshot.log_file
        storage_value.operator_info_map[operator_name].CopyFrom(operator_info)
    storage_value.mode = request.mode
    storage_value.data_model = request.data_model
    storage_value.updated_time = str(TimezoneUtil.cur_time_in_pst())
    storage_value.start_time = request.start_time
    storage_value.end_time = request.end_time
    storage_value.log_file = request.log_file
    storage_value.run_cell = request.run_cell
    storage_value.snapshot_cell = request.snapshot_cell
    for key in request.counters:
        storage_value.counters[key] = request.counters[key]
    storage_value.ttl = int(EnvUtil.get_pslx_env_variable('PSLX_BACKEND_CONTAINER_TTL'))

    storage = lru_cache.get(key=storage_value.container_name) if lru_cache else None
    if not storage:
        storage = ProtoTableStorage()
        storage.initialize_from_file(file_name=FileUtil.join_paths_to_file(
            root_dir=backend_folder,
            base_name=storage_value.container_name + '.pb'))
        if lru_cache:
            # Cache under the container name so the lookup above can hit it next time.
            lru_cache.set(key=storage_value.container_name, value=storage)
    all_data = storage.read_all()
    if len(all_data) >= int(EnvUtil.get_pslx_env_variable('PSLX_INTERNAL_CACHE')) > 0:
        key_to_delete = sorted(all_data.keys())[0]
        storage.delete(key=key_to_delete)
    storage.write(data={storage_value.start_time: storage_value})
def __init__(self, connection_str):
    super().__init__()
    self._logger = glogging.get_logger(
        log_name=self.get_class_name(),
        log_dir=EnvUtil.get_pslx_env_variable(var='PSLX_DEFAULT_LOG_DIR') +
        'PSLX_INTERNAL/msg_queue_consumer')
    self._connection_str = connection_str
    self._queue_consumers = []
@classmethod
def check_health_and_qps(cls, server_url):
    request = HealthCheckerRequest()
    request.server_url = server_url
    timeout = int(EnvUtil.get_pslx_env_variable(var='PSLX_GRPC_TIMEOUT'))
    with grpc.insecure_channel(server_url) as channel:
        stub = GenericRPCServiceStub(channel=channel)
        try:
            response = stub.CheckHealth(
                request=request,
                metadata=[('pslx_rpc_password',
                           EnvUtil.get_pslx_env_variable('PSLX_RPC_PASSWORD'))],
                timeout=timeout)
            return response.server_status, response.server_qps
        except Exception as _:
            return Status.FAILED, 0
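A hedged usage sketch for the health check above. ExampleClient stands in for whichever class defines check_health_and_qps, and the server URL is a placeholder; only the (status, qps) return pair and the Status.FAILED fallback come from the code itself.

status, qps = ExampleClient.check_health_and_qps(server_url='localhost:11111')
if status == Status.FAILED:
    # Channel could not be reached, the password metadata was rejected, or the call timed out.
    print('health check failed')
else:
    print('server status:', status, 'qps:', qps)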
def __init__(self):
    super().__init__(operator_name='FRONTEND_DEDICATED_LOGGING_OP')
    self._pslx_dedicated_logging_storage = ProtoTableStorage(logger=pslx_frontend_logger)
    self._pslx_dedicated_logging_storage.initialize_from_file(
        file_name=pslx_dedicated_logging_storage_path)
    self._cached_logging = {}
    self._logging_storage_capacity = max(
        int(EnvUtil.get_pslx_env_variable(var='PSLX_INTERNAL_CACHE')), 700)
def __init__(self, channel_name, webhook_url, server_url):
    super().__init__(client_name=self.get_class_name(), server_url=server_url)
    self._webhook_url = webhook_url
    self._channel_name = channel_name
    self._logger = glogging.get_logger(
        log_name=channel_name,
        log_dir=EnvUtil.get_pslx_env_variable(var='PSLX_DEFAULT_LOG_DIR') +
        'PSLX_INTERNAL/im_client')
def __init__(self, channel_name, webhook_url, server_url):
    super().__init__(client_name=self.get_class_name(), server_url=server_url)
    self._webhook_url = webhook_url
    self._channel_name = channel_name
    self._logger = LoggingTool(
        name=channel_name,
        ttl=EnvUtil.get_pslx_env_variable(var='PSLX_INTERNAL_TTL'))
def __init__(self, logger=None,
             max_capacity=EnvUtil.get_pslx_env_variable('PSLX_INTERNAL_CACHE')):
    super().__init__(logger=logger)
    self._file_tree = None
    self._max_capacity = int(max_capacity)
    self._underlying_storage = DefaultStorage(logger=logger)
def send_request(self, request):
    self._response = None
    generic_request = GenericRPCRequest()
    generic_request.request_data.CopyFrom(ProtoUtil.message_to_any(message=request))
    generic_request.timestamp = str(TimezoneUtil.cur_time_in_pst())
    self._corr_id = generic_request.uuid = str(uuid.uuid4())
    if self.RESPONSE_MESSAGE_TYPE:
        generic_request.message_type = ProtoUtil.infer_str_from_message_type(
            message_type=self.RESPONSE_MESSAGE_TYPE)
    self._SYS_LOGGER.info("Getting request of uuid [" + generic_request.uuid +
                          '] in queue [' + self.get_queue_name() + '].')
    self._logger.info("Getting request of uuid [" + generic_request.uuid +
                      '] in queue [' + self.get_queue_name() + '].')
    try:
        generic_request_str = ProtoUtil.message_to_string(proto_message=generic_request)
        self._channel.basic_publish(
            exchange='',
            routing_key=self._queue_name,
            properties=pika.BasicProperties(reply_to=self._callback_queue,
                                            correlation_id=self._corr_id),
            body=base64.b64encode(generic_request_str))
        wait_start_time = TimezoneUtil.cur_time_in_pst()
        while not self._response:
            self._connection.process_data_events(
                time_limit=int(EnvUtil.get_pslx_env_variable('PSLX_QUEUE_TIMEOUT')))
            if TimezoneUtil.cur_time_in_pst() - wait_start_time > datetime.timedelta(
                    seconds=int(EnvUtil.get_pslx_env_variable('PSLX_QUEUE_TIMEOUT'))):
                break
        if not self.RESPONSE_MESSAGE_TYPE or self._response is None:
            return None
        else:
            return ProtoUtil.any_to_message(
                message_type=self.RESPONSE_MESSAGE_TYPE,
                any_message=self._response.response_data)
    except Exception as err:
        self._logger.error('Queue [' + self.get_queue_name() +
                           "] send request with error " + str(err) + '.', publish=True)
        self._SYS_LOGGER.error('Queue [' + self.get_queue_name() +
                               "] send request with error " + str(err) + '.')
def __init__(self, rpc_storage):
    super().__init__(service_name=self.get_class_name(), rpc_storage=rpc_storage)
    self._logger = LoggingTool(
        name="PSLX_EMAIL_RPC",
        ttl=EnvUtil.get_pslx_env_variable(var='PSLX_INTERNAL_TTL'))
    self._credentials = {}
    self._email_servers = {}
def _auth(self, context):
    if context:
        auth = {}
        for key, value in context.invocation_metadata():
            auth[key] = value
        if ('pslx_rpc_password' not in auth or
                auth['pslx_rpc_password'] != EnvUtil.get_pslx_env_variable('PSLX_RPC_PASSWORD')):
            return False
    return True
def __init__(self, server_name):
    super().__init__()
    self._server_name = server_name
    self._logger = glogging.get_logger(
        log_name=self.get_server_name(),
        log_dir=EnvUtil.get_pslx_env_variable(var='PSLX_DEFAULT_LOG_DIR') +
        'PSLX_INTERNAL/generic_server')
    self._url = None
    self._rpc_server = None
    self._has_added_rpc = False
def __init__(self, rpc_storage):
    super().__init__(service_name=self.get_class_name(), rpc_storage=rpc_storage)
    self._logger = glogging.get_logger(
        log_name="PSLX_EMAIL_RPC",
        log_dir=EnvUtil.get_pslx_env_variable(var='PSLX_DEFAULT_LOG_DIR') +
        'PSLX_INTERNAL/email_rpc')
    self._credentials = {}
    self._email_servers = {}
def view_proto_table():
    value_types = EnvUtil.get_all_schemas(pslx_frontend_ui_app.config['schemas'])
    if request.method == 'POST':
        try:
            proto_table_path = request.form['proto_table_path'].strip()
            selected_value_type = request.form['value_type'].strip()
            modules = selected_value_type.split('.')
            module, value_type = '.'.join(modules[:-1]), modules[-1]
            pslx_frontend_logger.info("Proto table viewer input path [" + proto_table_path +
                                      '] with value type [' + value_type +
                                      '] in module name [' + module + '].')
            result = FileUtil.read_proto_from_file(proto_type=ProtoTable,
                                                   file_name=proto_table_path)
            value_type = ProtoUtil.infer_message_type_from_str(
                message_type_str=value_type, modules=module)
            proto_contents = []
            result_content = dict(result.data)
            for key in sorted(result_content.keys()):
                proto_val = ProtoUtil.any_to_message(
                    message_type=value_type, any_message=result_content[key])
                try:
                    proto_contents.append({
                        'key': key,
                        'val': ProtoUtil.message_to_text(proto_message=proto_val),
                    })
                except Exception as err:
                    pslx_frontend_logger.error(
                        "Proto table viewer Parsing proto with error " + str(err) + '.')
                    proto_contents.append({
                        'key': key,
                        'val': str(proto_val),
                    })
            value_types.remove(selected_value_type)
            return render_template('proto_table_viewer.html',
                                   proto_contents=proto_contents,
                                   value_types=value_types,
                                   selected_value_type=selected_value_type)
        except Exception as err:
            pslx_frontend_logger.error(
                "Got error rendering proto_table_viewer.html: " + str(err) + '.')
            return render_template('proto_table_viewer.html',
                                   proto_contents=[],
                                   value_types=value_types,
                                   selected_value_type='')
    else:
        return render_template('proto_table_viewer.html',
                               proto_contents=[],
                               value_types=value_types,
                               selected_value_type='')
def __init__(self, rpc_storage):
    super().__init__(service_name=self.get_class_name(), rpc_storage=rpc_storage)
    self._type_to_sender_map = {
        InstantMessagingType.SLACK: self._send_by_slack,
        InstantMessagingType.ROCKETCHAT: self._send_by_rocketchat,
        InstantMessagingType.TEAMS: self._send_by_teams,
    }
    self._logger = LoggingTool(
        name='PSLX_INSTANT_MESSAGING_RPC',
        ttl=EnvUtil.get_pslx_env_variable(var='PSLX_INTERNAL_TTL'))
def __init__(self, rpc_storage):
    super().__init__(service_name=self.get_class_name(), rpc_storage=rpc_storage)
    self._type_to_sender_map = {
        InstantMessagingType.SLACK: self._send_by_slack,
        InstantMessagingType.ROCKETCHAT: self._send_by_rocketchat,
        InstantMessagingType.TEAMS: self._send_by_teams,
    }
    self._logger = glogging.get_logger(
        log_name='PSLX_INSTANT_MESSAGING_RPC',
        log_dir=EnvUtil.get_pslx_env_variable(var='PSLX_DEFAULT_LOG_DIR') +
        'PSLX_INTERNAL/im_rpc')
def __init__(self, consumer_name):
    super().__init__()
    self._consumer_name = consumer_name
    self._logger = LoggingTool(
        name=consumer_name,
        ttl=EnvUtil.get_pslx_env_variable('PSLX_INTERNAL_TTL'))
    self._connection_str = ''
    self._exchange = ''
    self._connection = None
    self._queue = None
    self._thread = None
    self._has_added_queue = False
def get_response_and_status_impl(self, request):
    storage_value = ContainerBackendValue()
    storage_value.container_name = request.container_name
    storage_value.container_status = request.status
    for operator_name, operator_snapshot in dict(request.operator_snapshot_map).items():
        operator_info = ContainerBackendValue.OperatorInfo()
        operator_info.status = operator_snapshot.status
        for parent in operator_snapshot.node_snapshot.parents_names:
            operator_info.parents.append(parent)
        operator_info.start_time = operator_snapshot.start_time
        operator_info.end_time = operator_snapshot.end_time
        storage_value.operator_info_map[operator_name].CopyFrom(operator_info)
    storage_value.mode = request.mode
    storage_value.data_model = request.data_model
    storage_value.updated_time = str(TimezoneUtil.cur_time_in_pst())
    storage_value.start_time = request.start_time
    storage_value.end_time = request.end_time
    storage_value.log_dir = request.log_dir
    for key in request.counters:
        storage_value.counters[key] = request.counters[key]

    partitioner_dir = FileUtil.join_paths_to_dir_with_mode(
        root_dir=FileUtil.join_paths_to_dir(
            root_dir=self._backend_folder,
            base_name=ProtoUtil.get_name_by_value(
                enum_type=DataModelType, value=storage_value.data_model)),
        base_name=storage_value.container_name,
        ttl=EnvUtil.get_pslx_env_variable('PSLX_INTERNAL_TTL'))
    if storage_value.mode == ModeType.TEST:
        partitioner_dir = partitioner_dir.replace('PROD', 'TEST')
    storage = self._lru_cache_tool.get(key=partitioner_dir)
    if not storage:
        self.sys_log("Did not find the storage in cache. Making a new one...")
        storage = DailyPartitionerStorage()
        proto_table = ProtoTableStorage()
        storage.set_underlying_storage(storage=proto_table)
        storage.initialize_from_dir(dir_name=partitioner_dir)
        self._lru_cache_tool.set(key=partitioner_dir, value=storage)
    else:
        self.sys_log("Found key in LRU cache.")
    storage.write(
        data={storage_value.container_name: storage_value},
        params={
            'overwrite': True,
            'make_partition': True,
        })
    return None, Status.SUCCEEDED
def __init__(self, container_name, logger=DummyUtil.dummy_logger()):
    super().__init__()
    self._container_name = container_name
    self._is_initialized = False
    self._snapshot_file_folder = FileUtil.join_paths_to_dir(
        EnvUtil.get_pslx_env_variable(var='PSLX_SNAPSHOT_DIR'),
        self._container_name)
    self._start_time = None
    self._end_time = None
    self._logger = logger
    self._upstream_ops = []
    self._backend = None
    self._status = Status.IDLE
    self._counter = defaultdict(int)
def __init__(self, service_name, rpc_storage=None):
    Base.__init__(self)
    self._logger = DummyUtil.dummy_logger()
    self._service_name = service_name
    if rpc_storage:
        assert rpc_storage.get_storage_type() == StorageType.PARTITIONER_STORAGE
        if 'ttl' not in rpc_storage.get_dir_name():
            self._SYS_LOGGER.warning("Warning. Please ttl the request log table.")
        underlying_storage = ProtoTableStorage()
        rpc_storage.set_underlying_storage(storage=underlying_storage)
        rpc_storage.set_max_capacity(
            max_capacity=EnvUtil.get_pslx_env_variable('PSLX_INTERNAL_CACHE'))
    self._rpc_storage = rpc_storage
    self._request_timestamp = collections.deque()
    self._request_response_pair = {}
def __init__(self, consumer_name):
    super().__init__()
    self._consumer_name = consumer_name
    self._logger = glogging.get_logger(
        log_name=consumer_name,
        log_dir=EnvUtil.get_pslx_env_variable(var='PSLX_DEFAULT_LOG_DIR') +
        'PSLX_INTERNAL/msg_queue_consumer')
    self._connection_str = ''
    self._connection = None
    self._queue = None
    self._thread = None
    self._has_added_queue = False
@classmethod
def sys_log(cls, string):
    if EnvUtil.get_pslx_env_variable(var='PSLX_LOG'):
        try:
            caller = getframeinfo(stack()[1][0])
            print('[SYS-LOG] ' +
                  ColorsUtil.make_foreground_green('[file: %s]' % FileUtil.base_name(caller.filename)) + ' ' +
                  ColorsUtil.make_foreground_yellow('[line: %d]' % caller.lineno) + ' ' +
                  ColorsUtil.make_foreground_red('[%s]' % str(TimezoneUtil.cur_time_in_pst())) +
                  ': ' + string)
        except Exception as _:
            print('[SYS-LOG] ' +
                  ColorsUtil.make_foreground_red('[%s]' % str(TimezoneUtil.cur_time_in_pst())) +
                  ': ' + string)
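For reference, a sketch of the output produced by sys_log when the PSLX_LOG environment variable is set. The file name, line number, and timestamp below are illustrative only, and the ANSI color codes added by ColorsUtil are omitted.

# sys_log('backend table updated')
# prints (colors stripped):
# [SYS-LOG] [file: container_backend.py] [line: 42] [2024-01-01 10:00:00-08:00]: backend table updated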