Example #1
    def get_response_and_status_impl(self, request):
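        # Copy the container's status, per-operator snapshots, and counters from
        # the request into a ContainerBackendValue proto for persistence.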
        storage_value = ContainerBackendValue()
        storage_value.container_name = request.container_name
        storage_value.container_status = request.status
        for operator_name, operator_snapshot in dict(
                request.operator_snapshot_map).items():
            operator_info = ContainerBackendValue.OperatorInfo()
            operator_info.status = operator_snapshot.status
            for parent in operator_snapshot.node_snapshot.parents_names:
                operator_info.parents.append(parent)

            operator_info.start_time = operator_snapshot.start_time
            operator_info.end_time = operator_snapshot.end_time
            storage_value.operator_info_map[operator_name].CopyFrom(
                operator_info)

        storage_value.mode = request.mode
        storage_value.data_model = request.data_model
        storage_value.updated_time = str(TimezoneUtil.cur_time_in_pst())
        storage_value.start_time = request.start_time
        storage_value.end_time = request.end_time
        storage_value.log_dir = request.log_dir
        for key in request.counters:
            storage_value.counters[key] = request.counters[key]
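        # Resolve the partitioned storage directory as
        # <backend_folder>/<data model name>/<container name>, with the TTL taken
        # from the PSLX_INTERNAL_TTL environment variable.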
        partitioner_dir = FileUtil.join_paths_to_dir_with_mode(
            root_dir=FileUtil.join_paths_to_dir(
                root_dir=self._backend_folder,
                base_name=ProtoUtil.get_name_by_value(
                    enum_type=DataModelType, value=storage_value.data_model)),
            base_name=storage_value.container_name,
            ttl=EnvUtil.get_pslx_env_variable('PSLX_INTERNAL_TTL'))
        if storage_value.mode == ModeType.TEST:
            partitioner_dir = partitioner_dir.replace('PROD', 'TEST')
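        # Reuse the DailyPartitionerStorage cached for this directory if present;
        # otherwise build one backed by a ProtoTableStorage and cache it.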
        storage = self._lru_cache_tool.get(key=partitioner_dir)
        if not storage:
            self.sys_log(
                "Did not find the storage in cache. Making a new one...")
            storage = DailyPartitionerStorage()
            proto_table = ProtoTableStorage()
            storage.set_underlying_storage(storage=proto_table)
            storage.initialize_from_dir(dir_name=partitioner_dir)
            self._lru_cache_tool.set(key=partitioner_dir, value=storage)
        else:
            self.sys_log("Found key in LRU cache.")

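        # Persist the value keyed by container name, overwriting any existing
        # entry and allowing a new partition to be created.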
        storage.write(data={storage_value.container_name: storage_value},
                      params={
                          'overwrite': True,
                          'make_partition': True,
                      })
        return None, Status.SUCCEEDED
Example #2
    def __init__(self, container_name, logger=DummyUtil.dummy_logging(), ttl=-1):
        super().__init__()
        self._container_name = container_name
        self._is_initialized = False
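        # Snapshot files live under $PSLX_DATABASE/snapshots/<ClassName>__<container_name>,
        # expiring according to the given ttl.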
        self._snapshot_file_folder = FileUtil.join_paths_to_dir_with_mode(
            root_dir=FileUtil.join_paths_to_dir(
                root_dir=EnvUtil.get_pslx_env_variable(var='PSLX_DATABASE'),
                base_name='snapshots'
            ),
            base_name=self.get_class_name() + '__' + container_name,
            ttl=ttl
        )
        self._start_time = None
        self._end_time = None
        self._logger = logger
        self._upstream_ops = []
        self._backend = None
        self._status = Status.IDLE
        self._counter = defaultdict(int)
Example #3
    ttl=EnvUtil.get_pslx_env_variable(var='PSLX_INTERNAL_TTL'))

frontend_config_file = EnvUtil.get_pslx_env_variable(
    'PSLX_FRONTEND_CONFIG_PROTO_PATH')

assert frontend_config_file != ''
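# Load the FrontendConfig proto referenced by PSLX_FRONTEND_CONFIG_PROTO_PATH.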
pslx_frontend_ui_app.config['frontend_config'] = FileUtil.read_proto_from_file(
    proto_type=FrontendConfig, file_name=frontend_config_file)

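# Point SQLAlchemy at the SQLite database declared in the frontend config.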
pslx_frontend_ui_app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
pslx_frontend_ui_app.config['SQLALCHEMY_DATABASE_URI'] =\
    'sqlite:///' + pslx_frontend_ui_app.config['frontend_config'].sqlalchemy_database_path
pslx_frontend_logger.info(
    "sqlalchemy database uri " +
    str(pslx_frontend_ui_app.config['SQLALCHEMY_DATABASE_URI']) + '.')

pslx_frontend_db = SQLAlchemy(pslx_frontend_ui_app)

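# Two LRU caches, one for partitioner storages and one for proto table storages,
# both capped by the PSLX_INTERNAL_CACHE environment variable.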
pslx_partitioner_lru_cache = LRUCacheTool(
    max_capacity=EnvUtil.get_pslx_env_variable('PSLX_INTERNAL_CACHE'))
pslx_proto_table_lru_cache = LRUCacheTool(
    max_capacity=EnvUtil.get_pslx_env_variable('PSLX_INTERNAL_CACHE'))

pslx_dedicated_logging_storage_path = FileUtil.join_paths_to_dir_with_mode(
    root_dir=EnvUtil.get_pslx_env_variable('PSLX_DATABASE') +
    '/PSLX_DEDICATED_LOGGING',
    base_name='dedicated_logging.pb')

from pslx.micro_service.frontend.renderer import index_renderer, file_viewer_renderer, proto_viewer_renderer, \
    container_backend_renderer, proto_table_viewer_renderer, logging_renderer
Example #4
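        # Build a Slack webhook payload containing the message text and the
        # current time in PST.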
        slack_payload = "payload={'text':'" + request.message + "\nCurrent time is "\
                        + str(TimezoneUtil.cur_time_in_pst()) + "'}"
        status = Status.SUCCEEDED
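        # Post the payload to the Slack webhook; on failure, log the exception
        # and report Status.FAILED instead.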
        try:
            requests.post(request.webhook_url,
                          data=slack_payload,
                          headers=header)
        except Exception as err:
            self._logger.error("Slack failed to send message with err " +
                               str(err))
            status = Status.FAILED
        return None, status


if __name__ == "__main__":
    consumer = GenericConsumer(
        connection_str='amqp://*****:*****@localhost:5672')

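    # Back the Slack queue with a daily-partitioned directory under
    # $PSLX_DATABASE/msg_queue/msg_queue_example that expires after one hour.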
    partitioner_dir = FileUtil.join_paths_to_dir_with_mode(
        root_dir=FileUtil.join_paths_to_dir(
            root_dir=EnvUtil.get_pslx_env_variable(var='PSLX_DATABASE'),
            base_name='msg_queue'),
        base_name='msg_queue_example',
        ttl='1h')
    storage = DailyPartitionerStorage()
    storage.initialize_from_dir(dir_name=partitioner_dir)
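    # Bind the Slack queue to its exchange and start consuming messages from the
    # AMQP broker.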

    slack_queue = SlackQueue(queue_name='slack_queue', queue_storage=storage)
    consumer.bind_queue(exchange='slack_exchange', queue=slack_queue)
    consumer.start_consumer()
Example #5
from pslx.micro_service.proto_viewer.rpc import ProtoViewerRPC
from pslx.micro_service.rpc.generic_server import GenericServer
from pslx.storage.partitioner_storage import MinutelyPartitionerStorage
from pslx.util.file_util import FileUtil

if __name__ == "__main__":
    server_url = "localhost:11444"
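    # Back the proto viewer RPC with a minutely-partitioned storage directory
    # that expires after one hour, then serve it on localhost:11444.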
    partitioner_dir = FileUtil.join_paths_to_dir_with_mode(
        root_dir='database/proto_viewer/',
        base_name='proto_viewer_example',
        ttl='1h')
    storage = MinutelyPartitionerStorage()
    storage.initialize_from_dir(dir_name=partitioner_dir)
    example_rpc = ProtoViewerRPC(rpc_storage=storage)
    example_server = GenericServer(server_name='example')
    example_server.create_server(max_worker=1, server_url=server_url)
    example_server.bind_rpc(rpc=example_rpc)
    example_server.start_server()
Example #6
from pslx.micro_service.container_backend.rpc import ContainerBackendRPC
from pslx.micro_service.rpc.generic_server import GenericServer
from pslx.storage.partitioner_storage import MinutelyPartitionerStorage
from pslx.util.file_util import FileUtil

if __name__ == "__main__":
    server_url = "localhost:11443"
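    # Same pattern as the proto viewer example: a minutely-partitioned storage
    # backs the container backend RPC, served on localhost:11443.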
    partitioner_dir = FileUtil.join_paths_to_dir_with_mode(
        root_dir='database/container_backend/',
        base_name='container_backend_example',
        ttl=1)
    storage = MinutelyPartitionerStorage()
    storage.initialize_from_dir(dir_name=partitioner_dir)
    example_rpc = ContainerBackendRPC(rpc_storage=storage)
    example_server = GenericServer(server_name='example_backend')
    example_server.create_server(max_worker=1, server_url=server_url)
    example_server.bind_rpc(rpc=example_rpc)
    example_server.start_server()