def test_cur_time_from_str_1(self):
    """Parsing a timestamp string carrying a -08:00 offset yields the expected wall-clock fields."""
    raw = "2020-02-20 00:18:14.713721-08:00"
    expected = datetime.datetime(
        2020, 2, 20, 0, 18, 14, 713721
    ).replace(tzinfo=TimezoneObj.WESTERN_TIMEZONE)
    parsed = TimezoneUtil.cur_time_from_str(time_str=raw)
    # Strip tzinfo from both sides so only the naive wall-clock values are compared.
    self.assertEqual(
        parsed.replace(tzinfo=None), expected.replace(tzinfo=None))
def get_containers_info():
    """Collect a status dict for every container backend table found on disk.

    Gathers proto table files from the backend folder (fanned out across all
    cells when the backend path is not local), reads the newest record from
    each table, deletes tables whose TTL has elapsed, and de-duplicates by
    container name: the first record seen for a name is always kept, and later
    records are kept only when their updated_time is at least as recent.

    Returns:
        list[dict]: one entry per surviving container record.
    """
    containers_info = []
    # container_name -> updated_time of the first record seen for that name.
    first_seen_time = {}

    # Resolve which proto files to inspect.
    if not FileUtil.is_local_path(backend_folder):
        cells = ['']
    else:
        cells = gclient.list_cells()
    proto_files = set()
    for cell in cells:
        cell_folder = FileUtil.convert_local_to_cell_path(
            path=backend_folder, cell=cell)
        proto_files.update(FileUtil.list_files_in_dir(cell_folder))

    for file_path in proto_files:
        table = ProtoTableStorage()
        table.initialize_from_file(
            file_name=file_path
        )
        records = table.read_all()
        if not records:
            continue
        # The greatest key identifies the most recent record in the table.
        latest_key = max(records)
        backend_value = ProtoUtil.any_to_message(
            message_type=ContainerBackendValue,
            any_message=records[latest_key]
        )
        ttl_days = backend_value.ttl
        expired = (
            ttl_days > 0 and backend_value.updated_time and
            TimezoneUtil.cur_time_in_pst() - TimezoneUtil.cur_time_from_str(
                backend_value.updated_time) >= datetime.timedelta(days=ttl_days)
        )
        if expired:
            # Stale table: drop the backing file and skip it.
            FileUtil.remove_file(table.get_file_name())
            continue

        info = {
            'container_name': backend_value.container_name,
            'status': ProtoUtil.get_name_by_value(
                enum_type=Status, value=backend_value.container_status),
            'updated_time': backend_value.updated_time,
            'mode': ProtoUtil.get_name_by_value(
                enum_type=ModeType, value=backend_value.mode),
            'data_model': ProtoUtil.get_name_by_value(
                enum_type=DataModelType, value=backend_value.data_model),
            'run_cell': backend_value.run_cell,
            'snapshot_cell': backend_value.snapshot_cell,
        }
        name = info['container_name']
        if name not in first_seen_time:
            first_seen_time[name] = info['updated_time']
            containers_info.append(info)
        elif info['updated_time'] >= first_seen_time[name]:
            # NOTE(review): the recorded time is intentionally NOT refreshed
            # here — later records are compared against the first one seen,
            # matching the original behavior.
            containers_info.append(info)
    return containers_info
def test_cur_time_from_str_3(self):
    """Parsing a timestamp without fractional seconds or UTC offset yields a naive datetime."""
    time_str = "2020-02-20 00:18:14"
    # Fixed: the original chained a no-argument .replace() onto the datetime,
    # which is a no-op (it just returns an equal copy) — removed.
    expect_result = datetime.datetime(2020, 2, 20, 0, 18, 14)
    result = TimezoneUtil.cur_time_from_str(time_str=time_str)
    self.assertEqual(result, expect_result)
def _partitioner_storage_impl(self, request):
    """Serve a partitioner-storage read request and return an RPCIOResponse.

    The request's params dict drives the behavior:
      - 'PartitionerStorageType': which partitioner implementation to use
        (also part of the LRU cache key, together with request.dir_name).
      - 'is_proto_table': '1' means the underlying per-partition storage is a
        ProtoTableStorage; anything else means DefaultStorage.
      - 'base_name' (optional): file name within a partition directory;
        defaults to 'data.pb' for proto tables, otherwise 'data'.
      - 'start_time' absent: a point read of the latest (or, with
        'read_oldest', the oldest) partition.
      - 'start_time' present: a read_range call; 'start_time'/'end_time'
        strings are parsed into datetimes first.

    Side effects: caches the partitioner storage object in the LRU cache and,
    for proto tables, attaches a fresh ProtoTableStorage as its underlying
    storage; also mutates the local copy of the request params.
    """
    self._logger.info("Getting request of partitioner storage read.")
    # Copy so the pops/overwrites below don't touch the request object.
    read_params = dict(request.params)
    is_proto_table = True if read_params['is_proto_table'] == '1' else False
    if 'base_name' in read_params:
        base_name = read_params['base_name']
    else:
        # Default file name inside each partition directory.
        base_name = 'data.pb' if is_proto_table else 'data'

    # One cached storage object per (partitioner type, directory) pair.
    lru_key = (read_params['PartitionerStorageType'], request.dir_name)
    self._logger.info("Partitioner type is " + read_params['PartitionerStorageType'])
    storage = self._lru_cache_tool.get(key=lru_key)
    if not storage:
        self.sys_log("Did not find the storage in cache. Making a new one...")
        partitioner_type = ProtoUtil.get_value_by_name(
            enum_type=PartitionerStorageType,
            name=read_params['PartitionerStorageType']
        )
        storage = self.PARTITIONER_TYPE_TO_IMPL[partitioner_type]()
        storage.initialize_from_dir(dir_name=request.dir_name)
        self._lru_cache_tool.set(
            key=lru_key,
            value=storage
        )
    else:
        self.sys_log("Found key in LRU cache.")

    self._logger.info('Current cache size ' + str(self._lru_cache_tool.get_cur_capacity()))
    # These two keys are control parameters, not read parameters — drop them
    # before forwarding read_params to the storage layer.
    read_params.pop('PartitionerStorageType', None)
    read_params.pop('is_proto_table', None)
    if is_proto_table:
        proto_table_storage = ProtoTableStorage()
        storage.set_underlying_storage(storage=proto_table_storage)
    else:
        # -1 means "read all lines" for the default storage read below.
        read_params['num_line'] = -1

    response = RPCIOResponse()
    if 'start_time' not in read_params:
        # Point read: fetch a single partition's file (latest by default,
        # oldest when 'read_oldest' is supplied).
        if is_proto_table:
            # Underlying storage is a proto table.
            if 'message_type' in read_params:
                # Resolve the message type string against the caller-supplied
                # proto modules before reading.
                assert 'proto_module' in read_params
                read_params['message_type'] = ProtoUtil.infer_message_type_from_str(
                    message_type_str=read_params['message_type'],
                    modules=read_params['proto_module']
                )
            proto_storage = ProtoTableStorage()
            if 'read_oldest' in read_params:
                proto_storage.initialize_from_file(
                    file_name=FileUtil.join_paths_to_file(
                        root_dir=storage.get_oldest_dir_in_root_directory(),
                        base_name=base_name
                    )
                )
            else:
                proto_storage.initialize_from_file(
                    file_name=FileUtil.join_paths_to_file(
                        root_dir=storage.get_latest_dir(),
                        base_name=base_name
                    )
                )
            data = proto_storage.read_all()
            # Each table entry becomes a one-element list keyed by its table key.
            for key, val in data.items():
                rpc_list_data = RPCIOResponse.RPCListData()
                rpc_data = rpc_list_data.data.add()
                rpc_data.proto_data.CopyFrom(val)
                response.dict_data[key].CopyFrom(rpc_list_data)
        else:
            # Underlying storage is plain line-oriented storage.
            default_storage = DefaultStorage()
            if 'read_oldest' in read_params:
                default_storage.initialize_from_file(
                    file_name=FileUtil.join_paths_to_file(
                        root_dir=storage.get_oldest_dir_in_root_directory(),
                        base_name=base_name
                    )
                )
            else:
                default_storage.initialize_from_file(
                    file_name=FileUtil.join_paths_to_file(
                        root_dir=storage.get_latest_dir(),
                        base_name=base_name
                    )
                )
            data = default_storage.read(params={
                'num_line': -1,
            })
            # All lines go into a single flat list.
            rpc_list_data = RPCIOResponse.RPCListData()
            for item in data:
                rpc_data = rpc_list_data.data.add()
                rpc_data.string_data = item
            response.list_data.CopyFrom(rpc_list_data)
    else:
        # Range read: parse the time-string bounds, then delegate to read_range.
        if 'start_time' in read_params:
            read_params['start_time'] = TimezoneUtil.cur_time_from_str(
                time_str=read_params['start_time']
            )
        if 'end_time' in read_params:
            read_params['end_time'] = TimezoneUtil.cur_time_from_str(
                time_str=read_params['end_time']
            )
        data = storage.read_range(params=read_params)
        if data:
            for key, val in data.items():
                rpc_list_data = RPCIOResponse.RPCListData()
                if is_proto_table:
                    # Proto-table partitions: emit alternating key/value
                    # entries (string key, then its proto payload).
                    for proto_key, any_message in val.items():
                        rpc_data = rpc_list_data.data.add()
                        rpc_data.string_data = proto_key
                        rpc_data = rpc_list_data.data.add()
                        rpc_data.proto_data.CopyFrom(any_message)
                else:
                    # Plain partitions: each value is a list of lines.
                    for entry in val:
                        rpc_data = rpc_list_data.data.add()
                        rpc_data.string_data = entry
                response.dict_data[key].CopyFrom(rpc_list_data)
    return response