def _setup_migration_logstore(endpoint, project, access_key_id, access_key):
    """Ensure the migration logstore and its full-text index exist.

    Creates the logstore named by the module-level ``_migration_logstore``
    inside *project*, then creates a default full-text index on it.  Both
    steps are idempotent: "already exists" errors from the service are
    swallowed, any other service error is re-raised.

    :param endpoint: SLS service endpoint.
    :param project: name of the SLS project to create the logstore in.
    :param access_key_id: Aliyun access key id.
    :param access_key: Aliyun access key secret.
    :raises LogException: for any service error other than
        ``LogStoreAlreadyExist`` / ``IndexAlreadyExist``.
    """
    log_client = LogClient(
        endpoint=endpoint,
        accessKeyId=access_key_id,
        accessKey=access_key,
    )
    try:
        log_client.create_logstore(
            project_name=project,
            logstore_name=_migration_logstore,
        )
    except LogException as exc:
        # Idempotent: the logstore may exist from a previous run.
        if exc.get_error_code() != "LogStoreAlreadyExist":
            raise

    # Build the index configuration outside the try block: only the
    # create_index call can raise LogException, so keep the guarded
    # region minimal.
    tokens = [
        ',', ' ', "'", '"', ';', '=', '(', ')', '[', ']',
        '{', '}', '?', '@', '&', '<', '>', '/', ':', '\n', '\t', '\r',
    ]
    line_config = IndexLineConfig(token_list=tokens)
    config = IndexConfig(line_config=line_config)
    try:
        log_client.create_index(project, _migration_logstore, config)
    except LogException as exc:
        # Idempotent: the index may already exist.
        if exc.get_error_code() != "IndexAlreadyExist":
            raise
def main():
    """End-to-end smoke test for the SLS log handler.

    Reads credentials from the ``ALIYUN_LOG_SAMPLE_*`` environment
    variables, creates a throw-away project and logstore, exercises the
    simple / JSON-extract / KV-extract handler paths plus file-based
    logger configuration, then tears the project down.

    :raises ValueError: when any required environment variable is empty.
    """
    endpoint = os.environ.get('ALIYUN_LOG_SAMPLE_ENDPOINT', '')
    accessKeyId = os.environ.get('ALIYUN_LOG_SAMPLE_ACCESSID', '')
    accessKey = os.environ.get('ALIYUN_LOG_SAMPLE_ACCESSKEY', '')
    # Unique project name per run so retried/parallel runs don't collide.
    project = 'python-sdk-test' + str(time()).replace('.', '-')
    logstore = 'logstore'

    # Explicit raise instead of `assert`: assertions are stripped under
    # `python -O`, which would silently skip this validation.
    if not (endpoint and accessKeyId and accessKey):
        raise ValueError("endpoint/access_id/key cannot be empty")

    client = LogClient(endpoint, accessKeyId, accessKey, "")

    print("****create project", project)
    client.create_project(project, "SDK test")
    sleep(10)

    try:
        print("****create logstore", logstore)
        client.create_logstore(project, logstore, 1, 1)
        sleep(60)  # logstore creation is asynchronous; wait for readiness

        test_log_handler(endpoint, accessKeyId, accessKey, project, logstore)
        sleep(60)  # allow the handler's logs to be ingested server-side

        res = client.pull_log(project, logstore, 0, time() - 3600, time())
        for x in res:
            # Fetch once: print and assert on the same result.
            logs = x.get_flatten_logs_json()
            print(logs)
            assert len(logs) == 10
            break

        # test extract json
        test_log_handler_json(endpoint, accessKeyId, accessKey, project, logstore)

        # test extract kv
        test_log_handler_kv(endpoint, accessKeyId, accessKey, project, logstore)
        sleep(60)

        # test using file to configure logger
        os.environ['ALIYUN_LOG_SAMPLE_TMP_PROJECT'] = project
        # os.path.join instead of os.sep.join: when dirname(__file__) is
        # empty (script run from its own directory), os.sep.join would
        # produce the broken absolute path '/logging.conf'.
        config_path = os.path.join(os.path.dirname(__file__), 'logging.conf')
        logging.config.fileConfig(config_path)

        # create logger
        logger = logging.getLogger('sls')
        logger.info("log handler test via config file")
        sleep(20)
    finally:
        # Always clean up the throw-away project, even on failure.
        clean_project(client, project)
# The SLS Log Store name log_store_name = 'actiontrail_{trail_name}'.format(trail_name=trail_name) # 创建日志服务 # Create a Log Project res = client.create_project(log_project_name, '操作审计事件日志项目') res.log_print() # 等待一段时间,因为创建日志项目是异步的 # Wait for a while, because create a Log Project is asynchronous time.sleep(120) # 创建日志库 # Create Log Store res = client.create_logstore(log_project_name, log_store_name, shard_count=3, preserve_storage=True) res.log_print() # 从log_index.json中读取索引配置 # Get Log Index config from json file index_json = get_json_data('./log_config/log_index.json') index_detail = IndexConfig() index_detail.from_json(index_json) # 创建索引 # Create Log Index res = client.create_index(log_project_name, log_store_name, index_detail) res.log_print() # 从log_dashboard.json中读取报表配置 # Get Log Dashboard config from json file