Exemple #1
0
def main():
    """Create an OSS export job that ships a logstore's data to OSS as CSV."""
    project_name = "my-test-project"

    # CSV layout for the exported objects.
    csv_detail = {
        "delimiter": ",",
        "quote": "",
        "lineFeed": "\n",
        "columns": ["__topic__", "__source__"],
    }
    oss_sink = AliyunOSSSink(
        "my-test-roleArn", "my-test-bucket", "my-prefix", "",
        "%Y/%m/%d/%H/%M", "time", 256, 300, "", "csv", "none",
        csv_detail,
    )

    config = ExportConfiguration()
    config.setRoleArn("my-test-roleArn")
    config.setLogstore("oss-source")
    config.setSink(oss_sink)
    # Start ten days in the past; a toTime of 0 keeps the export running.
    config.setFromTime(int(time.time()) - 864000)
    config.setToTime(0)

    job = Export()
    job.setConfiguration(config)
    job.setName("my-oss-sink")
    job.setDisplayName("my-oss-sink")

    client = LogClient("region", "ak", "ak_key")
    response = client.create_export(project_name, job)
    print(response.get_request_id())
    print(response.get_all_headers())
def cc_chart_get_geoip(request):
    """JSON view: per-city attack source counts with geo coordinates.

    The POST body carries 'time_zone' ("7day"/"24hour"/"1hour") and an
    optional 'domain' that scopes the query to one site.  Responds with
    {'result': True, 'message': [...]} or an error payload (errCode 400).
    """
    return_result = {}
    data = []
    try:
        user_id = request.session['user_id']
        json_data = json.loads(request.body)
        time_zone = json_data['time_zone']
        # Translate the requested window into a start timestamp (default 24h,
        # matching the original per-branch if/elif ladders).
        window = {"7day": 604800, "24hour": 86400, "1hour": 3600}
        from_time = int(time() - window.get(time_zone, 86400))
        try:
            # Domain-scoped SQL when the caller supplies 'domain'.
            domain = json_data['domain']
            req_sql = DOMAIN_GEO_SQL % (domain)
        except KeyError:
            req_sql = GEO_SQL
        # Connection settings come from the per-user WAF configuration row.
        global_result = waf_global.objects.get(user_id=user_id)
        endpoint = global_result.aliyun_log_endpoint.replace(
            'https://', '').replace('http://', '')
        accessKeyId = global_result.aliyun_access_id
        accessKey = global_result.aliyun_access_secret
        project = global_result.aliyun_project
        logstore = global_result.aliyun_logstore
        client = LogClient(endpoint, accessKeyId, accessKey)
        req = GetLogsRequest(project=project,
                             logstore=logstore,
                             fromTime=from_time,
                             toTime=int(time()),
                             topic='',
                             # BUG FIX: was hard-coded to GEO_SQL, so the
                             # domain-scoped SQL built above was never used.
                             query=req_sql)
        res = client.get_logs(req)
        for log_result in res.get_logs():
            geo_info = log_result.get_contents()['geo'].split(",")
            try:
                data.append({
                    'name': log_result.get_contents()['city'],
                    'ip_count': log_result.get_contents()['ip_count'],
                    # 'geo' components are emitted in reverse order for the chart.
                    'geo': [geo_info[1], geo_info[0]],
                    'count': log_result.get_contents()['count']
                })
            except (KeyError, IndexError):
                # Skip malformed rows instead of failing the whole chart.
                pass
        return_result['result'] = True
        return_result['message'] = data
        return JsonResponse(return_result, safe=False)
    except Exception as e:
        return_result['result'] = False
        return_result['message'] = str(e)
        return_result['errCode'] = 400
        return JsonResponse(return_result, safe=False)
Exemple #3
0
class MigrationLogstore(object):
    """Binds a LogClient to one project/logstore with a fixed topic and source."""

    def __init__(
        self,
        endpoint,
        access_id,
        access_key,
        project_name,
        logstore_name,
        topic,
        source,
    ):
        self._log_client = LogClient(
            endpoint=endpoint,
            accessKeyId=access_id,
            accessKey=access_key,
        )
        self._project_name = project_name
        self._logstore_name = logstore_name
        self._topic = topic
        self._source = source

    @property
    def name(self):
        """Name of the bound logstore."""
        return self._logstore_name

    def put_logs(self, logitems):
        """Write *logitems* to the bound logstore with the fixed topic/source."""
        request = PutLogsRequest(
            project=self._project_name,
            logstore=self._logstore_name,
            topic=self._topic,
            source=self._source,
            logitems=logitems,
        )
        self._log_client.put_logs(request)
def main():
    """Create a MaxCompute (ODPS) export job for a logstore."""
    target_project = "my-test-project"

    odps_sink = AliyunMaxComputeSink()
    odps_sink.setOdpsRolearn("my-test-roleArn")
    odps_sink.setOdpsEndpoint("my-test-endpoint")
    odps_sink.setOdpsTunnelEndpoint("my-test-tunnelendpoint")
    odps_sink.setOdpsProject("test")
    odps_sink.setOdpsTable("my_test_table")
    odps_sink.setTimeZone("+0800")
    odps_sink.setFields(
        ["acc_access_region", "http_method", "referer", "client_ip"])
    odps_sink.setPartitionColumn(["bucket"])
    odps_sink.setPartitionTimeFormat("%Y")

    config = ExportConfiguration()
    config.setRoleArn("my-test-roleArn")
    config.setLogstore("oss-source")
    config.setSink(odps_sink)
    # Ten days of history; a toTime of 0 keeps the export running.
    config.setFromTime(int(time.time()) - 864000)
    config.setToTime(0)

    job = Export()
    job.setConfiguration(config)
    job.setName("my-odps-sink")
    job.setDisplayName("my-odps-sink")

    client = LogClient("region", "ak", "ak_key")
    response = client.create_export(target_project, job)
    print(response.get_request_id())
    print(response.get_all_headers())
def attack_chart_get_type_top10(request):
    """JSON view: top-10 protection types by hit count for the chosen window."""
    return_result = {}
    data = []
    try:
        user_id = request.session['user_id']
        json_data = json.loads(request.body)
        time_zone = json_data['time_zone']
        # Window start timestamp; anything unrecognised means the last 24 hours.
        seconds = {"7day": 604800, "1hour": 3600}.get(time_zone, 86400)
        from_time = int(time() - seconds)
        try:
            req_sql = DOMAIN_ATT_TYPE_TOP10 % (json_data['domain'])
        except:
            # No 'domain' supplied: use the site-wide query.
            req_sql = ATT_TYPE_TOP10
        global_result = waf_global.objects.get(user_id=user_id)
        endpoint = global_result.aliyun_log_endpoint.replace(
            'https://', '').replace('http://', '')
        access_id = global_result.aliyun_access_id
        access_secret = global_result.aliyun_access_secret
        project = global_result.aliyun_project
        logstore = global_result.aliyun_logstore
        client = LogClient(endpoint, access_id, access_secret)
        query_req = GetLogsRequest(project=project,
                                   logstore=logstore,
                                   fromTime=from_time,
                                   toTime=int(time()),
                                   topic='',
                                   query=req_sql)
        for row in client.get_logs(query_req).get_logs():
            contents = row.get_contents()
            try:
                data.append({
                    'protection_type': contents['protection_type'],
                    'count': contents['count'],
                })
            except:
                # Rows missing the expected fields are skipped.
                pass
        return_result['result'] = True
        return_result['message'] = data
        return JsonResponse(return_result, safe=False)
    except Exception as e:
        return_result['result'] = False
        return_result['message'] = str(e)
        return_result['errCode'] = 103
        return JsonResponse(return_result, safe=False)
 def verify_sls_connection(self, region, ak_id, ak_key, project, logstore):
     """Validate SLS connectivity and credentials by fetching the logstore.

     Returns (True, response_body) on success, else (False, error_text).
     """
     client = LogClient(region, ak_id, ak_key)
     try:
         res = client.get_logstore(project, logstore)
         return True, res.body
     except LogException as ex:
         # Service-side rejection (auth failure, missing project/logstore, ...).
         return False, str(ex)
     except Exception as ex:
         # Anything else, e.g. network or parsing failures.
         return False, str(ex)
def cc_chart_get_black_ip_count(request):
    """JSON view: number of blacklisted IPs within the selected time window."""
    return_result = {}
    try:
        user_id = request.session['user_id']
        json_data = json.loads(request.body)
        time_zone = json_data['time_zone']
        # Window start timestamp; anything unrecognised means the last 24 hours.
        seconds = {"7day": 604800, "1hour": 3600}.get(time_zone, 86400)
        from_time = int(time() - seconds)
        try:
            req_sql = DOMAIN_CC_BLACK_IP_COUNT % (json_data['domain'])
        except:
            # No 'domain' supplied: use the site-wide query.
            req_sql = CC_BLACK_IP_COUNT
        global_result = waf_global.objects.get(user_id=user_id)
        endpoint = global_result.aliyun_log_endpoint.replace(
            'https://', '').replace('http://', '')
        access_id = global_result.aliyun_access_id
        access_secret = global_result.aliyun_access_secret
        project = global_result.aliyun_project
        logstore = global_result.aliyun_logstore
        client = LogClient(endpoint, access_id, access_secret)
        query_req = GetLogsRequest(project=project,
                                   logstore=logstore,
                                   fromTime=from_time,
                                   toTime=int(time()),
                                   topic='',
                                   query=req_sql)
        black_ip_count = ''
        for row in client.get_logs(query_req).get_logs():
            try:
                # Keep the value from the last row that carries the field.
                black_ip_count = row.get_contents()['black_ip_count']
            except:
                pass
        return_result['result'] = True
        return_result['black_ip_count'] = black_ip_count
        return JsonResponse(return_result, safe=False)
    except Exception as e:
        return_result['result'] = False
        return_result['message'] = str(e)
        return_result['errCode'] = 400
        return JsonResponse(return_result, safe=False)
Exemple #8
0
def flow_chart_get_bad_upstream_count_trend(request):
    """JSON view: trend of failed upstream (origin) request counts.

    The POST body selects the window via 'time_zone' ("7day"/"24hour"/
    "1hour"); each window uses its own pre-bucketed SQL.  An optional
    'domain' scopes the query to one site.

    NOTE(review): an unrecognised 'time_zone' leaves req_sql unassigned;
    the resulting NameError is swallowed by the outer handler and
    reported as an errCode-400 JSON payload.
    """
    return_result = {}
    data = []
    try:
        user_id = request.session['user_id']
        json_data = json.loads(request.body)
        time_zone = json_data['time_zone']
        # Default window: the last 24 hours.
        from_time = int(time() - 86400)
        try:
            # Domain-scoped SQL variants when 'domain' is present.
            domain = json_data['domain']
            if time_zone == "7day":
                from_time = int(time() - 604800)
                req_sql = DOMAIN_BAD_UPSTREAM_COUNT_TREND_7D%(domain)
            elif time_zone == "24hour":
                from_time = int(time() - 86400)
                req_sql =DOMAIN_BAD_UPSTREAM_COUNT_TREND_24H%(domain)
            elif time_zone == "1hour":
                from_time = int(time() - 3600)
                req_sql = DOMAIN_BAD_UPSTREAM_COUNT_TREND_1H%(domain)
        except:
            # No 'domain' in the body (KeyError): fall back to site-wide SQL.
            if time_zone == "7day":
                from_time = int(time() - 604800)
                req_sql = UPSTREAM_BAD_COUNT_TREND_7D
            elif time_zone == "24hour":
                from_time = int(time() - 86400)
                req_sql = UPSTREAM_BAD_COUNT_TREND_24H
            elif time_zone == "1hour":
                from_time = int(time() - 3600)
                req_sql = UPSTREAM_BAD_COUNT_TREND_1H
        # Connection settings come from the per-user WAF configuration row.
        global_result = waf_global.objects.get(user_id=user_id)
        endpoint = global_result.aliyun_log_endpoint.replace('https://', '').replace('http://', '')
        accessKeyId = global_result.aliyun_access_id
        accessKey = global_result.aliyun_access_secret
        project = global_result.aliyun_project
        logstore = global_result.aliyun_logstore
        client = LogClient(endpoint, accessKeyId, accessKey)
        req = GetLogsRequest(project=project, logstore=logstore, fromTime=from_time, toTime=int(time()), topic='',
                             query=req_sql)
        res = client.get_logs(req)
        for log_result in res.get_logs():
            try:
                data.append({'time': log_result.get_contents()['time'],
                             'count': log_result.get_contents()['count'],
                             }
                            )
            except:
                # Skip rows missing the expected fields.
                pass
        return_result['result'] = True
        return_result['message'] = data
        return JsonResponse(return_result, safe=False)
    except Exception, e:
        return_result['result'] = False
        return_result['message'] = str(e)
        return_result['errCode'] = 400
        return JsonResponse(return_result, safe=False)
def main():
    """Migrate documents from a local Elasticsearch into SLS via es_migration."""
    client = LogClient(endpoint=os.getenv("endpoint"),
                       accessKeyId=os.getenv("access_key_id"),
                       accessKey=os.getenv("access_key"))
    client.es_migration(
        hosts="elastic:elastic@localhost:9200",
        indexes="all_data_types*",
        project_name=os.getenv("project_name"),
        scroll="2m",
        pool_size=24,
        time_reference="es_date",
        source="my_source",
        topic="my_topic",
        wait_time_in_secs=60,
    )
Exemple #10
0
class AliyunSLS:
    """Convenience wrapper over LogClient for SLS alert CRUD.

    The target project defaults to configs['logProjectName'] and can be
    overridden per call via the *project* keyword.
    """

    def __init__(self):
        self.client = LogClient(configs['endpoint'], configs['accessKeyId'],
                                configs['accessKey'])
        self.project = configs['logProjectName']

    def create_alert(self, detail, project=None):
        """Create an alert after making sure its dashboard exists."""
        dashboard = detail['configuration']['dashboard']
        self.ensure_dashboard(dashboard, project=project)
        return self.client.create_alert(project or self.project, detail).body

    def update_alert(self, detail, project=None):
        """Replace an existing alert definition."""
        return self.client.update_alert(project or self.project, detail).body

    def delete_alert(self, name, project=None):
        """Delete the alert called *name*."""
        return self.client.delete_alert(project or self.project, name).body

    def get_alert(self, name, project=None):
        """Fetch the alert called *name*."""
        return self.client.get_alert(project or self.project, name).body

    def ensure_dashboard(self, name, project=None):
        """Create an empty dashboard *name* if it cannot be fetched."""
        try:
            self.client.get_dashboard(project or self.project, name)
        except LogException:
            self.client.create_dashboard(
                project or self.project, {
                    'dashboardName': name,
                    'displayName': name,
                    'charts': [],
                    'description': ''
                })
def send_log_to_aliyun(logstore, message):
    """Send a single log line to the Aliyun log service.

    :param logstore: name of the target logstore
    :param message: log text to send
    :return: None
    """
    # Build a client and use it to talk to the log service.
    client = LogClient(settings.END_POINT, settings.ACCESS_KEY_ID,
                       settings.ACCESS_KEY)
    item = LogItem()
    item.set_time(int(time.time()))
    item.set_contents([('message', message)])
    request = PutLogsRequest(settings.PROJECT, logstore, '', '', [item])
    client.put_logs(request)  # ship the log
Exemple #12
0
 def __init__(self,
              endpoint,
              access_key_id,
              access_key,
              project=None,
              logstore=None,
              topic=None,
              source=None):
     """Open an SLS client and keep default routing metadata for writes."""
     self.project = project
     self.logstore = logstore
     self.topic = topic
     self.source = source
     # A string topic is required; fail fast otherwise.
     assert isinstance(self.topic, str), 'topic must be string'
     self.client = LogClient(endpoint, access_key_id, access_key)
Exemple #13
0
    def _activate(self):
        """Fetch temporary STS credentials from self._url and rebuild the client.

        Any failure (network, JSON parsing, missing keys) simply marks the
        instance as having no network access.
        """
        try:
            request = urllib.request.Request(self._url)
            body = urllib.request.urlopen(request).read()
            credentials = json.loads(body)['Credentials']

            access_key_id = credentials['AccessKeyId']
            access_key = credentials['AccessKeySecret']
            token = credentials['SecurityToken']
            self._expire_time = credentials['Expiration']

            self._client = LogClient(self._endpoint, access_key_id,
                                     access_key, token)
        except:
            self._network_available = False
Exemple #14
0
 def __init__(
     self,
     endpoint,
     access_id,
     access_key,
     project_name,
     logstore_name,
     topic,
     source,
 ):
     """Record the write destination and build the underlying LogClient."""
     self._project_name = project_name
     self._logstore_name = logstore_name
     self._topic = topic
     self._source = source
     self._log_client = LogClient(
         endpoint=endpoint,
         accessKeyId=access_id,
         accessKey=access_key,
     )
Exemple #15
0
 def __init__(self,endpoint,accessKeyId,accessKey,basename,tablename):
     """Store connection settings and open an SLS client.

     :param endpoint: service endpoint, e.g. http://oss-cn-hangzhou.aliyuncs.com
     :param accessKeyId: AccessKeyId of the credential pair
     :param accessKey: AccessKeySecret of the credential pair
     :param basename: base (project) name used by this helper
     :param tablename: table (logstore) name used by this helper
     """
     self.endpoint = endpoint
     self.accessKeyId = accessKeyId
     self.accessKey = accessKey
     self.basename = basename
     self.tablename = tablename
     self.client = LogClient(self.endpoint, self.accessKeyId, self.accessKey)
Exemple #16
0
    def __init__(self):
        # SLS project whose machine groups are all monitored.
        self.__project_name = '<your_sls_project_name>'
        # Endpoint of the region the SLS project lives in.
        self.__endpoint = '<endpoint_of_your_sls_project_region>'  # cn-hangzhou.log.aliyuncs.com
        # Heartbeat timeout (seconds): machines past this may be abnormal;
        # default is 15 minutes, adjust as needed.
        self.__hb_timeout_threshold = 15 * 60
        # Name of the SLS project that stores the service (logtail status) logs.
        self.__logtail_status_project_name = '<status_log_project_name>'  # log-service-<your_aliuid>-<region_name>
        # Status-log query window (seconds); defaults to the last 10 minutes.
        self.__query_range = 10 * 60
        # Status-log count threshold: one per minute expected, fewer than this
        # within 10 minutes is judged abnormal.
        self.__status_log_count_threshold = 8  # at least 8 status logs during recent 10 minutes.
        # project/logstore used to report anomalies; an empty logstore means
        # do not report back to SLS.
        self.__report_project_name = self.__project_name  # same project by default
        self.__report_logstore = ''

        self.__client = LogClient(
            endpoint=self.__endpoint,
            accessKeyId='',  # access key to call SLS APIs.
            accessKey='')
Exemple #17
0
def _setup_migration_logstore(endpoint, project, access_key_id, access_key):
    """Ensure the migration logstore and its line index exist.

    Both creations are idempotent: "already exists" errors from the
    service are ignored, anything else is re-raised.
    """
    client = LogClient(
        endpoint=endpoint,
        accessKeyId=access_key_id,
        accessKey=access_key,
    )
    try:
        client.create_logstore(
            project_name=project,
            logstore_name=_migration_logstore,
        )
    except LogException as exc:
        if exc.get_error_code() != "LogStoreAlreadyExist":
            raise
    # Tokenize on common punctuation and whitespace for full-text search.
    separators = [
        ',', ' ', "'", '"', ';', '=', '(', ')', '[', ']', '{', '}', '?',
        '@', '&', '<', '>', '/', ':', '\n', '\t', '\r',
    ]
    try:
        index = IndexConfig(line_config=IndexLineConfig(token_list=separators))
        client.create_index(project, _migration_logstore, index)
    except LogException as exc:
        if exc.get_error_code() != "IndexAlreadyExist":
            raise
def main():
    """End-to-end smoke test of the SLS logging handler on a throwaway project."""
    endpoint = os.environ.get('ALIYUN_LOG_SAMPLE_ENDPOINT', '')
    access_id = os.environ.get('ALIYUN_LOG_SAMPLE_ACCESSID', '')
    access_key = os.environ.get('ALIYUN_LOG_SAMPLE_ACCESSKEY', '')

    assert endpoint and access_id and access_key, ValueError(
        "endpoint/access_id/key cannot be empty")

    # Unique project name per run so repeated runs never collide.
    project = 'python-sdk-test' + str(time()).replace('.', '-')
    logstore = 'logstore'

    client = LogClient(endpoint, access_id, access_key, "")

    print("****create project", project)
    client.create_project(project, "SDK test")
    sleep(10)

    try:
        print("****create logstore", logstore)
        client.create_logstore(project, logstore, 1, 1)
        sleep(60)

        test_log_handler(endpoint, access_id, access_key, project, logstore)
        sleep(60)

        # The first pulled batch is expected to carry exactly 10 flat logs.
        for batch in client.pull_log(project, logstore, 0, time() - 3600,
                                     time()):
            flattened = batch.get_flatten_logs_json()
            print(flattened)
            assert len(flattened) == 10
            break

        # JSON field extraction.
        test_log_handler_json(endpoint, access_id, access_key, project,
                              logstore)
        # Key-value extraction.
        test_log_handler_kv(endpoint, access_id, access_key, project,
                            logstore)
        sleep(60)

        # Configure a logger from file and push one record through it.
        os.environ['ALIYUN_LOG_SAMPLE_TMP_PROJECT'] = project
        config_path = os.sep.join([os.path.dirname(__file__), 'logging.conf'])
        logging.config.fileConfig(config_path)
        logger = logging.getLogger('sls')
        logger.info("log hanlder test via config file")
        sleep(20)

    finally:
        # Always drop the temporary project, even on failure.
        clean_project(client, project)
Exemple #19
0
def main():
    """CLI entry point for the log command-line tool.

    Builds the docopt grammar from LogClient's public methods, then either
    dispatches a `log` sub-command to a LogClient instance (optionally
    filtering the response body with a JMESPath expression) or stores
    credentials for the `configure` sub-command.

    Exits 1 on a bad JMESPath filter, 2 on a server-side LogException.
    """
    # Derive supported sub-commands/options from LogClient itself,
    # excluding blacklisted methods.
    method_types, optdoc = parse_method_types_optdoc_from_class(LogClient, LOG_CLIENT_METHOD_BLACK_LIST)

    arguments = docopt(optdoc, version=__version__)
    system_options = normalize_system_options(arguments)

    # process normal log command
    if arguments.get('log', False):
        access_id, access_key, endpoint, jmes_filter = load_config(system_options)
        method_name, args = normalize_inputs(arguments, method_types)
        assert endpoint and access_id and access_key, ValueError("endpoint, access_id or key is not configured")
        client = LogClient(endpoint, access_id, access_key)
        client.set_user_agent(USER_AGENT)

        assert hasattr(client, method_name), "Unknown parsed command:" + method_name

        try:
            # Invoke the selected LogClient method with the parsed arguments.
            ret = getattr(client, method_name)(**args)

            if jmes_filter and ret is not None and ret.get_body():
                # filter with jmes
                try:
                    print(jmespath.compile(jmes_filter).search(ret.get_body()))
                except jmespath.exceptions.ParseError as ex:
                    # Bad filter: show the error plus the unfiltered body.
                    print("**fail to parse with JMSE path, original data: ", ex)
                    print(_sort_str_dict(ret.get_body()))
                    exit(1)
            elif ret is not None:
                print(_sort_str_dict(ret.get_body()))

        except LogException as ex:
            print(_sort_str_dict(ex.get_resp_body()))
            exit(2)

    # process configure command
    elif arguments.get('configure', False):
        # Positional credentials; client name falls back to the default section.
        args = arguments['<secure_id>'], arguments['<secure_key>'], arguments['<endpoint>'], \
               arguments['<client_name>'] or LOG_CONFIG_SECTION
        configure_confidential(*args)
def get_copy_option():
    """Build the consumer-group option and a bound writer for a logstore copy.

    Reads source/destination connection info from SLS_* environment
    variables and returns (LogHubConfig, callable); the callable writes
    raw log groups to the destination logstore.
    """
    # Source connection info and consumer group name from the environment.
    access_id = os.environ.get('SLS_AK_ID', '')
    access_key = os.environ.get('SLS_AK_KEY', '')
    endpoint = os.environ.get('SLS_ENDPOINT', '')
    project = os.environ.get('SLS_PROJECT', '')
    logstore = os.environ.get('SLS_LOGSTORE', '')
    consumer_group = os.environ.get('SLS_CG', '')

    # Destination defaults to the source endpoint/project.
    to_endpoint = os.environ.get('SLS_ENDPOINT_TO', endpoint)
    to_project = os.environ.get('SLS_PROJECT_TO', project)
    to_logstore = os.environ.get('SLS_LOGSTORE_TO', '')

    assert endpoint and access_id and access_key and project and logstore and consumer_group, \
        ValueError("endpoint/access_id/key/project/logstore/consumer_group/name cannot be empty")
    assert to_endpoint and to_project and to_logstore, ValueError(
        "to endpoint/to project/to logstore cannot be empty")

    # Unique consumer name per process so parallel runs do not collide;
    # don't hard-code it when running this program in parallel.
    consumer_name = "{0}-{1}".format(consumer_group, current_process().pid)

    # Only used at initialization; ignored once the consumer group exists and
    # each shard has started to be consumed.  May be "begin", "end", or a
    # specific ISO time (log receiving time).
    cursor_start_time = "begin"

    option = LogHubConfig(endpoint,
                          access_id,
                          access_key,
                          project,
                          logstore,
                          consumer_group,
                          consumer_name,
                          cursor_position=CursorPosition.SPECIAL_TIMER_CURSOR,
                          cursor_start_time=cursor_start_time)

    # Bind put_log_raw, which avoids re-encoding and is therefore faster.
    destination = LogClient(to_endpoint, access_id, access_key)
    writer = partial(destination.put_log_raw,
                     project=to_project,
                     logstore=to_logstore)

    return option, writer
Exemple #21
0
 def __init__(self,
              access_key=None,
              access_secret=None,
              region_id='cn-beijing',
              **kwargs):
     """Set up ACS, SLS and OSS clients that share one credential pair."""
     self._acs_client = AcsClient(access_key,
                                  access_secret,
                                  region_id=region_id,
                                  **kwargs)
     # SLS endpoint is overridable via the environment.
     sls_endpoint = os.environ.get('ALIYUN_LOG_SAMPLE_ENDPOINT',
                                   'cn-beijing.log.aliyuncs.com')
     self._log_client = LogClient(sls_endpoint, access_key, access_secret)
     self._oss_auth = oss2.Auth(access_key, access_secret)
Exemple #22
0
def flow_chart_get_totle_count(request):
    """JSON view: total request count and total upstream count for the window.

    The POST body carries 'time_zone' ("7day"/"24hour"/"1hour") and an
    optional 'domain' that scopes both queries to one site.
    """
    return_result = {}
    try:
        user_id = request.session['user_id']
        json_data = json.loads(request.body)
        try:
            domain = json_data['domain']
            req_sql = DOMAIN_REQUEST_TOTLE_COUNT % (domain)
            req_sql2 = DOMAIN_UPSTREAM_TOTLE_COUNT % (domain)
        except KeyError:
            req_sql = REQUEST_TOTLE_COUNT
            req_sql2 = UPSTREAM_TOTLE_COUNT
        time_zone = json_data['time_zone']
        # Window start; unknown values fall back to the last 24 hours.
        window = {"7day": 604800, "24hour": 86400, "1hour": 3600}
        from_time = int(time() - window.get(time_zone, 86400))
        global_result = waf_global.objects.get(user_id=user_id)
        endpoint = global_result.aliyun_log_endpoint.replace('https://', '').replace('http://', '')
        accessKeyId = global_result.aliyun_access_id
        accessKey = global_result.aliyun_access_secret
        project = global_result.aliyun_project
        logstore = global_result.aliyun_logstore
        # One client suffices; the original constructed a second identical one.
        client = LogClient(endpoint, accessKeyId, accessKey)

        def _single_count(sql):
            # Run one aggregate query; return its 'count' field ("" if absent).
            req = GetLogsRequest(project=project, logstore=logstore,
                                 fromTime=from_time, toTime=int(time()),
                                 topic='', query=sql)
            value = ""
            for row in client.get_logs(req).get_logs():
                try:
                    value = row.get_contents()['count']
                except KeyError:
                    pass
            return value

        return_result['result'] = True
        return_result['request_count'] = _single_count(req_sql)
        return_result['upstream_count'] = _single_count(req_sql2)
        return JsonResponse(return_result, safe=False)
    except Exception as e:
        return_result['result'] = False
        return_result['message'] = str(e)
        return_result['errCode'] = 400
        return JsonResponse(return_result, safe=False)
Exemple #23
0
def test_project_tags():
    """Exercise project tagging: tag, untag, then print what remains."""
    client = LogClient(
        endpoint='cn-chengdu.log.aliyuncs.com',
        accessKeyId='***',
        accessKey='***',
    )

    extra_tags = {
        "normal": "of course",
        "normal2": "of course",
        "tag name": "what...?",
        "中文": "我是“测试数据”",
    }
    client.tag_project("my-project", another_tag="show it", **extra_tags)
    client.untag_project("my-project", "normal", "normal2")

    # get_project_tags may yield multiple responses; walk them all.
    for page in client.get_project_tags("my-project"):
        for key, value in page.get_tags().items():
            print(key, "==>", value)
def attack_chart_get_type_trend(request):
    """JSON view: attack counts per protection type over time, for charting.

    Returns 'x' (time buckets), 'y' (protection types) and 'message'
    mapping each type to a count series aligned with 'x'.  Window and
    optional domain scoping come from the JSON body ('time_zone',
    'domain').
    """
    return_result = {}
    try:
        user_id = request.session['user_id']
        json_data = json.loads(request.body)
        time_zone = json_data['time_zone']
        from_time = int(time() - 86400)
        try:
            # Domain-scoped SQL variants; each window has its own bucketing.
            domain = json_data['domain']
            if time_zone == "7day":
                from_time = int(time() - 604800)
                req_sql = DOMAIN_ATT_TYPE_7D % (domain)
            elif time_zone == "24hour":
                from_time = int(time() - 86400)
                req_sql = DOMAIN_ATT_TYPE_24H % (domain)
            elif time_zone == "1hour":
                from_time = int(time() - 3600)
                req_sql = DOMAIN_ATT_TYPE_1H % (domain)
        except KeyError:
            # No 'domain' in the body: use the site-wide queries.
            if time_zone == "7day":
                from_time = int(time() - 604800)
                req_sql = ATT_TYPE_7D
            elif time_zone == "24hour":
                from_time = int(time() - 86400)
                req_sql = ATT_TYPE_24H
            elif time_zone == "1hour":
                from_time = int(time() - 3600)
                req_sql = ATT_TYPE_1H
        global_result = waf_global.objects.get(user_id=user_id)
        endpoint = global_result.aliyun_log_endpoint.replace(
            'https://', '').replace('http://', '')
        accessKeyId = global_result.aliyun_access_id
        accessKey = global_result.aliyun_access_secret
        project = global_result.aliyun_project
        logstore = global_result.aliyun_logstore
        client = LogClient(endpoint, accessKeyId, accessKey)
        req = GetLogsRequest(project=project,
                             logstore=logstore,
                             fromTime=from_time,
                             toTime=int(time()),
                             topic='',
                             query=req_sql)
        res = client.get_logs(req)
        # First pass: collect distinct time buckets (x) and types (y).
        # FIX: removed a leftover debug `print` of every row, and replaced
        # py2-only dict.has_key with the `in` operator.
        x = []
        x_exist = {}  # time bucket -> index into x
        y = []
        y_exist = {}  # protection type -> seen flag
        for log_result in res.get_logs():
            contents = log_result.get_contents()
            if contents['time'] not in x_exist:
                x.append(contents['time'])
                x_exist[contents['time']] = len(x) - 1
            if contents['protection_type'] not in y_exist:
                if contents['protection_type'] != 'null':
                    y.append(contents['protection_type'])
                    y_exist[contents['protection_type']] = True
        # Second pass: fill one zeroed series per type, indexed by bucket.
        result = {}
        for ptype in y:
            result[ptype] = [0] * len(x)
        for log_result in res.get_logs():
            contents = log_result.get_contents()
            if contents['protection_type'] in result:
                series = result[contents['protection_type']]
                series[x_exist[contents['time']]] = contents['count']
        return_result['result'] = True
        return_result['message'] = result
        return_result['x'] = x
        return_result['y'] = y
        return JsonResponse(return_result, safe=False)
    except Exception as e:
        return_result['result'] = False
        return_result['message'] = str(e)
        return_result['errCode'] = 103
        return JsonResponse(return_result, safe=False)
Exemple #25
0
from aliyun.log import LogClient

# Example: configure an OSS log shipper using the raw dict-based API.
# TODO: change me
project = "test-project"
logstore = "test-logstore"
role = "acs:ram::00000000:role/aliyunlogdefaultrole"

# Destination OSS bucket and object-key prefix for shipped log files.
bucket = "test-bucket"
prefix = "test-data"

# NOTE(review): placeholder credentials — replace before running.
client = LogClient(
    endpoint='cn-chengdu.log.aliyuncs.com',
    accessKeyId='***',
    accessKey='***',
)

# Shipper config: flush every 300 s or at bufferSize 32 (units per the SLS
# shipper API — confirm), uncompressed, written under
# oss://<bucket>/<prefix>/ using the %Y/%m/%d/%H/%M path layout and `role`.
shipper = {
    "shipperName": "test-shipper",
    "targetConfiguration": {
        "bufferInterval": 300,
        "bufferSize": 32,
        "compressType": "none",
        "enable": True,
        "ossBucket": bucket,
        "ossPrefix": prefix,
        "pathFormat": "%Y/%m/%d/%H/%M",
        "roleArn": role,
        "storage": {
            "detail": {
                "enableTag": False
            },
            # NOTE(review): this dict literal is truncated in the source —
            # the closing braces and the API call consuming `shipper` are
            # missing; recover them from the complete original.
Exemple #26
0
class AliyunLog:
    """Thin wrapper around the Aliyun SLS ``LogClient`` bound to a single
    project/logstore/topic/source.

    All query methods swallow SDK errors, print them, and return ``[]`` —
    this is the original best-effort contract and is preserved.
    """

    def __init__(self,
                 endpoint,
                 access_key_id,
                 access_key,
                 project=None,
                 logstore=None,
                 topic=None,
                 source=None):
        """Create the underlying ``LogClient`` and remember the target.

        :param endpoint: SLS service endpoint.
        :param access_key_id: Aliyun access key id.
        :param access_key: Aliyun access key secret.
        :param project: SLS project name.
        :param logstore: logstore name within the project.
        :param topic: topic tag for written/queried logs (must be ``str``).
        :param source: source tag for written logs.
        :raises AssertionError: if ``topic`` is not a string.
        """
        self.logstore = logstore
        self.project = project
        self.topic = topic
        self.source = source
        # NOTE: assert is stripped under `python -O`; kept as-is so the
        # exception type (AssertionError) seen by callers is unchanged.
        assert isinstance(self.topic, str), 'topic must be string'
        self.client = LogClient(endpoint, access_key_id, access_key)

    def add_log(self, item: str):
        """Write one log record whose single field ``content`` holds *item*
        (typically ``json.dumps`` output), timestamped with the current time.
        """
        log_item = LogItem(int(time.time()), [('content', item)])
        log_req = PutLogsRequest(self.project,
                                 self.logstore,
                                 topic=self.topic,
                                 source=self.source,
                                 logitems=[log_item])
        self.client.put_logs(log_req)

    def get_topics(self, fromTime=None, toTime=None):
        """Return the distinct ``__topic__`` values seen in [fromTime, toTime].

        Returns ``[]`` (after printing the error) on any SDK failure.
        """
        try:
            req = GetLogsRequest(
                self.project,
                self.logstore,
                fromTime=fromTime,
                toTime=toTime,
                topic=self.topic,
                query='*|select "__topic__" group by "__topic__"')
            res = self.client.get_logs(req)
            return [log.get_contents()['__topic__'] for log in res.get_logs()]
        except Exception as e:
            print("Get topic error: %s" % str(e))
            return []

    def get_logs(self, fromTime, toTime):
        """Return the contents of all logs in [fromTime, toTime] via a ``*``
        query. Returns ``[]`` (after printing the error) on any SDK failure.
        """
        try:
            req = GetLogsRequest(self.project,
                                 self.logstore,
                                 fromTime=fromTime,
                                 toTime=toTime,
                                 query='*')
            res = self.client.get_logs(req)
            return [log.get_contents() for log in res.get_logs()]
        except Exception as e:
            print("Get logs error: %s" % str(e))
            return []

    def pull_logs(self, fromTime, toTime):
        """Pull raw log groups shard-by-shard between the cursors for
        ``fromTime`` and ``toTime``.

        This code was previously unreachable dead code appended after the
        returns inside :meth:`get_logs`; it is promoted to its own method
        so it can actually be called. Returns the concatenated JSON log
        group list, or ``[]`` (after printing the error) on failure.
        """
        try:
            listShardRes = self.client.list_shards(self.project, self.logstore)
            log_list = []
            for shard in listShardRes.get_shards_info():
                shard_id = shard["shardID"]
                res = self.client.get_cursor(self.project, self.logstore,
                                             shard_id, fromTime)
                start_cursor = res.get_cursor()
                res = self.client.get_cursor(self.project, self.logstore,
                                             shard_id, toTime)
                end_cursor = res.get_cursor()
                while True:
                    loggroup_count = 100  # read up to 100 log groups per call
                    res = self.client.pull_logs(self.project, self.logstore,
                                                shard_id, start_cursor,
                                                loggroup_count, end_cursor)
                    log_list += res.get_loggroup_json_list()
                    next_cursor = res.get_next_cursor()
                    # A cursor that stops advancing means the shard range
                    # is exhausted.
                    if next_cursor == start_cursor:
                        break
                    start_cursor = next_cursor
            return log_list
        except Exception as e:
            print("Pull logs error: %s" % str(e))
            return []
#!C:\Program Files\Python38
# -*- coding: utf-8 -*-

from aliyun.log import LogClient, GetLogsRequest
from openpyxl.styles import Font, colors, Alignment, PatternFill, Border, Side  #设定字体,字体颜色,文字位置,背景色,框线,框线样式
import openpyxl
import datetime, time
import os

# WeLe (微乐) Log Service API settings.
# NOTE(review): LogClient() is called with no arguments, although the SDK
# constructor normally takes endpoint/credentials — presumably configured
# elsewhere before use; confirm against the complete original.
wlclient = LogClient()
wlproject = ''  # project name (left blank in this example)
wlusername = '******'  # user name (redacted in this example)
wlhalls = [
    '小游戏大厅_通用', '小游戏大厅_四川_甘肃_宁夏_云南', '小游戏大厅_陕西', '小游戏大厅_山西_内蒙', '小游戏大厅_山东',
    '小游戏大厅_辽宁', '小游戏大厅_江西_福建', '小游戏大厅_江苏_安徽_浙江_上海', '小游戏大厅_吉林', '小游戏大厅_湖南',
    '小游戏大厅_湖北', '小游戏大厅_黑龙江', '小游戏大厅_河南', '小游戏大厅_河北_北京_天津', '小游戏大厅_贵州',
    '小游戏大厅_广东_广西_海南', '小游戏大厅_高防', 'APP大厅_通用'
]
# Jixiang (吉祥) Log Service API settings (same NOTE as above applies).
jxclient = LogClient()
jxproject = ''  # project name (left blank in this example)
jxusername = '******'  # user name (redacted in this example)
jxhalls = ['吉祥大厅', '小程序']


class create_xls_data:
    """Presumably builds spreadsheet data from SLS logs for the given halls
    (the name and the openpyxl imports above suggest xlsx export — confirm).

    NOTE(review): the class body is truncated in this source — only the
    start of ``__init__`` is visible, and the ``halls`` argument is never
    stored; recover the remainder from the complete original.
    """

    def __init__(self, client, project, username, halls):
        # Stored as-is; `halls` is accepted but not assigned in the visible
        # portion (likely truncation).
        self.client = client
        self.project = project
        self.username = username
Exemple #28
0
 def __init__(self):
     """Bind an SLS ``LogClient`` and the project name from the
     module-level ``configs`` mapping."""
     cfg = configs
     self.project = cfg['logProjectName']
     self.client = LogClient(cfg['endpoint'], cfg['accessKeyId'],
                             cfg['accessKey'])
Exemple #29
0
def logClient(endpoint,
              accessKeyId='LTAIAyQ560nxSgJI',
              accessKey='sbHue8SB1Kkg5XPj9mCzoBGa4PMICX'):
    """Build an SLS ``LogClient`` for *endpoint*.

    Credentials may now be supplied by the caller; the previous hard-coded
    values remain as defaults for backward compatibility.

    SECURITY: these default credentials are committed to source control and
    must be considered compromised — rotate them and pass real credentials
    from the environment or a secrets store instead of relying on defaults.
    """
    client = LogClient(endpoint, accessKeyId, accessKey)
    return client
Exemple #30
0
def init_sls_client(endpoint, access_key_id, access_key):
    """Return a new SLS ``LogClient`` for the given endpoint/credentials."""
    client = LogClient(endpoint, access_key_id, access_key)
    return client