Beispiel #1
0
    def __init__(self, max_queue_size, work_queue, interface, custom_tag,
                 return_deep_info, http_filter_json, cache_size, session_size,
                 bpf_filter, timeout, debug):
        """
        Constructor.

        :param max_queue_size: maximum length of the asset queue
        :param work_queue: queue used to publish captured asset data messages
        :param interface: name of the NIC to capture traffic from
        :param custom_tag: data tag used to distinguish collector engines
        :param return_deep_info: whether to include extra detail (raw request,
            response headers and body)
        :param http_filter_json: HTTP filter config; filters by status and
            content type
        :param cache_size: number of processed entries to cache; duplicates
            seen within 120 seconds are not re-sent to Syslog
        :param session_size: number of HTTP/TCP sessions to cache; idle
            sessions are evicted automatically
        :param bpf_filter: low-level packet (BPF) filter expression
        :param timeout: collector run timeout; by default it exits one hour
            after start
        :param debug: debug switch
        """
        self.total_msg_num = 0
        self.max_queue_size = max_queue_size
        self.work_queue = work_queue
        self.debug = debug
        self.timeout = timeout
        self.bpf_filter = bpf_filter
        self.cache_size = cache_size
        self.session_size = session_size
        self.http_filter_json = http_filter_json
        self.return_deep_info = return_deep_info
        self.custom_tag = custom_tag
        self.interface = interface
        # Raw packet sniffer on the chosen interface; promiscuous mode with a
        # full 64KB snap length so whole frames are captured.
        self.sniffer = pcap.pcap(self.interface,
                                 snaplen=65535,
                                 promisc=True,
                                 timeout_ms=self.timeout,
                                 immediate=False)
        self.sniffer.setfilter(self.bpf_filter)
        # TCP stream reassembly cache; entries idle for 30s are dropped.
        # NOTE(review): the original docstring claimed sessions expire after
        # 16 seconds, but ttl=30 here — confirm which value is intended.
        self.tcp_stream_cache = Cache(maxsize=self.session_size,
                                      ttl=30,
                                      timer=time.time,
                                      default=None)
        if self.cache_size:
            # De-duplication caches: identical TCP/HTTP results seen within
            # 120 seconds are suppressed.
            self.tcp_cache = Cache(maxsize=self.cache_size,
                                   ttl=120,
                                   timer=time.time,
                                   default=None)
            self.http_cache = Cache(maxsize=self.cache_size,
                                    ttl=120,
                                    timer=time.time,
                                    default=None)
        # Regexes for parsing the HTTP request/status line (with trailing body
        # captured via re.S) and for sniffing the charset from an HTML <meta>.
        self.decode_request_regex = re.compile(
            r'^([A-Z]+) +([^ \r\n]+) +HTTP/\d+(?:\.\d+)?[^\r\n]*(.*?)$', re.S)
        self.decode_response_regex = re.compile(
            r'^HTTP/(\d+(?:\.\d+)?) (\d+)[^\r\n]*(.*?)$', re.S)
        self.decode_body_regex = re.compile(
            rb'<meta[^>]+?charset=[\'"]?([a-z\d\-]+)[\'"]?', re.I)
Beispiel #2
0
    def __init__(self, work_queue, interface, custom_tag, return_deep_info,
                 http_filter_json, cache_size, session_size, bpf_filter,
                 timeout, debug):
        """
        Constructor.

        :param work_queue: queue used to publish captured asset data messages
        :param interface: name of the NIC to capture traffic from
        :param custom_tag: data tag used to distinguish collector engines
        :param return_deep_info: whether to include extra detail (raw request,
            response headers and body)
        :param http_filter_json: HTTP filter config; filters by status and
            content type
        :param cache_size: number of processed entries to cache; duplicates
            seen within 120 seconds are not re-sent to Syslog
        :param session_size: number of HTTP/TCP sessions to cache; sessions
            unused for 16 seconds are evicted automatically
        :param bpf_filter: low-level packet (BPF) filter expression
        :param timeout: collector run timeout; by default it exits one hour
            after start
        :param debug: debug switch
        """
        self.work_queue = work_queue
        self.debug = debug
        self.timeout = timeout
        self.bpf_filter = bpf_filter
        self.cache_size = cache_size
        self.session_size = session_size
        self.http_filter_json = http_filter_json
        self.return_deep_info = return_deep_info
        self.custom_tag = custom_tag
        self.interface = interface
        # Live capture via pyshark on the chosen interface.
        self.pktcap = pyshark.LiveCapture(interface=self.interface,
                                          bpf_filter=self.bpf_filter,
                                          use_json=False,
                                          debug=self.debug)
        if self.session_size:
            # Per-session reassembly caches; sessions idle for 16s are dropped.
            self.http_stream_cache = Cache(maxsize=self.session_size,
                                           ttl=16,
                                           timer=time.time,
                                           default=None)
            self.tcp_stream_cache = Cache(maxsize=self.session_size,
                                          ttl=16,
                                          timer=time.time,
                                          default=None)
        if self.cache_size:
            # De-duplication caches (LRU eviction): identical results seen
            # within 120 seconds are suppressed.
            self.http_cache = LRUCache(maxsize=self.cache_size,
                                       ttl=120,
                                       timer=time.time,
                                       default=None)
            self.tcp_cache = LRUCache(maxsize=self.cache_size,
                                      ttl=120,
                                      timer=time.time,
                                      default=None)
        # Regex used to detect the page charset from an HTML <meta> tag.
        self.encode_regex = re.compile(
            rb'<meta [^>]*?charset=["\']?([a-z\-\d]+)["\'>]?', re.I)
Beispiel #3
0
def test_cache_full_maxsize_negative():
    """A negative maxsize must never make cache.full() report True."""
    cache = Cache()
    cache.maxsize = -1
    # Keep inserting and re-checking: full() should stay falsy throughout.
    for key in range(1000):
        cache.set(key, key)
        assert not cache.full()
Beispiel #4
0
def test_cache_default():
    """Test that Cache can set the default for Cache.get()."""
    cache = Cache(default=True)

    # Each row: key, per-call get() kwargs, expected result. A non-callable
    # default is returned but never stored, so the key stays absent.
    for key, kwargs, expected in [
        (1, {}, True),
        (2, {"default": False}, False),
    ]:
        assert cache.get(key, **kwargs) is expected
        assert key not in cache
Beispiel #5
0
 def __init__(self, data_config_file="dataBase.yaml"):
     """
     Initialise DB access: load MySQL settings from *data_config_file*,
     build the SQLAlchemy engine and session factory, and create a cache.
     """
     self.mysql_conf = setup.MySql(data_config_file)
     self.engine = create_engine(self.mysql_conf.PyMySql_STR)
     # Factory that produces sessions bound to this engine.
     self.db_session_factory = sessionmaker(bind=self.engine)
     self.app = setup.App()
     self.cache = Cache()
Beispiel #6
0
	def __init__(self,interface,display_filter,syslog_ip,syslog_port,custom_tag,return_deep_info,filter_rules,cache_size,bpf_filter,timeout,debug):
		"""
		Constructor.

		:param interface: name of the NIC to capture traffic from
		:param display_filter: pyshark display filter expression
		:param syslog_ip: syslog server address
		:param syslog_port: syslog server port
		:param custom_tag: data tag used to distinguish collector engines
		:param return_deep_info: whether to include extra detail in results
		:param filter_rules: result filtering rules
		:param cache_size: de-duplication cache size (entries expire after 120s)
		:param bpf_filter: low-level packet (BPF) filter expression
		:param timeout: collector run timeout
		:param debug: debug switch
		"""
		self.debug = debug
		self.timeout = timeout
		self.bpf_filter = bpf_filter
		self.cache_size = cache_size
		self.filter_rules = filter_rules
		self.return_deep_info = return_deep_info
		self.custom_tag = custom_tag
		self.syslog_ip = syslog_ip
		self.syslog_port = syslog_port
		self.log_obj = _logging(self.syslog_ip,self.syslog_port)
		self.interface = interface
		self.display_filter = display_filter
		# Live capture via pyshark; BPF filter at capture level plus a display
		# filter at dissection level.
		self.pktcap = pyshark.LiveCapture(interface=self.interface, bpf_filter=self.bpf_filter, use_json=True, display_filter=self.display_filter, debug=self.debug)
		# De-duplication caches: identical HTTP/TCP results within 120s are suppressed.
		self.http_cache = Cache(maxsize=self.cache_size, ttl=120, timer=time.time, default=None)
		self.tcp_cache = Cache(maxsize=self.cache_size, ttl=120, timer=time.time, default=None)
		# Regex used to detect the page charset from an HTML <meta> tag.
		self.encode_regex = re.compile(b'<meta [^>]*?charset=["\']?([^"\'\s]+)["\']?', re.I)
Beispiel #7
0
 def __init__(self, ip, port, password):
     """Remember the Redis credentials, build a connection pool, and set up
     a local in-memory cache plus the current working directory."""
     self.ip = ip
     self.port = port
     self.password = password
     # Pooled connections so multiple Redis clients can share sockets.
     pool_options = dict(host=ip,
                         port=port,
                         db=0,
                         password=password,
                         encoding='utf-8')
     self.redis_pool = redis.ConnectionPool(**pool_options)
     self.cache = Cache()
     self.cur = os.getcwd()
Beispiel #8
0
def main(options):
    """
    Entry point.

    Spins up ``options.threads`` daemon worker threads running
    ``filter_thread``, idles until Ctrl+C, then signals the workers to stop
    (via the shared ``threadExit`` flag) and joins any that are still alive.

    :param options: parsed command-line options object
    """
    global es, cacheIds, cache, threadLock, debug, processCount, threadExit, startTime, scrollId

    debug = options.debug
    # Short-lived cache of seen document ids plus the main configurable cache.
    cacheIds = Cache(maxsize=512, ttl=60, timer=time.time, default=None)
    cache = Cache(maxsize=options.cache_size, ttl=options.cache_ttl, timer=time.time, default=None)

    threadLock = threading.RLock()
    threadList = [None] * options.threads

    es = Elasticsearch(hosts=options.hosts)
    # Update the index template
    index_template(es)
    # Fetch the search/scroll position
    scrollId = get_scroll(es)

    try:
        for i in range(options.threads):
            threadList[i] = threading.Thread(target=filter_thread, args=(i, options))
            # setDaemon() is deprecated; assign the daemon attribute instead.
            threadList[i].daemon = True
            threadList[i].start()
            # Stagger worker start-up.
            time.sleep(1)

        while True:
            time.sleep(5)
    except KeyboardInterrupt:
        print('Ctrl+C, exiting ...')
        threadLock.acquire()
        threadExit = True
        threadLock.release()

    for i in range(options.threads):
        # Thread.isAlive() was removed in Python 3.9 — use is_alive().
        if threadList[i] and threadList[i].is_alive():
            print('Thread {} waiting to exit...'.format(i))
            threadList[i].join()

    quit(0)
Beispiel #9
0
def test_cache_get_many__ttl_expires_during_call(items: dict,
                                                 iteratee: t.Union[list, str,
                                                                   t.Pattern,
                                                                   t.Callable],
                                                 expected: dict):
    """Verify cache.get_many() degrades gracefully once entries hit their TTL."""
    # Freeze the clock at 0 so nothing expires while we populate the cache.
    cache = Cache(ttl=1, timer=lambda: 0)
    cache.set_many(items)

    # With the clock frozen, every stored key is still visible.
    assert cache.get_many(iteratee) == expected

    # Jump the clock far past the 1-second TTL; everything must be gone.
    cache.timer = lambda: 100
    assert cache.get_many(iteratee) == {}
Beispiel #10
0
class CacheManager:
    """Thin wrapper around a class-wide Cache using a fixed TTL."""

    cache = Cache()
    cache_ttl = CACHE_TTL

    def set_val(self, key: str, val: dict):
        """Store *val* under *key*, expiring after ``cache_ttl``."""
        self.cache.set(key, val, ttl=self.cache_ttl)

    def get_val(self, key: str) -> dict:
        """Return the cached value for *key*, or the NOT_FOUND status."""
        if key not in self.cache:
            return POSSIBLE_STATUSES[NOT_FOUND]
        return self.cache.get(key)
Beispiel #11
0
def test_cache_default_callable():
    """Test that Cache can set a default function for Cache.get()."""
    def always_false(key):
        return False

    def echo_key(key):
        return key

    cache = Cache(default=always_false)

    # A callable default is invoked AND its result stored under the key, so
    # the first lookup of "key1" pins False there for later lookups.
    assert cache.get("key1") is False
    assert cache.get("key1", default=echo_key) is False
    # Per-call callable defaults win for keys not yet stored.
    assert cache.get("key2", default=echo_key) == "key2"
    assert cache.get("key3", default=3) == 3
Beispiel #12
0
class Cacheout:
    """App-level cache plus access to the static settings.json configuration."""

    # Load the bundled settings once at class-creation time. Using a context
    # manager closes the file promptly — the original json.load(open(...))
    # leaked the file handle.
    with open(os.path.join('./app', "static", "settings.json"), "r") as _settings_file:
        global_settings = json.load(_settings_file)
    del _settings_file
    cache = Cache()
    cache_ttl = global_settings['CACHE_TTL']

    def set(self, key: str, val: dict):
        """Store *val* under *key* with the configured TTL."""
        self.cache.set(key, val, ttl=self.cache_ttl)

    def get(self, key: str) -> dict:
        """Return the cached value for *key*, or the NOT_FOUND status."""
        if key in self.cache:
            return self.cache.get(key)
        return self.global_settings["STATUSES"]["NOT_FOUND"]

    def get_global_settings(self):
        """Return the raw settings dict loaded from settings.json."""
        return self.global_settings
class GateEditor():
    # Class-level memo shared by every editor instance.
    cache = Cache(maxsize=256)

    def __init__(self, config):
        self.gateindex = GateIndex(config)

    def editWave(self, config, gateMap):
        """Return the wave tuple for (config, gateMap), computing it at most once."""
        key = hash_params(config, gateMap)
        cached = self.cache.get(key)

        if cached is not None:
            log.info(f'GateEditor.editWave use cache {key}')
            return cached

        result = editWave(config, gateMap, gateindex=self.gateindex)
        self.cache.set(key, result)
        # wave_tuple_copy = copy.deepcopy(wave_tuple)  # copying costs ~10ms
        return result
Beispiel #14
0
 def __init__(self):
     """
     Gateway hub: wires up exchange websocket classes, a redis connection,
     file logging, and starts the receiver/publisher background threads.
     """
     super().__init__()
     self.channels = []  # save subscribed channels
     self.gateways = {
         'binance': BinanceWs,
         'okex': OkexWs,
         'huobi': HuobiWs,
         'wootrade': WootradeWs
     }  # class of exchanges
     self.ongoing_gateway = {}  # has been instantiated exchanges
     # Data feeds a client can subscribe to.
     self.feature = [
         'price', 'orderbook', 'trade', 'kline', 'order', 'wallet'
     ]
     self.cache = Cache(maxsize=256, timer=time.time)
     self.server = redis.StrictRedis(host='localhost',
                                     port=6379)  # redis server
     LOG_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
     DATE_FORMAT = '%m/%d/%Y %H:%M:%S %p'
     logging.basicConfig(filename='server.log',
                         level=logging.WARNING,
                         format=LOG_FORMAT,
                         datefmt=DATE_FORMAT)
     # Background workers: one consumes inbound messages, one publishes out.
     Thread(target=self.receiver).start()
     Thread(target=self.publisher).start()
Beispiel #15
0
import time

import requests
from cacheout import Cache
from requests import RequestException

from ys7_snap import settings
from ys7_snap.error import AccessTokenError, get_err_msg, CaptureError

# Module-wide token cache: entries expire after the configured YS token
# lifetime so get_access_token() refetches only when needed.
cache = Cache(maxsize=500,
              ttl=settings.YS_TOKEN_CACHE_TIME,
              timer=time.time,
              default=None)


def get_access_token() -> str:
    """获取access token"""
    access_token = cache.get("access_token", None)
    if access_token:
        return access_token

    try:
        ys_token_get_url = settings.YS_TOKEN_GET_URL
        ys_app_key = settings.YS_APP_KEY
        ys_app_secret = settings.YS_APP_SECRET
        post_data = {"appKey": ys_app_key, "appSecret": ys_app_secret}

        data = requests.post(ys_token_get_url, data=post_data).json()
        rsp_code = data.get("code", 200)
        if rsp_code == "200":
            access_token = data['data']['accessToken']
Beispiel #16
0
# coding:utf-8

from urllib import parse, request
import random
import time
from cacheout import Cache

# Module-wide cache with a 5-minute TTL; presumably used to hold issued SMS
# verification codes — confirm against the rest of this module.
cache = Cache(maxsize=256, ttl=300, timer=time.time)


def send_sms(phone_no):
    url = 'http://utf8.api.smschinese.cn'
    user_name = "cxd2017"
    key = "43a486aebdcd6d294107"
    # smsMob = '18201716178'
    smsMob = phone_no
    code = random_code()
    smsText = "DXC科技,验证码:" + code
    textmod = {
        'Uid': user_name,
        "Key": key,
        "smsMob": smsMob,
        "smsText": smsText
    }
    # json串数据使用
    # textmod = json.dumps(textmod).encode(encoding='gbk')
    # 普通数据使用
    textmod = parse.urlencode(textmod).encode(encoding='utf-8')
    print(textmod)
    header_dict = {
        "Content-Type": "application/x-www-form-urlencoded;charset=utf-8"
Beispiel #17
0
from flask import Blueprint
from flask import jsonify
from flask import abort
from cacheout import Cache

from app.models.codetables.group import CtGroupTypes
from app.services import codetablesService
from app.web.schemas.generalSchemas import CodeTableSchema

# Blueprint collecting the code-table admin endpoints.
api = Blueprint('codetables_api', __name__)

# Whitelist of code tables exposed via the API, plus a 5-minute lookup cache.
allowable_codetable_map = {"CtGroupTypes": CtGroupTypes}
_codetable_cache = Cache(maxsize=200, ttl=5 * 60)


@api.route('/api/v1.0/admin/codetables/<codetable_name>', methods=['GET'])
def codetable_by_name(codetable_name):

    allowed_codetable = allowable_codetable_map.get(codetable_name)

    if allowed_codetable is not None:
        # Check cache
        cached_codetable = _codetable_cache.get(codetable_name)

        if cached_codetable is not None:
            return jsonify(cached_codetable)

        else:
            codetable_data = codetablesService.get_code_table(
                allowed_codetable)
            data = CodeTableSchema().dump(codetable_data, many=True)
Beispiel #18
0
from flask import Flask
from flask import make_response
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from flask_socketio import SocketIO
from app.Vendor.Code import Code
import os
import time
from cacheout import Cache
from app.env import (SQLALCHEMY_DATABASE_URI, SQLALCHEMY_TRACK_MODIFICATIONS,
                     UPLOAD_FOLDER, MAX_CONTENT_LENGTH, REDIS_PAS, REDIS_IP,
                     REDIS_PORT, REDIS_DB)

#普通json带error_code风格使用此app示例
app = Flask(__name__, static_folder=os.getcwd() + '/uploads')
# Process-wide cache: up to 2560 entries, each expiring after 24 hours.
cache = Cache(maxsize=2560, ttl=86400, timer=time.time,
              default=None)  # defaults
# 实例化websocket
async_mode = 'gevent'
socketio = SocketIO(
    app,
    async_mode=async_mode,
    logger=True,
    engineio_logger=True,
    cors_allowed_origins="*"
)  #message_queue="redis://:{}@{}:{}/{}".format(REDIS_PAS,REDIS_IP,REDIS_PORT,REDIS_DB), async_mode=async_mode, logger=True, engineio_logger=True)
# 配置 sqlalchemy  数据库驱动://数据库用户名:密码@主机地址:端口/数据库?编码
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = SQLALCHEMY_TRACK_MODIFICATIONS
#上传文件配置
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER  #上传目录
app.config['MAX_CONTENT_LENGTH'] = MAX_CONTENT_LENGTH  #上传大小
Beispiel #19
0
# -*- coding: utf-8 -*-
import datetime
import pathlib
import pandas as pd
import dash
import dash_core_components as dcc
import dash_html_components as html
import plotly.graph_objs as go

from dash.dependencies import Input, Output, State
from plotly import tools

import time
from cacheout import Cache
# Single-slot cache (maxsize=1) with ttl=0, i.e. no cache-wide expiry.
cache = Cache(maxsize=1, ttl=0, timer=time.time)

app = dash.Dash(__name__,
                meta_tags=[{
                    "name": "viewport",
                    "content": "width=device-width"
                }])

server = app.server

# Currency pairs
currencies = [
    'AUDUSD', 'AUDCAD', 'USDCAD', 'EURUSD', 'GBPUSD', 'USDJPY', 'USDCHF',
    'NZDUSD'
]
#currencies = ['AUDUSD']
Beispiel #20
0
def cache(timer):
    """Fixture-style factory: build a Cache driven by the given timer callable."""
    options = {"timer": timer}
    return Cache(**options)
Beispiel #21
0
def test_cache_init_validation(arg, exc):
    """Constructing Cache with bad argument values/types must raise *exc*."""
    expectation = pytest.raises(exc)
    with expectation:
        Cache(**arg)
Beispiel #22
0
from cacheout import Cache
import requests
from read_yaml import *

cache_Casetype = Cache()
def get_caseType():
    """
    Return the case-type code -> description mapping, cached for 7 days.

    On a cache miss the mapping is fetched from the remote dictionary
    service, augmented with the extra "caseType0701" entry, and stored in
    the module-level cache.
    """
    case_types = cache_Casetype.get("caseType")
    if case_types is None:  # identity check instead of the original == None
        print("新建缓存")
        # Renamed from `dict`, which shadowed the builtin.
        resp = requests.get(getName("caseType_dict")["url"]).json()
        cases = {}
        for i in resp['resultMessage']:
            cases[i["caseType"]] = i["caseTypeDesc"]
        cases["caseType0701"] = "非公益诉讼"
        # Cache for 7 days.
        cache_Casetype.set("caseType", value=cases, ttl=60 * 60 * 7 * 24)
        case_types = cases
    return case_types

def get_xy(code):
    """
    Return {"code", "address", "xy"} for an organisation *code*.

    On a cache miss, the address is resolved from the code service and then
    geocoded to [lng, lat] via the Baidu API; the result is cached under the
    code with ttl=0.
    """
    result = cache_Casetype.get(code)
    if result is None:  # identity check instead of the original == None
        print("新建缓存")
        address = requests.get(getName("code_to_does")["url"] + str(code)).json()
        address = address.get("result")
        baidu = requests.get(getName("baidu")["url"] + "address=" + address).json()
        xy = [baidu["result"]["location"]["lng"], baidu["result"]["location"]["lat"]]
        entry = {"code": code, "address": address, "xy": xy}
        cache_Casetype.set(code, value=entry, ttl=0)
        result = entry
    return result
Beispiel #23
0
 def new_cache():
     """
     Build a Cache sized and TTL'd from configuration.

     NOTE(review): this function references ``self`` but declares no ``self``
     parameter — it only works as a closure defined inside a method where
     ``self`` is in scope, or it is missing its parameter. Confirm against
     the enclosing code.
     """
     return Cache(
         maxsize=self.get_conf_int("CACHE_MAXSIZE", 256),
         ttl=self.get_conf_float("CACHE_TTL"),
     )
Beispiel #24
0
# -*- coding: utf-8 -*-
# __file__  : test_01.py
# __time__  : 2020/6/29 3:31 PM
import asyncio
import time

from cacheout import Cache, CacheManager

# NOTE(review): this instance is immediately replaced by the next line.
cache = Cache()

cache = Cache(maxsize=256, ttl=0, timer=time.time, default=None)  # defaults

cache.set(1, "foobar")

assert cache.get(1) == "foobar"

# Missing keys resolve to the cache-wide default (None) unless overridden.
assert cache.get(2) is None
assert cache.get(2, default=False) is False
assert 2 not in cache

# A callable per-call default computes AND stores the value for the key.
assert 2 not in cache
assert cache.get(2, default=lambda key: key) == 2
assert cache.get(2) == 2
assert 2 in cache

# A per-entry TTL overrides the cache-wide ttl=0 (no expiry).
cache.set(3, {"data": {}}, ttl=1)
assert cache.get(3) == {"data": {}}
time.sleep(1)
assert cache.get(3) is None

Beispiel #25
0
def get_cache_driver():
    """Create a Cache configured from the application settings module."""
    driver = Cache(
        maxsize=setting.CACHE_MAXSIZE,
        ttl=setting.CACHE_TTL,
        timer=time.time,
        default=None,
    )
    return driver
Beispiel #26
0
 def __init__(self):
     """Prepare DB access, an empty case list, and a non-expiring cache."""
     self.db = OperationDbInterface()
     self.cases_to_run = []  # cases queued for execution
     # ttl=0 means entries do not expire; maxsize bounds the cache at 256.
     self.cache = Cache(maxsize=256, ttl=0, timer=time.time)
Beispiel #27
0
    async def start(self):
        """
        Boot the server: configure logging, open the listening socket, bind
        the database, connect redis, run the world- or login-specific setup,
        then serve forever.
        """
        # Resolve log locations, defaulting to logs/<name>[-errors].log.
        general_log_file = self.config.logging_general_path if self.config.logging_general_path \
            else f'logs/{self.config.name.lower()}.log'
        errors_log_file = self.config.logging_error_path if self.config.logging_error_path \
            else f'logs/{self.config.name.lower()}-errors.log'
        general_log_directory = os.path.dirname(general_log_file)
        errors_log_directory = os.path.dirname(errors_log_file)

        if not os.path.exists(general_log_directory):
            os.mkdir(general_log_directory)

        if not os.path.exists(errors_log_directory):
            os.mkdir(errors_log_directory)

        # Rotating general log (2MB x 3 backups), a separate ERROR-only file,
        # and console output — all sharing one formatter.
        self.logger = logging.getLogger('mystic')
        universal_handler = RotatingFileHandler(general_log_file,
                                                maxBytes=2097152,
                                                backupCount=3,
                                                encoding='utf-8')

        error_handler = logging.FileHandler(errors_log_file)
        console_handler = logging.StreamHandler(stream=sys.stdout)

        log_formatter = logging.Formatter(
            '%(asctime)s [%(levelname)-5.5s]  %(message)s')
        error_handler.setLevel(logging.ERROR)

        universal_handler.setFormatter(log_formatter)
        console_handler.setFormatter(log_formatter)

        self.logger.addHandler(universal_handler)
        self.logger.addHandler(console_handler)
        self.logger.addHandler(error_handler)

        level = logging.getLevelName(self.config.logging_level)
        self.logger.setLevel(level)

        self.server = await asyncio.start_server(self.client_connected,
                                                 self.config.address,
                                                 self.config.port)

        await self.db.set_bind('postgresql://{}:{}@{}/{}'.format(
            self.config.database_username, self.config.database_password,
            self.config.database_address, self.config.database_name))

        self.logger.info('Booting Mystic')

        self.redis = await aioredis.create_redis_pool('redis://{}:{}'.format(
            self.config.redis_address, self.config.redis_port),
                                                      minsize=5,
                                                      maxsize=10)

        if self.config.type == 'world':
            # World servers reset their player list and population counter.
            await self.redis.delete(f'mystic.players.{self.config.id}')
            await self.redis.hset(f'mystic.population', self.config.id, 0)

            # Unbounded cache whose entries expire after the configured time.
            self.cache = Cache(maxsize=None, ttl=self.config.cache_expiry)

            self.client_class = Penguin
            self.penguin_string_compiler = PenguinStringCompiler()
            self.anonymous_penguin_string_compiler = PenguinStringCompiler()

            PenguinStringCompiler.setup_default_builder(
                self.penguin_string_compiler)
            PenguinStringCompiler.setup_anonymous_default_builder(
                self.anonymous_penguin_string_compiler)

            # World servers load every handler except the login handler.
            await self.xml_listeners.setup(
                mystic.handlers, exclude_load='mystic.handlers.login.login')
            await self.xt_listeners.setup(mystic.handlers)
            self.logger.info('World server started')
        else:
            await self.xml_listeners.setup(mystic.handlers,
                                           'mystic.handlers.login.login')
            self.logger.info('Login server started')

        await self.dummy_event_listeners.setup(mystic.handlers)
        await self.dummy_event_listeners.fire('boot', self)

        self.permissions = await PermissionCollection.get_collection()

        self.logger.info(
            f'Multi-client support is '
            f'{"enabled" if not self.config.single_client_mode else "disabled"}'
        )
        self.logger.info(
            f'Listening on {self.config.address}:{self.config.port}')

        if self.config.auth_key != 'mystic':
            self.logger.warning(
                'The static key has been changed from the default, '
                'this may cause authentication issues!')

        await self.plugins.setup(mystic.plugins)

        async with self.server:
            await self.server.serve_forever()
 def __init__(self, path):
     """Create an empty cache and parse the XML document at *path*."""
     self.cache = Cache()
     # str(path) replaces the original r'{0}'.format(path), which was a
     # pointless string round-trip with a misleading r-prefix.
     self.root = etree.parse(str(path))
Beispiel #29
0
    2: 'BATTERY_HEALTH_GOOD',
    3: 'BATTERY_HEALTH_OVERHEAT',
    4: 'BATTERY_HEALTH_DEAD',
    5: 'BATTERY_HEALTH_OVER_VOLTAGE',
    6: 'BATTERY_HEALTH_UNSPECIFIED_FAILURE',
    7: 'BATTERY_HEALTH_COLD',
}

# Map config-level names onto the stdlib logging constants.
log_levels = {
    'DEBUG': logging.DEBUG,
    'INFO': logging.INFO,
    'WARNING': logging.WARNING,
    'ERROR': logging.ERROR
}

# Module-wide cache shared by this module (library-default settings).
cache = Cache()


class Case:
    init = 'init'
    click = 'click'
    input = 'input'
    diff = 'diff'
    end = 'end'
    press = 'press'
    sleep = 'sleep'
    adb = 'adb'
    check_pkg = 'check_pkg'
    key_events = [
        'home',
        'back',
Beispiel #30
0
from cacheout import Cache

# Module-level cache: up to 1,024,000 entries, each expiring after 600 seconds.
# (Removed a leftover commented-out `return cache` line from a copy-paste.)
cache = Cache(maxsize=1024000, ttl=600)