Example #1
    def __init__(self, shape, train=7, val=1, test=2):
        self.labels = 17

        # read data from the cache, or from the original tgz
        cache = Cache('./cache/17flowers{}.pkl'.format(shape))
        self.data = cache.load()
        if not self.data:
            from IO import load_flowers17
            self.data = load_flowers17('./data/17flowers.tgz', shape)
            cache.save(self.data)

        path = './data/flowers17.pkl'
        if os.path.isfile(path):
            with open(path, 'rb') as f:
                self.train, self.val, self.test = pickle.load(f)
        else:
            self.train, self.val, self.test = \
                    Sample(), Sample(), Sample()
            self.split_data(train, val, test)

            self.train.to_np()
            self.val.to_np()
            self.test.to_np()
            with open(path, 'wb') as f:
                pickle.dump([self.train, self.val, self.test], f)
        self.train_list = range(len(self.train.image))
        self.val_list = range(len(self.val.image))
        self.test_list = range(len(self.test.image))
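The constructor above uses a load-or-rebuild caching pattern twice: once for the raw tgz contents and once for the train/val/test split. A minimal standalone sketch of a pickle-backed cache with the same load()/save() surface (an assumption for illustration; the project's actual Cache class is not shown):

import os
import pickle

class PickleCache:
    """Minimal pickle-backed cache: load() returns None on a miss."""
    def __init__(self, path):
        self.path = path

    def load(self):
        if os.path.isfile(self.path):
            with open(self.path, 'rb') as f:
                return pickle.load(f)
        return None

    def save(self, obj):
        os.makedirs(os.path.dirname(self.path) or '.', exist_ok=True)
        with open(self.path, 'wb') as f:
            pickle.dump(obj, f)

# load from the cache, or rebuild from the expensive source and save
cache = PickleCache('./cache/17flowers.pkl')
data = cache.load()
if not data:
    data = {'images': [], 'labels': []}  # stand-in for load_flowers17(...)
    cache.save(data)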
Example #2
    def login(self, url, success_judge,
              certcode_url=None, cache_name='./data', anonymous=False,
              **kwargs):
        from getpass import getpass
        _input = getpass if anonymous else input
        
        def get_certcode(url):
            print('[LOG] downloading certcode from %s ..' % url)
            name = 'certcode.jpg'
            certcode_path = os.path.join(self.cache_path, name)
            self.download(name, url)
            print('[LOG] trying to import cv2..')
            try:
                import cv2
                img = cv2.imread(certcode_path)
                if 'recognition' in kwargs:
                    print('[LOG] use auto recognition instead!')
                    return kwargs['recognition'](img, kwargs['pattern'])
                print('[SUC] cv2 imported successfully. please watch the image window and input the certcode..')
                cv2.imshow(cache_name, img)
                cv2.waitKey(3000)
            except Exception:
                print('[ERR] failed to import or use cv2. please open %s and input the certcode' % certcode_path)
            certcode = input('[I N] certcode: ')
            return certcode

        print('[LOG] logging in to %s ..' % url)
        information = Cache(cache_name)
        data = information.load('login')
        if not data:
            username = _input('[I N] username: ')
            password = _input('[I N] password: ')
            tel = _input('[I N] tel: ')
            ####    ####    ####    ####
            #   change data format to what target website needs
            data = {
                'nickName': username,
                'password': password,
                'logintype': 'PLATFORM',
            }
            ####    ####    ####    ####
            information.save(tel, 'tel')
            information.save(data, 'login')
        certcode = get_certcode(certcode_url) if certcode_url else None

        ####    ####    ####    ####
        #   change data format to what target website needs
        if certcode:
            data['checkCode'] = certcode
        ####    ####    ####    ####

        print('[LOG] hi, user %s.' % (data['nickName'] \
                if not anonymous else 'anonymous'))

        response = self.post(url, data=data, headers=self.headers)
        self.html_save(response, 'login.html')

        
        return success_judge(response)
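The recognition kwarg above lets a caller bypass manual certcode entry entirely. A hedged sketch of a compatible callback (pytesseract is an assumption, not a dependency of the original code; any callable taking (img, pattern) and returning a string fits):

import pytesseract

def recognize(img, pattern):
    # img is the certcode image loaded by cv2.imread; pattern is passed
    # through from the caller and may be ignored by a simple OCR hook
    return pytesseract.image_to_string(img).strip()

# then pass it through login's **kwargs, e.g.:
#   spider.login(url, judge, certcode_url=..., recognition=recognize, pattern=None)
# where spider and judge are stand-ins for your subclass and success check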
Example #3
    def __init__(self, *, timeout=None, headers=cfg.default_headers,
                 headers_path=None, keys=None, cache_path='html_cache',
                 encoding=None):
        self.sess = requests.Session()
        self.timeout = timeout
        self.headers = (self.make_headers(headers_path, keys)
                        if isinstance(headers_path, str) else headers)
        self.cache_path = cache_path
        self.cache = Cache(self.cache_path)
        self.encoding = encoding
Example #4
File: Zabbix.py Project: 40a/zabbixctl
    def __init__(self,
                 host,
                 uri_path,
                 user,
                 noverify=False,
                 cacert=None,
                 http=False,
                 timeout=30):
        """
        Initializes a Zabbix instance
        :param host: hostname to connect to (ex. zabbix.yourdomain.net)
        :param user: username to connect with (ex. Admin)
        :param uri_path: uri path to zabbix api (ex. zabbix)
        :param noverify: turns off verification
        :param cacert: the certificate authority to use
        :param http: flag to use http over https
        :param timeout: API timeout parameter
        :return: Zabbix instance
        """

        self.cache = Cache('/tmp/zabbix.cache')
        self.host = host
        self.cache_slug = '{0}-{1}'.format(host, user)

        zabbix_url = urlunparse([
            'http' if http else 'https',
            host.strip('/'), uri_path, '', '', ''
        ])
        log.debug("Creating instance of Zabbic with url: %s", zabbix_url)

        self.zapi = ZabbixAPI(zabbix_url)

        if cacert is not None:
            log.debug('Setting zapi.session.verify to {0}'.format(cacert))
            self.zapi.session.verify = cacert

        if noverify:
            log.debug('Setting zapi.session.verify to False')
            self.zapi.session.verify = False

        self.zapi.timeout = timeout
        self.fetch_zabbix_api_version()  # Check the api

        token = self.cache.get(self.cache_slug)
        if token:
            log.debug('Found token for {0}'.format(host))
            self.zapi.auth = token
            # Let's test the token
            try:
                self.verify_token()
            except ZabbixNotAuthorized:
                self.zapi.auth = ''
                self.cache.delete(self.cache_slug)
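The token handling above is a cache-then-verify pattern: reuse a cached token if a cheap API call still succeeds, otherwise discard it and log in again. A minimal sketch of the same idea with a plain dict standing in for the cache (names are illustrative, not the project's API):

def get_authorized_api(cache, slug, api, do_login, verify):
    token = cache.get(slug)
    if token:
        api.auth = token            # reuse the cached token
        try:
            verify(api)             # cheap call that fails on a stale token
            return api
        except Exception:
            api.auth = ''           # stale token: discard it
            cache.pop(slug, None)
    api.auth = do_login()           # fresh login; cache the new token
    cache[slug] = api.auth
    return api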
Example #5
    def request_proxy(self,
                      proxies,
                      path,
                      proxyid,
                      params={},
                      headers={},
                      timeout=10):
        log("(URL) Proxy domain is activated", LOGLEVEL.NONE)
        with Cache(proxyid) as proxies_cache:
            if not proxies_cache or not all(p in proxies_cache['proxies']
                                            for p in proxies):
                proxies_cache['proxies'] = proxies

            for proxy in proxies_cache['proxies'][:]:
                try:
                    if path == 'movie_favs':
                        _favs._create_movie_favs()
                        with open(_json_file) as json_read:
                            _data = json.load(json_read)
                    else:
                        _data = self.request(proxy, path, params, headers,
                                             timeout)
                    if _data or _data is None:
                        return _data
                except (HTTPError, socket.timeout, socket.gaierror,
                        socket.herror, socket.error) as e:
                    if e.__class__.__name__ == 'error':
                        if e.errno not in (errno.EUSERS, errno.ECONNRESET,
                                           errno.ETIMEDOUT, errno.ECONNREFUSED,
                                           errno.EHOSTDOWN):
                            raise
                    log(
                        "(URL) %s: %s - %s" %
                        (e.__class__.__name__, str(e), self.url),
                        LOGLEVEL.ERROR)
                    sys.exc_clear()
                log(
                    "(URL) Proxy domain '%s' is not working and will therefore have low priority in the future"
                    % proxy, LOGLEVEL.NOTICE)
                proxies_cache.extendKey('proxies',
                                        [proxies_cache['proxies'].pop(0)])
            raise ProxyError("There was not any domains that worked", 30328)
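On every failure the loop demotes the first proxy to the back of the cached list, so persistently broken domains drift toward low priority across runs. A self-contained sketch of that rotation policy (the fetch callable is hypothetical):

def fetch_with_rotation(proxies, fetch):
    """Try proxies front to back, demoting each one that fails."""
    for _ in range(len(proxies)):
        try:
            return fetch(proxies[0])
        except OSError:
            proxies.append(proxies.pop(0))  # move the failure to the back
    raise RuntimeError('no proxy worked')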
Example #6
    def __init__(self, **kwargs):
        super().__init__()
        self._request_inter = Interaction("receive")
        self._udp_sender_inter = Interaction("udp_sender")
        self._cache = Cache("middleware")

        self._cache['fd'] = {}
        self._cache['pathname'] = {}
        self._cache['package'] = {}
        self._cache['user'] = {}

        self._mapper = UDPMapper()

        self._handlers = {
            "udp_read":
            TaskThread(target=kwargs.get("read", self._read), name="read"),
            "udp_write":
            TaskThread(target=kwargs.get("write", self._write), name="write")
        }
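The handler tables in this and the following examples all share one pattern: map a message type to a worker thread, with kwargs allowed to override any default target. A minimal sketch of that dispatch idea using plain threading.Thread (TaskThread's exact API is not shown here, so this is an assumption):

import threading

def build_handlers(defaults, overrides):
    # one thread per message type; overrides may replace any default target
    return {
        name: threading.Thread(target=overrides.get(name, default), name=name)
        for name, default in defaults.items()
    }

handlers = build_handlers({'read': lambda: None, 'write': lambda: None},
                          {'read': lambda: print('custom read')})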
Example #7
    def __init__(self, **kwargs):
        super().__init__()
        self._request_inter = Interaction("receive")
        self._sender_inter = Interaction("sender")

        self._handlers = {
            "alive":
            TaskThread(target=kwargs.get("alive", self._alive), name="alive"),
            "status":
            TaskThread(target=kwargs.get("status", self._status),
                       name="status"),
            "init":
            TaskThread(target=kwargs.get("init", self._init), name="init"),
        }

        self._cache = Cache("cluster_manager")
        self._cache['node_load'] = {}

        self._mapper = HealthMapper()
Example #8
    def authenticate_credentials(self, credentials):
        decoded_auth = b64decode(credentials).decode()
        client_id, _, signature = decoded_auth.partition(':')

        client = Client.objects.filter(pk=client_id).first()
        if client is None:
            raise Unauthorized

        timestamp_header = os.getenv('HMAC_TIMESTAMP_HEADER', 'Timestamp')
        nonce_header = os.getenv('HMAC_NONCE_HEADER', 'Nonce')

        timestamp = request.headers.get(timestamp_header)
        nonce = request.headers.get(nonce_header)

        msg = "{method}{path}{payload}{timestamp}{nonce}".format(
            method=request.method,
            path=request.path,
            payload=request.data,
            timestamp=timestamp,
            nonce=nonce)

        digest = hmac.new(key=client.secret.encode(),
                          msg=msg.encode(),
                          digestmod=sha256).digest()

        calculated_signature = b64encode(digest).decode()

        # compare in constant time so the check does not leak timing information
        if not hmac.compare_digest(signature, calculated_signature):
            raise Unauthorized

        hmac_expires = int(os.getenv('HMAC_EXPIRES', 60 * 5))

        timestamp = float(timestamp)
        if time.time() - timestamp > hmac_expires:
            raise Unauthorized

        cache = Cache(key_prefix='nonce')
        if nonce in cache:
            raise Unauthorized

        cache.set(nonce, True, timeout=hmac_expires)

        return client
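For the check above to pass, the client has to sign the exact same message with the shared secret. A sketch of the matching client side, derived from the server logic (the header names mirror the defaults above; nothing here is the project's published client API):

import hmac
import time
import uuid
from base64 import b64encode
from hashlib import sha256

def make_auth_headers(client_id, secret, method, path, payload=''):
    timestamp = str(time.time())
    nonce = uuid.uuid4().hex  # a fresh nonce defeats replay within the window
    msg = '{}{}{}{}{}'.format(method, path, payload, timestamp, nonce)
    signature = b64encode(
        hmac.new(secret.encode(), msg.encode(), sha256).digest()).decode()
    credentials = b64encode(
        '{}:{}'.format(client_id, signature).encode()).decode()
    return {'Authorization': credentials,
            'Timestamp': timestamp,
            'Nonce': nonce}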
Example #9
    def __init__(self, **kwargs):
        super().__init__()
        self._request_inter = Interaction("receive")
        self._tcp_sender_inter = Interaction("tcp_sender")
        self._udp_sender_inter = Interaction("udp_sender")
        self._handlers = {
            "open":
            TaskThread(target=kwargs.get("open", self._open), name="open"),
            "flush":
            TaskThread(target=kwargs.get("flush", self._flush), name="flush"),
            "read":
            TaskThread(target=kwargs.get("read", self._read), name="read"),
            "write":
            TaskThread(target=kwargs.get("write", self._write), name="write"),
            "getattr":
            TaskThread(target=kwargs.get("getattr", self._getattr),
                       name="getattr"),
            "readdir":
            TaskThread(target=kwargs.get("readdir", self._readdir),
                       name="readdir"),
            "create":
            TaskThread(target=kwargs.get("create", self._create),
                       name="create"),
            "mkdir":
            TaskThread(target=kwargs.get("mkdir", self._mkdir), name="mkdir"),
            "load":
            TaskThread(target=kwargs.get("load", self._load), name="load"),
            "cache_add":
            TaskThread(target=kwargs.get("cache_add", self._cache_add),
                       name="cache_add")
        }

        self._cache = Cache("middleware")

        self._cache['fd'] = {}
        self._cache['pathname'] = {}
        self._cache['package'] = {}
        self._cache['user'] = {}

        self._mapper = TCPMapper()
Example #10
    def __init__(self, **kwargs):
        super().__init__()
        self._request_inter = Interaction("receive")
        self._sender_inter = Interaction("sender")

        self._handlers = {
            "read":
            TaskThread(target=kwargs.get("read", self._read), name="read"),
            "write":
            TaskThread(target=kwargs.get("write", self._write), name="write"),
            "create":
            TaskThread(target=kwargs.get("create", self._create),
                       name="create"),
            "mkdir":
            TaskThread(target=kwargs.get("mkdir", self._mkdir), name="mkdir"),
            "status":
            TaskThread(target=kwargs.get("status", self._status),
                       name="status"),
        }

        self._cache = Cache("cluster_manager")
        self._cache['node_load'] = {}

        self._mapper = ClusterManagerMapper()
Example #11
def amoeba_mrcp(empirical_game,
                full_game,
                approximation=False,
                var='uni',
                max_iter=5000,
                ftolerance=1.e-4,
                xtolerance=1.e-4,
                discount=0.05):
    """
    Note each varibale in the amoeba variable is two times the length of the strategies
    Input:
        empirical_game : each player's strategy set
        full_game      : the full meta game to compute mrcp on
        approximation  : whether to approximate the regret of mixed strategy using deviation payoff of pure profile.
        var            : initial guessing for the solution. defaulted to uniform
        max_iter       : maximum iteration of amoeba to automatically end
        ftolerance     : smallest difference of best and worst vertex to converge
        xtolerance     : smallest difference in average point and worst point of simplex
    """
    def normalize(sections, variables):
        """
        A variable made of len(sections) parts, each of the parts is
        in a probability simplex
        Input:
            variables: the varible that amoeba is searching through
            sections : a list containing number of element for each section.
                       Typically it is the list of number of strategies
        Output:
            A normalized version of the varibales by sections
        """
        pointer = 0
        for ele in np.cumsum(sections):
            variables[pointer:ele] /= sum(variables[pointer:ele])
            pointer = ele
        return variables

    # construct function for query
    if approximation:
        # Calculate the upper-bounded regret of mixed strategy profile.
        caches = [Cache(), Cache()]
        caches = find_all_deviation_payoffs(empirical_games=empirical_game,
                                            meta_game=full_game,
                                            caches=caches)

        # print("Cache0:", caches[0].cache.items())
        # print("Cache1:", caches[1].cache.items())

        func = partial(upper_bouned_regret_of_variable,
                       empirical_games=empirical_game,
                       meta_game=full_game,
                       caches=caches,
                       discount=discount)

        # func = partial(sampled_bouned_regret_of_variable,
        #                empirical_games=empirical_game,
        #                meta_game=full_game,
        #                caches=caches,
        #                discount=discount)

        # func = partial(regret_of_variable,
        #                empirical_games=empirical_game,
        #                meta_game=full_game,
        #                sum_regret=True)

    else:
        # Calculate the exact regret of mixed strategy profile.
        func = partial(regret_of_variable,
                       empirical_games=empirical_game,
                       meta_game=full_game)

    # TODO: check if repeated action is allowed in empirical game.
    sections = [len(ele)
                for ele in empirical_game]  # num strategies for players
    normalize = partial(normalize, sections=sections)  # force into simplex
    if var == 'uni':
        var = np.ones(sum(sections))  # start the search from the uniform point
    elif var == 'rand':  # random initial point
        var = np.random.rand(sum(sections))
    else:
        assert len(var) == sum(sections), 'initial points incorrect shape'

    var = normalize(variables=var)

    nvar = sum(sections)  # total number of variables to minimize over
    nsimplex = nvar + 1  # number of points in the simplex

    # Set up the simplex. The first point is the guess. All sides of the
    # simplex have length |c|; tweak this value if constraints are violated.
    # Assuming the vertices of the simplex are normalized, reflection,
    # expansion and shrinking all stay on the probability simplex.
    c = 1
    val_b = c / nvar / sqrt(2) * (sqrt(nvar + 1) - 1)
    val_a = val_b + c / sqrt(2)

    simplex = [0] * nsimplex
    simplex[0] = var[:]

    for i in range(nvar):
        addition_vector = np.ones(sum(sections)) * val_b
        addition_vector[i] = val_a
        simplex[i + 1] = normalize(variables=simplex[0] + addition_vector)

    fvalue = []
    for i in range(nsimplex):  # set the function values for the simplex
        fvalue.append(func(simplex[i]))

    # Start of the Amoeba method.
    iteration = 0

    while iteration < max_iter:

        # sort the simplex and fvalue so the last entry is the worst
        sort_index = np.argsort(fvalue)
        fvalue = [fvalue[ele] for ele in sort_index]
        simplex = [simplex[ele] for ele in sort_index]

        # get the average of the n points, excluding the worst
        x_a = np.average(np.array(simplex[:-1]), axis=0)
        if not check_within_probability_simplex(x_a):
            x_a = variable_projection(x_a, sections)
        # assert check_within_probability_simplex(x_a), 'centroid not in probability simplex'

        # determine the termination criteria
        # 1. distance between average and worst
        simscale = np.sum(np.absolute(x_a - simplex[-1])) / nvar
        # 2. distance between best and worst function values
        fscale = (abs(fvalue[0]) + abs(fvalue[-1])) / 2.0
        if fscale != 0.0:
            frange = abs(fvalue[0] - fvalue[-1]) / fscale
        else:
            frange = 0.0  # all the fvalues are zero in this case

        # Convergence Checking
        if (ftolerance <= 0.0 or frange < ftolerance) \
                and (xtolerance <= 0.0 or simscale < xtolerance):
            return np.split(simplex[0],
                            sections[:-1]), fvalue[0], iteration, simplex[0]

        # perform reflection to obtain x_r, then evaluate f_r
        alpha = 1
        x_r = x_a + alpha * (x_a - simplex[-1])
        x_r = infeasibility_handling(var=x_r,
                                     sections=sections,
                                     base=x_a,
                                     step_size=alpha,
                                     minus=simplex[-1])
        f_r = func(x_r)

        if f_r < fvalue[0]:  # expansion if the reflection is better
            gamma = 1
            x_e = x_r + gamma * (x_r - x_a)
            x_e = infeasibility_handling(var=x_e,
                                         sections=sections,
                                         base=x_r,
                                         step_size=gamma,
                                         minus=x_a)

            f_e = func(x_e)
            if f_e < f_r:  # accept expansion and replace the worst point
                simplex[-1] = x_e
                fvalue[-1] = f_e
            else:  # refuse expansion and accept reflection
                simplex[-1] = x_r
                fvalue[-1] = f_r
        elif f_r < fvalue[-2]:  # accept reflection when better than the second-worst
            simplex[-1] = x_r
            fvalue[-1] = f_r
        else:
            if f_r > fvalue[-1]:  # inside contraction: reflection is worse than the worst
                x_c = x_a - 0.5 * (x_a - simplex[-1])  # 0.5 is a hyperparameter
                f_c = func(x_c)
                if f_c < fvalue[-1]:  # accept inside contraction
                    simplex[-1] = x_c
                    fvalue[-1] = f_c
                else:
                    simplex, fvalue = shrink_simplex(simplex, fvalue, func)
            else:  # outside contraction: reflection no worse than the worst
                x_c = x_a + alpha * 0.5 * (x_a - simplex[-1])  # 0.5 is a hyperparameter
                f_c = func(x_c)
                if f_c < f_r:  # accept contraction
                    simplex[-1] = x_c
                    fvalue[-1] = f_c
                else:
                    simplex, fvalue = shrink_simplex(simplex, fvalue, func)
        iteration += 1
    sort_index = np.argsort(fvalue)
    fvalue = [fvalue[ele] for ele in sort_index]
    simplex = [simplex[ele] for ele in sort_index]

    return np.split(simplex[0],
                    sections[:-1]), fvalue[0], iteration, simplex[0]
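The inner normalize function is what keeps the search feasible: each player's slice of the concatenated variable is rescaled onto its own probability simplex. A small worked example of that per-section rescaling:

import numpy as np

def normalize(sections, variables):
    pointer = 0
    for ele in np.cumsum(sections):
        variables[pointer:ele] /= sum(variables[pointer:ele])
        pointer = ele
    return variables

# two players with 2 and 3 strategies; each slice sums to 1 afterwards
v = normalize([2, 3], np.array([1.0, 3.0, 1.0, 1.0, 2.0]))
print(v)  # [0.25 0.75 0.25 0.25 0.5 ]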
Example #12
import itertools
from collections import defaultdict
from typing import Iterator, Dict, Set, List, Tuple

import numpy as np
# from keras.models import load_model

from file_utils import GenomeReadDataReader
from structures import GenomeReadData, GenomeReadCluster, ClusterMergeCase
from utils import iter_with_progress, Cache

KmerIndex = Dict[str, Set[int]]
ClusterConnection = Tuple[int, int, int]

# model = load_model('ecoli_model.hdf5')

cache = Cache()


def get_clusters_from_reads(
        reads: Iterator[GenomeReadData]) -> List[GenomeReadCluster]:
    cluster_id_counter = itertools.count()
    clusters = []
    for read in reads:
        if not read.characteristic_kmers:
            continue

        clusters.append(
            GenomeReadCluster(
                reference_id=next(cluster_id_counter),
                characteristic_kmers=read.characteristic_kmers.copy(),
                reads=[read]))
    return clusters
Example #13
from kafka.structs import TopicPartition
from utils import kafka_consumer, Cache, get_web3, Logger  # Logger assumed to live in utils

# set up logging
logger = Logger(__name__, filename='block_transaction.log')
# log abnormal cases:
# 1. the topic was not created properly
# 2. the same block height was fetched more than once
logger_err = Logger(f'{__name__}_err', filename='err_block_transaction.log')

# web3 connection
w3 = get_web3()

# block cache: we ran into a problem similar to this one:
# https://ethereum.stackexchange.com/questions/87227/duplicate-events-from-get-new-entries-using-web3-py
block_cache = Cache(maxlen=5)


def send_block(height_or_hash):
    """
    获得指定高度的区块数据并发送到 kafka
    :param height_or_hash:
    :return:
    """
    # 获取指定的区块,并加入缓存列表
    block1 = w3.eth.getBlock(height_or_hash, True)
    if block1.number in block_cache:
        logger_err.error(f'received duplicate block height {block1.number}')
        block_cache[block1.number] = block1
        return
    block_cache[block1.number] = block1
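A minimal sketch of a bounded mapping like the Cache(maxlen=5) used above, built on OrderedDict (an assumption about its behavior, not the project's actual implementation):

from collections import OrderedDict

class BoundedCache(OrderedDict):
    """Keeps at most maxlen entries, evicting the oldest first."""
    def __init__(self, maxlen=5):
        super().__init__()
        self.maxlen = maxlen

    def __setitem__(self, key, value):
        super().__setitem__(key, value)
        if len(self) > self.maxlen:
            self.popitem(last=False)  # drop the oldest block

block_cache = BoundedCache(maxlen=5)
block_cache[1] = 'block-1'
print(1 in block_cache)  # True; duplicate heights can be detected this way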