def __init__(self, shape, train=7, val=1, test=2):
    self.labels = 17
    # read data from cache or the original tgz
    cache = Cache('./cache/17flowers{}.pkl'.format(shape))
    self.data = cache.load()
    if not self.data:
        from IO import load_flowers17
        self.data = load_flowers17('./data/17flowers.tgz', shape)
        cache.save(self.data)
    path = './data/flowers17.pkl'
    if os.path.isfile(path):
        with open(path, 'rb') as f:
            self.train, self.val, self.test = pickle.load(f)
    else:
        self.train, self.val, self.test = Sample(), Sample(), Sample()
        self.split_data(train, val, test)
        self.train.to_np()
        self.val.to_np()
        self.test.to_np()
        with open(path, 'wb') as f:
            pickle.dump([self.train, self.val, self.test], f)
    self.train_list = range(len(self.train.image))
    self.val_list = range(len(self.val.image))
    self.test_list = range(len(self.test.image))
def get_devices(self, refresh=False):
    if self.cache:
        if refresh is True or not Cache.get(self.cache):
            result = self._get_devices()
            Cache.set(self.cache, result)
        else:
            result = Cache.get(self.cache)
    else:
        result = self._get_devices()
    return result
def authenticate_credentials(self, credentials):
    # b64decode returns bytes; decode before splitting on ':'
    decoded_auth = b64decode(credentials).decode()
    client_id, _, signature = decoded_auth.partition(':')
    client = Client.objects.filter(pk=client_id).first()
    if client is None:
        raise Unauthorized
    timestamp_header = os.getenv('HMAC_TIMESTAMP_HEADER', 'Timestamp')
    nonce_header = os.getenv('HMAC_NONCE_HEADER', 'Nonce')
    timestamp = request.headers.get(timestamp_header)
    nonce = request.headers.get(nonce_header)
    msg = "{method}{path}{payload}{timestamp}{nonce}".format(
        method=request.method,
        path=request.path,
        payload=request.data,
        timestamp=timestamp,
        nonce=nonce)
    digest = hmac.HMAC(key=client.secret.encode(),
                       msg=msg.encode(),
                       digestmod=sha256)
    # the HMAC object must be finalized with .digest() before encoding
    calculated_signature = b64encode(digest.digest()).decode()
    # constant-time comparison to avoid timing attacks
    if not hmac.compare_digest(signature, calculated_signature):
        raise Unauthorized
    hmac_expires = int(os.getenv('HMAC_EXPIRES', 60 * 5))
    if time.time() - float(timestamp) > hmac_expires:
        raise Unauthorized
    # reject replays: each nonce may be used only once within the expiry window
    cache = Cache(key_prefix='nonce')
    if nonce in cache:
        raise Unauthorized
    cache.set(nonce, True, timeout=hmac_expires)
    return client
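# A hedged client-side counterpart to the handler above (illustrative, not
# from the original source): it signs method + path + payload + timestamp +
# nonce with the shared secret and base64-encodes "client_id:signature".
# The function name and header names are assumptions.
import hmac
import time
import uuid
from base64 import b64encode
from hashlib import sha256

def sign_request(client_id, secret, method, path, payload):
    timestamp = str(time.time())
    nonce = uuid.uuid4().hex
    msg = "{}{}{}{}{}".format(method, path, payload, timestamp, nonce)
    signature = b64encode(
        hmac.new(secret.encode(), msg.encode(), sha256).digest()).decode()
    credentials = b64encode(
        "{}:{}".format(client_id, signature).encode()).decode()
    # send `credentials` in the auth header plus the Timestamp/Nonce headers
    return credentials, {'Timestamp': timestamp, 'Nonce': nonce}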
class RedditorFacet(thing.ThingFacet):
    name = "redditor"
    commands = command.thing.add_child(command.FacetCommandSet(name))

    def __init__(self, thing_):
        thing.ThingFacet.__init__(self, thing_)
        self.get_info = Cache(self._get_info, expire_seconds=10 * 60)

    @classmethod
    def does_attach(cls, thing):
        return False

    @commands.add(u"forget that {thing} is a redditor",
                  help=u"unset {thing}'s reddit username",
                  exclusive=True)
    def unset_redditor(self, thing, context):
        del self.data
        self.thing.detach_persistent(self)

    @commands.add(u"{thing} has reddit username {username}",
                  help=u"set {thing}'s reddit username to {username}")
    def set_redditor_username(self, thing, username, context):
        self.username = username

    @property
    def username(self):
        if self.has_data and "username" in self.data:
            return self.data["username"]
        else:
            return self.thing.name

    @username.setter
    def username(self, value):
        if value != self.data["username"]:
            self.data["username"] = value
            self.get_info.reset()

    def _get_info(self):
        about_url = "http://www.reddit.com/user/{0}/about.json"
        about = urllib.urlopen(about_url.format(self.username))
        return json.load(about)["data"]
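# The Cache used above wraps a zero-argument callable and memoizes its result
# for `expire_seconds`, with `reset()` forcing a refetch. A minimal sketch of
# that assumed contract (not the project's actual class):
import time

class TimedCache:
    def __init__(self, func, expire_seconds):
        self._func = func
        self._expire = expire_seconds
        self._value = None
        self._stamp = None

    def __call__(self):
        # recompute only when no value is cached or the cached one expired
        now = time.time()
        if self._stamp is None or now - self._stamp > self._expire:
            self._value = self._func()
            self._stamp = now
        return self._value

    def reset(self):
        # invalidate the cached value so the next call refetches
        self._stamp = None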
def average_runs(name, method):
    num_runs = Config.get()["num_runs"]
    new_percent_list = np.arange(
        0.1,    # self.label_percent
        0.4,    # self.max_percent
        0.015   # self.batch_percent / 2.0
    )
    rmse_avg_list = np.zeros(len(new_percent_list))
    mae_avg_list = np.zeros(len(new_percent_list))
    for run_id in range(num_runs):
        key = "{}_{}_{}".format(name, method, run_id)
        percent_list, rmse_list, mae_list = Cache.get(key)
        # resample each run onto the common grid before accumulating
        rmse_avg_list += np.interp(new_percent_list, percent_list, rmse_list)
        mae_avg_list += np.interp(new_percent_list, percent_list, mae_list)
    rmse_avg_list /= num_runs
    mae_avg_list /= num_runs
    return new_percent_list, rmse_avg_list, mae_avg_list
def request_proxy(self, proxies, path, proxyid, params={}, headers={}, timeout=10):
    log("(URL) Proxy domain is activated", LOGLEVEL.NONE)
    with Cache(proxyid) as proxies_cache:
        if not proxies_cache or not all(p in proxies_cache['proxies'] for p in proxies):
            proxies_cache['proxies'] = proxies
        for proxy in proxies_cache['proxies'][:]:
            try:
                if path == 'movie_favs':
                    _favs._create_movie_favs()
                    with open(_json_file) as json_read:
                        _data = json.load(json_read)
                else:
                    _data = self.request(proxy, path, params, headers, timeout)
                if _data or _data is None:
                    return _data
            except (HTTPError, socket.timeout, socket.gaierror,
                    socket.herror, socket.error) as e:
                if e.__class__.__name__ == 'error':
                    if e.errno not in [errno.EUSERS, errno.ECONNRESET,
                                       errno.ETIMEDOUT, errno.ECONNREFUSED,
                                       errno.EHOSTDOWN]:
                        raise
                log("(URL) %s: %s - %s" % (e.__class__.__name__, str(e), self.url),
                    LOGLEVEL.ERROR)
                sys.exc_clear()
            log("(URL) Proxy domain '%s' is not working and will therefore "
                "have low priority in the future" % proxy, LOGLEVEL.NOTICE)
            # rotate the failing domain to the back of the list
            proxies_cache.extendKey('proxies', [proxies_cache['proxies'].pop(0)])
        raise ProxyError("None of the proxy domains worked", 30328)
def __init__(self, **kwargs):
    super().__init__()
    self._request_inter = Interaction("receive")
    self._udp_sender_inter = Interaction("udp_sender")
    self._cache = Cache("middleware")
    self._cache['fd'] = {}
    self._cache['pathname'] = {}
    self._cache['package'] = {}
    self._cache['user'] = {}
    self._mapper = UDPMapper()
    self._handlers = {
        "udp_read": TaskThread(target=kwargs.get("read", self._read), name="read"),
        "udp_write": TaskThread(target=kwargs.get("write", self._write), name="write"),
    }
def __init__(self, **kwargs):
    super().__init__()
    self._request_inter = Interaction("receive")
    self._tcp_sender_inter = Interaction("tcp_sender")
    self._udp_sender_inter = Interaction("udp_sender")
    self._handlers = {
        "open": TaskThread(target=kwargs.get("open", self._open), name="open"),
        "flush": TaskThread(target=kwargs.get("flush", self._flush), name="flush"),
        "read": TaskThread(target=kwargs.get("read", self._read), name="read"),
        "write": TaskThread(target=kwargs.get("write", self._write), name="write"),
        "getattr": TaskThread(target=kwargs.get("getattr", self._getattr), name="getattr"),
        "readdir": TaskThread(target=kwargs.get("readdir", self._readdir), name="readdir"),
        "create": TaskThread(target=kwargs.get("create", self._create), name="create"),
        "mkdir": TaskThread(target=kwargs.get("mkdir", self._mkdir), name="mkdir"),
        "load": TaskThread(target=kwargs.get("load", self._load), name="load"),
        "cache_add": TaskThread(target=kwargs.get("cache_add", self._cache_add), name="cache_add"),
    }
    self._cache = Cache("middleware")
    self._cache['fd'] = {}
    self._cache['pathname'] = {}
    self._cache['package'] = {}
    self._cache['user'] = {}
    self._mapper = TCPMapper()
class Zabbix(object):
    def __init__(self, host, uri_path, user, noverify=False, cacert=None,
                 http=False, timeout=30):
        """
        Initializes a Zabbix instance
        :param host: hostname to connect to (ex. zabbix.yourdomain.net)
        :param user: username to connect with (ex. Admin)
        :param uri_path: uri path to the zabbix api (ex. zabbix)
        :param noverify: turns off verification
        :param cacert: the certificate authority to use
        :param http: flag to use http over https
        :param timeout: API timeout parameter
        :return: Zabbix instance
        """
        self.cache = Cache('/tmp/zabbix.cache')
        self.host = host
        self.cache_slug = '{0}-{1}'.format(host, user)
        zabbix_url = urlunparse([
            'http' if http else 'https',
            host.strip('/'),
            uri_path, '', '', ''
        ])
        log.debug("Creating instance of Zabbix with url: %s", zabbix_url)

        self.zapi = ZabbixAPI(zabbix_url)
        if cacert is not None:
            log.debug('Setting zapi.session.verify to {0}'.format(cacert))
            self.zapi.session.verify = cacert
        if noverify:
            log.debug('Setting zapi.session.verify to False')
            self.zapi.session.verify = False

        self.zapi.timeout = timeout
        self.fetch_zabbix_api_version()  # check the api

        token = self.cache.get(self.cache_slug)
        if token:
            log.debug('Found token for {0}'.format(host))
            self.zapi.auth = token
            # let's test the token
            try:
                self.verify_token()
            except ZabbixNotAuthorized:
                self.zapi.auth = ''
                self.cache.delete(self.cache_slug)

    def fetch_zabbix_api_version(self):
        """
        Reaches out to the zapi api info to parse the version string
        :return: version string or False
        """
        try:
            return self.zapi.apiinfo.version()
        except (HTTPError, ConnectionError, ZabbixAPIException) as e:
            raise ZabbixError(e)

    def verify_token(self):
        """
        Runs the zapi.host.get(limit=1) call to test the current token
        :return: nothing
        """
        try:
            self.zapi.host.get(limit=1)
        except (HTTPError, ConnectionError, ZabbixAPIException) as e:
            # todo: can't we check by the error type, not its string?
            if any(['Not authorised' in str(e),
                    'Not authorized' in str(e),
                    'Session terminated,' in str(e)]):
                log.debug('Token not authorized for {0}'.format(self.host))
                raise ZabbixNotAuthorized
            raise ZabbixError(e)

    def auth(self, username, password):
        """
        Performs the login function of the api with the supplied credentials
        :param username: username
        :param password: password
        :return: True if valid, raises ZabbixNotAuthorized otherwise
        """
        try:
            self.zapi.login(username, password)
            self.cache.write(self.cache_slug, self.zapi.auth)
        except ZabbixAPIException as e:
            raise ZabbixNotAuthorized('Username or password invalid')
        return True
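# A hedged usage sketch for the wrapper above; the hostname and credentials
# are placeholders, not from the original source. Re-authenticate only when
# no cached token survived the check in __init__.
zbx = Zabbix('zabbix.yourdomain.net', 'zabbix', 'Admin')
if not zbx.zapi.auth:
    zbx.auth('Admin', 'password')   # logs in and caches the new token
print(zbx.fetch_zabbix_api_version())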
def amoeba_mrcp(empirical_game,
                full_game,
                approximation=False,
                var='uni',
                max_iter=5000,
                ftolerance=1.e-4,
                xtolerance=1.e-4,
                discount=0.05):
    """
    Minimize the regret of a mixed strategy profile with the amoeba
    (Nelder-Mead) method. Note that each amoeba variable concatenates one
    probability-simplex block per player, so for two players it is twice
    the length of a single strategy set.
    Input:
        empirical_game : each player's strategy set
        full_game      : the full meta game to compute MRCP on
        approximation  : whether to approximate the regret of a mixed strategy
                         using deviation payoffs of pure profiles
        var            : initial guess for the solution; defaults to uniform
        max_iter       : maximum number of amoeba iterations
        ftolerance     : smallest difference between the best and worst vertex
                         values required to converge
        xtolerance     : smallest difference between the centroid and the
                         worst point of the simplex required to converge
        discount       : discount factor forwarded to the approximated regret
                         computation
    Output:
        (per-player mixtures, best regret value, iterations used, flat variable)
    """

    def normalize(variables, sections):
        """
        A variable consists of len(sections) parts, each lying in a
        probability simplex.
        Input:
            variables : the variable that amoeba is searching through
            sections  : the number of elements in each section, typically the
                        number of strategies per player
        Output:
            the variable normalized section by section
        """
        pointer = 0
        for ele in np.cumsum(sections):
            variables[pointer:ele] /= sum(variables[pointer:ele])
            pointer = ele
        return variables

    # construct the function to query
    if approximation:
        # calculate the upper-bounded regret of the mixed strategy profile
        caches = [Cache(), Cache()]
        caches = find_all_deviation_payoffs(empirical_games=empirical_game,
                                            meta_game=full_game,
                                            caches=caches)
        # print("Cache0:", caches[0].cache.items())
        # print("Cache1:", caches[1].cache.items())
        func = partial(upper_bouned_regret_of_variable,
                       empirical_games=empirical_game,
                       meta_game=full_game,
                       caches=caches,
                       discount=discount)
        # func = partial(sampled_bouned_regret_of_variable,
        #                empirical_games=empirical_game,
        #                meta_game=full_game,
        #                caches=caches,
        #                discount=discount)
        # func = partial(regret_of_variable,
        #                empirical_games=empirical_game,
        #                meta_game=full_game,
        #                sum_regret=True)
    else:
        # calculate the exact regret of the mixed strategy profile
        func = partial(regret_of_variable,
                       empirical_games=empirical_game,
                       meta_game=full_game)

    # TODO: check whether repeated actions are allowed in the empirical game
    sections = [len(ele) for ele in empirical_game]  # num strategies per player
    normalize = partial(normalize, sections=sections)  # force into simplex

    if var == 'uni':
        var = np.ones(sum(sections))  # start the search from the uniform point
    elif var == 'rand':
        var = np.random.rand(sum(sections))  # random initial point
    else:
        assert len(var) == sum(sections), 'initial point has incorrect shape'
    var = normalize(variables=var)

    nvar = sum(sections)   # total number of variables to minimize over
    nsimplex = nvar + 1    # number of points in the simplex

    # Set up the simplex. The first point is the guess. All sides of the
    # simplex have length |c|. Please tweak this value should constraints be
    # violated. If the vertices of the simplex are normalized, then
    # reflection, expansion and shrinking stay on the probability simplex.
    c = 1
    val_b = c / nvar / sqrt(2) * (sqrt(nvar + 1) - 1)
    val_a = val_b + c / sqrt(2)

    simplex = [0] * nsimplex
    simplex[0] = var[:]
    for i in range(nvar):
        addition_vector = np.ones(sum(sections)) * val_b
        addition_vector[i] = val_a
        simplex[i + 1] = normalize(variables=simplex[0] + addition_vector)

    fvalue = []
    for i in range(nsimplex):  # set the function values for the simplex
        fvalue.append(func(simplex[i]))

    # start of the amoeba method
    iteration = 0
    while iteration < max_iter:
        # sort the simplex and fvalue so that the last entry is the worst
        sort_index = np.argsort(fvalue)
        fvalue = [fvalue[ele] for ele in sort_index]
        simplex = [simplex[ele] for ele in sort_index]

        # centroid of the n best points (all except the worst)
        x_a = np.average(np.array(simplex[:-1]), axis=0)
        if not check_within_probability_simplex(x_a):
            x_a = variable_projection(x_a, sections)
        # assert check_within_probability_simplex(x_a), 'centroid not in probability simplex'

        # termination criteria:
        # 1. distance between the centroid and the worst point
        simscale = np.sum(np.absolute(x_a - simplex[-1])) / nvar
        # 2. relative difference between the best and worst function values
        fscale = (abs(fvalue[0]) + abs(fvalue[-1])) / 2.0
        if fscale != 0.0:
            frange = abs(fvalue[0] - fvalue[-1]) / fscale
        else:
            frange = 0.0  # all the fvalues are zero in this case

        # convergence check
        if (ftolerance <= 0.0 or frange < ftolerance) \
                and (xtolerance <= 0.0 or simscale < xtolerance):
            return (np.split(simplex[0], sections[:-1]), fvalue[0],
                    iteration, simplex[0])

        # reflection: reflect the worst point through the centroid
        alpha = 1
        x_r = x_a + alpha * (x_a - simplex[-1])
        x_r = infeasibility_handling(var=x_r,
                                     sections=sections,
                                     base=x_a,
                                     step_size=alpha,
                                     minus=simplex[-1])
        f_r = func(x_r)

        if f_r < fvalue[0]:
            # expansion: the reflection is the new best, so push further
            gamma = 1
            x_e = x_r + gamma * (x_r - x_a)
            x_e = infeasibility_handling(var=x_e,
                                         sections=sections,
                                         base=x_r,
                                         step_size=gamma,
                                         minus=x_a)
            f_e = func(x_e)
            if f_e < f_r:
                # accept the expansion and replace the worst point
                simplex[-1] = x_e
                fvalue[-1] = f_e
            else:
                # refuse the expansion and accept the reflection
                simplex[-1] = x_r
                fvalue[-1] = f_r
        elif f_r < fvalue[-2]:
            # accept the reflection when it beats the second-worst point
            simplex[-1] = x_r
            fvalue[-1] = f_r
        else:
            if f_r > fvalue[-1]:
                # inside contraction when the reflection is worse than the worst
                x_c = x_a - 0.5 * (x_a - simplex[-1])  # 0.5 being a hyperparameter
                f_c = func(x_c)
                if f_c < fvalue[-1]:
                    # accept the inside contraction
                    simplex[-1] = x_c
                    fvalue[-1] = f_c
                else:
                    simplex, fvalue = shrink_simplex(simplex, fvalue, func)
            else:
                # outside contraction when the reflection is no worse than the worst
                x_c = x_a + alpha * 0.5 * (x_a - simplex[-1])  # 0.5 being a hyperparameter
                f_c = func(x_c)
                if f_c < f_r:
                    # accept the contraction
                    simplex[-1] = x_c
                    fvalue[-1] = f_c
                else:
                    simplex, fvalue = shrink_simplex(simplex, fvalue, func)

        iteration += 1

    # maximum iterations reached: return the current best vertex
    sort_index = np.argsort(fvalue)
    fvalue = [fvalue[ele] for ele in sort_index]
    simplex = [simplex[ele] for ele in sort_index]
    return np.split(simplex[0], sections[:-1]), fvalue[0], iteration, simplex[0]
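# A hedged call sketch for amoeba_mrcp. The game shapes and the structure of
# empirical_game/full_game are assumptions for illustration; the real
# regret_of_variable may expect different types.
import numpy as np

full_game = [np.random.rand(5, 5), np.random.rand(5, 5)]  # assumed payoff tensors
empirical_game = [[0, 1, 2], [0, 2, 4]]                   # assumed strategy indices
mixtures, regret, iters, flat_var = amoeba_mrcp(empirical_game, full_game,
                                                approximation=False, var='uni')
# `mixtures` holds one probability vector per player over their empirical strategies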
class ClusterManagerDispatcher(object):
    def __init__(self, **kwargs):
        super().__init__()
        self._request_inter = Interaction("receive")
        self._sender_inter = Interaction("sender")
        self._handlers = {
            "read": TaskThread(target=kwargs.get("read", self._read), name="read"),
            "write": TaskThread(target=kwargs.get("write", self._write), name="write"),
            "create": TaskThread(target=kwargs.get("create", self._create), name="create"),
            "mkdir": TaskThread(target=kwargs.get("mkdir", self._mkdir), name="mkdir"),
            "status": TaskThread(target=kwargs.get("status", self._status), name="status"),
        }
        self._cache = Cache("cluster_manager")
        self._cache['node_load'] = {}
        self._mapper = ClusterManagerMapper()

    def dispatch(self, data, address):
        command, *payload = data.decode("utf-8").split('&')
        print(command, payload)
        handler = self._handlers.get(command, None)
        if handler:
            handler.add_task((payload, address))
        else:
            print("Bad command: ", command)

    def add_handler(self, key, handler_func):
        self._handlers[key] = TaskThread(target=handler_func, name=key)
        self._handlers[key].start()

    def start(self):
        for handler in self._handlers.values():
            handler.start()

    def stop(self):
        for handler in self._handlers.values():
            handler.stop()
            handler.join()

    def __recalc_load(self, node_id=None):
        if node_id:
            self._cache['node_load'][node_id] = (
                self._cache[node_id]['free_size']
                * self._cache[node_id]['pack_idle_count'])
        else:
            for key in self._cache.keys():
                if key == 'node_load':  # skip the load table itself
                    continue
                self._cache['node_load'][key] = (
                    self._cache[key]['free_size']
                    * self._cache[key]['pack_idle_count'])

    def __balance(self, package_count):
        res = []
        for _ in range(package_count):
            node_load = copy(self._cache['node_load'])
            node_to_add = min(node_load.items(), key=lambda item: item[1])[0]
            res.append(node_to_add)
            self._cache[node_to_add]['free_size'] -= int(CF.get("Package", "data"))
            self._cache[node_to_add]['pack_idle_count'] += 1
        return res

    def __serialize_dict(self, dict_to_ser):
        key = []
        values = []
        for item in dict_to_ser.items():
            key.append(','.join(item[0]))
            node, package_id = item[1]
            values.append(','.join((str(node), str(package_id))))
        key = '|'.join(key)
        values = '|'.join(values)
        return "&".join((key, values))

    # FILE SYSTEM OPERATIONS
    def _read(self, data, *args, **kwargs):
        payload, address = data
        fd, pathname, max_package_count, offset = payload
        package_list = []
        packages = self._mapper.query('select_package_by_pathname', pathname)
        searched_next_pack_id = None
        for _ in range(len(packages)):
            for package in packages:
                package = list(package)
                next_pack_id = package[2]
                if not next_pack_id:
                    next_pack_id = None
                else:
                    next_pack_id = int(next_pack_id)
                if next_pack_id == searched_next_pack_id:
                    package[2] = str(len(package_list))
                    searched_next_pack_id = int(package[1])
                    package = map(lambda x: str(x), package)
                    package = ','.join(package)
                    package_list.insert(0, package)
                    break
        message = "&".join(("load", str(fd), *package_list))
        self._sender_inter.insert(
            (message, (CF.get("Middleware", "ip"),
                       int(CF.get("Middleware", "port")))))

    def _write(self, data, *args, **kwargs):
        payload, address = data
        pathname, package_count = payload
        package_count = int(package_count)
        print("1")
        node_list = self.__balance(package_count)
        print("1.5")
        self._mapper.query("update_file_status", ("False", pathname))
        order_num, file_id = self._mapper.query("select_file_info", pathname)[0]
        print("2")
        order_num = int(order_num)
        pack_id_list = []
        result_dict = {}
        for node in node_list:
            parent_id = self._mapper.query("select_parent_id", file_id)
            if parent_id:
                parent_id = int(parent_id[0][0])
            package_id = int(
                self._mapper.query("insert_package", (node, file_id))[0][0])
            if parent_id:
                self._mapper.query("update_package", (package_id, parent_id))
            pack_id_list.append(package_id)
            result_dict[(pathname, str(order_num))] = (node, package_id)
            order_num += 1
        print("3")
        if order_num == package_count:
            self._mapper.query("update_file_data", (pack_id_list[0], pathname))
        self._mapper.query("update_file_order_num", (order_num, pathname))
        message = "cache_add&" + self.__serialize_dict(result_dict)
        print("HERE: ", message, CF.get("Middleware", "ip"))
        self._sender_inter.insert(
            (message, (CF.get("Middleware", "ip"),
                       int(CF.get("Middleware", "port")))))

    def _create(self, data, *args, **kwargs):
        payload, address = data
        pathname, response_ip, response_port = payload
        file_id = self._mapper.query("insert_file", pathname,
                                     last_row_id=True)[0][0][0]
        dir_pathname = pathname.rsplit('/', 1)[0]
        if not dir_pathname:
            dir_pathname = '/'
        self._mapper.query("update_directory_data", (file_id, dir_pathname))
        request = "&".join(("open", pathname, response_ip, response_port))
        self._sender_inter.insert(
            (request, (CF.get("Middleware", "ip"),
                       int(CF.get("Middleware", "port")))))

    def _mkdir(self, data, *args, **kwargs):
        payload, _ = data
        pathname = payload[0]
        file_id = self._mapper.query("insert_directory", pathname)[0][0]
        pathname = str(pathname)
        parent_dir_pathname = pathname.rsplit('/', 1)[0]
        if not parent_dir_pathname:
            parent_dir_pathname = '/'
        if parent_dir_pathname != pathname:
            self._mapper.query("update_directory_data",
                               (file_id, parent_dir_pathname))

    def _status(self, data, *args, **kwargs):
        payload, _ = data
        file_id, status = payload
        self._mapper.query("update_file_status_by_file_id", (status, file_id))
class Zabbix(object):
    def __init__(self, host, uri_path, user, noverify=False, cacert=None,
                 http=False, timeout=30):
        """
        Initializes a Zabbix instance
        :param host: hostname to connect to (ex. zabbix.yourdomain.net)
        :param user: username to connect with (ex. Admin)
        :param uri_path: uri path to the zabbix api (ex. zabbix)
        :param noverify: turns off verification
        :param cacert: the certificate authority to use
        :param http: flag to use http over https
        :param timeout: API timeout parameter
        :return: Zabbix instance
        """
        self.cache = Cache('/tmp/zabbix.cache')
        self.host = host
        self.cache_slug = '{0}-{1}'.format(host, user)
        zabbix_url = urlunparse([
            'http' if http else 'https',
            host.strip('/'),
            uri_path, '', '', ''
        ])
        log.debug("Creating instance of Zabbix with url: %s", zabbix_url)

        self.zapi = ZabbixAPI(zabbix_url)
        if cacert is not None:
            log.debug('Setting zapi.session.verify to {0}'.format(cacert))
            self.zapi.session.verify = cacert
        if noverify:
            log.debug('Setting zapi.session.verify to False')
            self.zapi.session.verify = False

        self.zapi.timeout = timeout
        self.fetch_zabbix_api_version()  # check the api

        token = self.cache.get(self.cache_slug)
        if token:
            log.debug('Found token for {0}'.format(host))
            self.zapi.auth = token
            # let's test the token by grabbing the api version
            try:
                self.fetch_zabbix_api_version()
            except ZabbixNotAuthorized:
                self.zapi.auth = ''

    def fetch_zabbix_api_version(self):
        """
        Reaches out to the zapi api info to parse the version string
        :return: version string or False
        """
        try:
            return self.zapi.apiinfo.version()
        except (HTTPError, ConnectionError, ZabbixAPIException) as e:
            # todo: can't we check by the error type, not its string?
            if 'Not authorized' in str(e):
                log.debug('Token not authorized for {0}'.format(self.host))
                raise ZabbixNotAuthorized
            raise ZabbixError(e)

    def auth(self, username, password):
        """
        Performs the login function of the api with the supplied credentials
        :param username: username
        :param password: password
        :return: True if valid, raises ZabbixNotAuthorized otherwise
        """
        try:
            self.zapi.login(username, password)
            self.cache.write(self.cache_slug, self.zapi.auth)
        except ZabbixAPIException as e:
            raise ZabbixNotAuthorized('Username or password invalid')
        return True
def evaluate(func, formula=None):
    if formula is None:
        score_mean, score_var = evaluate_cache(func)
    else:
        score_mean, score_var = Cache.process(formula, evaluate_cache, func)
    return score_mean + score_var
def run(self, run_id):
    key = "{}_{}_{}".format(self.name, self.method, run_id)
    if not self.use_cache:
        Cache.reset()
    return Cache.process(key, self.__run, run_id)
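# The snippets above treat Cache.process as a read-through memoizer: return
# the stored value for a key, or compute, store, and return it. A minimal
# sketch of that assumed contract (the real class may differ):
class MemoCache:
    _store = {}

    @classmethod
    def process(cls, key, func, *args, **kwargs):
        # return the cached value, or compute it once and remember it
        if key not in cls._store:
            cls._store[key] = func(*args, **kwargs)
        return cls._store[key]

    @classmethod
    def get(cls, key):
        return cls._store.get(key)

    @classmethod
    def reset(cls):
        cls._store.clear()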
def clean(self):
    if self.cache:
        Cache.clear(self.cache)
class HealthDispatcher(object):
    def __init__(self, **kwargs):
        super().__init__()
        self._request_inter = Interaction("receive")
        self._sender_inter = Interaction("sender")
        self._handlers = {
            "alive": TaskThread(target=kwargs.get("alive", self._alive), name="alive"),
            "status": TaskThread(target=kwargs.get("status", self._status), name="status"),
            "init": TaskThread(target=kwargs.get("init", self._init), name="init"),
        }
        self._cache = Cache("cluster_manager")
        self._cache['node_load'] = {}
        self._mapper = HealthMapper()
        # self._cache['fd'] = {}

    def dispatch(self, data, address):
        command, *payload = data.decode("utf-8").split('&')
        print(command, payload)
        handler = self._handlers.get(command, None)
        if handler:
            handler.add_task((payload, address))
        else:
            print("Bad command: ", command)
        # handler.delay((payload, address))

    def add_handler(self, key, handler_func):
        self._handlers[key] = TaskThread(target=handler_func, name=key)
        self._handlers[key].start()

    def start(self):
        for handler in self._handlers.values():
            handler.start()

    def stop(self):
        for handler in self._handlers.values():
            handler.stop()
            handler.join()

    def _alive(self, data, *args, **kwargs):
        payload, _ = data
        node_id, status = payload
        node_id = int(node_id)
        if self._cache.get(node_id, None):
            if self._cache[node_id]['timer']:
                self._cache[node_id]['timer'].cancel()
            if status == "True":
                self._cache[node_id]['timer'] = Timer(
                    int(CF.get("Node", "node_live_timeout")),
                    self._alive,
                    args=[((node_id, "False"), ())])
                self._cache[node_id]['timer'].start()
            elif self._cache.get(node_id, None):
                del self._cache[node_id]
                del self._cache['node_load'][node_id]

    def _status(self, data, *args, **kwargs):
        payload, _ = data
        new_size, node_id, pack_id, status = payload
        node_id = int(node_id)
        self._mapper.query("update_package_status", (status, pack_id))
        res, file_id = self._mapper.query("get_unready_package",
                                          (pack_id, pack_id))
        res = res[0]
        file_id = file_id[0]
        self._mapper.query("update_file_size", (new_size, file_id))
        self._cache[node_id]['pack_idle_count'] -= 1
        self._cache['node_load'][node_id] = (
            self._cache[node_id]['pack_idle_count']
            * self._cache[node_id]['free_size'])
        if not res:
            message = "&".join(("status", str(file_id), "True"))
            self._sender_inter.insert(
                (message, (CF.get("Receiver", "ip"),
                           int(CF.get("Receiver", "tcp_port")))))

    def _init(self, data, *args, **kwargs):
        payload, address = data
        tcp_port, udp_port, free_size = payload
        res = self._mapper.query("get_node_by_address",
                                 (address[0], int(tcp_port)))
        if not res:
            node_id = self._mapper.query(
                "insert_new_node", (address[0], tcp_port, udp_port))[0][0]
        else:
            node_id = int(res[0][0])
        res = "&".join(("init", str(node_id)))
        self._sender_inter.insert((res, (address[0], int(tcp_port))))
        self._cache[node_id] = {
            'timer': None,
            'ip': address[0],
            'tcp_port': int(tcp_port),
            'udp_port': int(udp_port),
            'free_size': int(free_size),
            'pack_idle_count': 0,
        }
        self._cache['node_load'][node_id] = 0
from kafka.structs import TopicPartition
from utils import kafka_consumer, Cache, get_web3

# set up logging
logger = Logger(__name__, filename='block_transaction.log')
# log errors for:
# 1. the topic was not created properly
# 2. receiving a block of the same height twice
logger_err = Logger(f'{__name__}_err', filename='err_block_transaction.log')

# web3 connection
w3 = get_web3()

# block cache: we ran into a problem similar to
# https://ethereum.stackexchange.com/questions/87227/duplicate-events-from-get-new-entries-using-web3-py
block_cache = Cache(maxlen=5)


def send_block(height_or_hash):
    """
    Fetch the block at the given height and send it to Kafka.
    :param height_or_hash: block height or hash
    :return:
    """
    # fetch the requested block and add it to the cache
    block1 = w3.eth.getBlock(height_or_hash, True)
    if block1.number in block_cache:
        logger_err.error(f'received duplicate block height {block1.number}')
        block_cache[block1.number] = block1
        return
    block_cache[block1.number] = block1
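# A plausible shape for the bounded Cache used above (an assumption about
# utils.Cache, not its actual source): a mapping that evicts its oldest entry
# once maxlen keys are stored, which is enough for "have we seen this block
# recently?" checks.
from collections import OrderedDict

class BoundedCache:
    def __init__(self, maxlen):
        self._maxlen = maxlen
        self._data = OrderedDict()

    def __contains__(self, key):
        return key in self._data

    def __setitem__(self, key, value):
        self._data[key] = value
        self._data.move_to_end(key)
        while len(self._data) > self._maxlen:
            self._data.popitem(last=False)  # drop the oldest entry

    def __getitem__(self, key):
        return self._data[key]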
class Spider(object):
    def __init__(self, *, timeout=None, headers=cfg.default_headers,
                 headers_path=None, keys=None, cache_path='html_cache',
                 encoding=None):
        self.sess = requests.Session()
        self.timeout = timeout
        self.headers = (self.make_headers(headers_path, keys)
                        if isinstance(headers_path, str) else headers)
        self.cache_path = cache_path
        self.cache = Cache(self.cache_path)
        self.encoding = encoding

    def make_headers(self, headers_path, keys):
        # None is not a type, so check with type(None)
        assert isinstance(keys, (list, set, type(None)))
        headers = {}
        keys = set(keys) if keys else keys
        with open(headers_path, 'r') as f:
            lines = f.readlines()
        for line in lines:
            x = line.find(':')
            if x != -1 and (not keys or line[:x] in keys):
                headers[line[:x]] = line[x + 1:-1]
        return headers

    def get(self, *args, **kwargs):
        if 'headers' not in kwargs:
            kwargs['headers'] = self.headers
        if 'timeout' not in kwargs and self.timeout:
            kwargs['timeout'] = self.timeout
        try:
            response = self.sess.get(*args, **kwargs)
        except requests.RequestException:
            return None
        if self.encoding:
            response.encoding = self.encoding
        return response

    def post(self, *args, **kwargs):
        if 'headers' not in kwargs:
            kwargs['headers'] = self.headers
        if 'timeout' not in kwargs and self.timeout:
            kwargs['timeout'] = self.timeout
        try:
            response = self.sess.post(*args, **kwargs)
        except requests.RequestException:
            return None
        if self.encoding:
            response.encoding = self.encoding
        return response

    def html_save(self, response, name):
        self.cache.bin_save(response.content, name)

    def str_save(self, text, name):
        self.cache.str_save(text, name)

    def download(self, name, *args, **kwargs):
        self.html_save(self.get(*args, **kwargs), name)

    def login(self, url, success_judge,
              certcode_url=None, cache_name='./data', anonymous=False,
              **kwargs):
        from getpass import getpass
        _input = getpass if anonymous else input

        def get_certcode(url):
            print('[LOG] downloading certcode from %s ..' % url)
            name = 'certcode.jpg'
            certcode_path = os.path.join(self.cache_path, name)
            self.download(name, url)
            print('[LOG] trying to import cv2..')
            try:
                import cv2
                img = cv2.imread(certcode_path)
                if 'recognition' in kwargs:
                    print('[LOG] using auto recognition instead!')
                    return kwargs['recognition'](img, kwargs['pattern'])
                print('[SUC] successfully imported cv2. please watch the image window and input the certcode..')
                cv2.imshow(cache_name, img)
                cv2.waitKey(3000)
            except Exception:
                print('[ERR] failed to import cv2. please input the certcode from %s' % certcode_path)
            certcode = str(input('[I N] certcode: '))
            return certcode

        print('[LOG] starting to log in to %s ..' % url)
        information = Cache(cache_name)
        data = information.load('login')
        if not data:
            # the credential prompts were redacted ('******') in the source;
            # reconstructed from the variables used below
            username = _input('[I N] username: ')
            password = _input('[I N] password: ')
            tel = _input('[I N] tel: ')
            #### #### #### ####
            # change the data format to what the target website needs
            data = {
                'nickName': username,
                'password': password,
                'logintype': 'PLATFORM',
            }
            #### #### #### ####
            information.save(tel, 'tel')
            information.save(data, 'login')
        certcode = get_certcode(certcode_url) if certcode_url else None
        #### #### #### ####
        # change the data format to what the target website needs
        if certcode:
            data['checkCode'] = certcode
        #### #### #### ####
        print('[LOG] hi, user %s.' % (data['nickName'] if not anonymous else 'anonymous'))
        response = self.post(url, data=data, headers=self.headers)
        self.html_save(response, 'login.html')
        return success_judge(response)
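# A hedged usage sketch for the Spider above; the URLs and the success check
# are illustrative placeholders, not from the original source.
spider = Spider(timeout=10, encoding='utf-8')
page = spider.get('https://example.com/')
if page is not None:
    spider.html_save(page, 'example.html')

# login flow: treat the login as successful when the response exists and
# no longer points at the login page (an assumed success_judge)
logged_in = spider.login(
    'https://example.com/login',
    success_judge=lambda r: r is not None and 'login' not in r.url)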
import itertools  # needed for itertools.count() below
from collections import defaultdict
from typing import Iterator, Dict, Set, List, Tuple

import numpy as np
# from keras.models import load_model

from file_utils import GenomeReadDataReader
from structures import GenomeReadData, GenomeReadCluster, ClusterMergeCase
from utils import iter_with_progress, Cache

KmerIndex = Dict[str, Set[int]]
ClusterConnection = Tuple[int, int, int]

# model = load_model('ecoli_model.hdf5')
cache = Cache()


def get_clusters_from_reads(
        reads: Iterator[GenomeReadData]) -> List[GenomeReadCluster]:
    cluster_id_counter = itertools.count()
    clusters = []
    for read in reads:
        if not read.characteristic_kmers:
            continue
        clusters.append(
            GenomeReadCluster(
                reference_id=next(cluster_id_counter),
                characteristic_kmers=read.characteristic_kmers.copy(),
                reads=[read]))
    return clusters