def visit_FunctionDef(self, node):
    '''Visitor for AST FunctionDef nodes

    Add relevant information about the node to the context for use in
    tests which inspect function definitions.  Add the function name to
    the current namespace for all descendants.

    :param node: The node that is being inspected
    :return: -
    '''
    # Expose the raw AST node to the test plugins via the shared context.
    self.context['function'] = node
    self.logger.debug("visit_FunctionDef called (%s)" % ast.dump(node))

    # Fully-qualified name = current namespace + this function's name.
    qualname = self.namespace + '.' + b_utils.get_func_name(node)
    name = qualname.split('.')[-1]

    self.context['qualname'] = qualname
    self.context['name'] = name

    # For all child nodes and any tests run, add this function name to
    # current namespace
    self.namespace = b_utils.namespace_path_join(self.namespace, name)
    self.update_score(self.tester.run_tests(self.context, 'FunctionDef'))
    # Visit the function body with the extended namespace in effect...
    super(BanditNodeVisitor, self).generic_visit(node)
    # ...then pop this function's name off again on the way out.
    self.namespace = b_utils.namespace_path_split(self.namespace)[0]
def tri_trend(df, args):
    """Assign a ternary trend label (0 / 1 / 2) per row.

    Rows default to 1 (neutral).  A row is promoted to 2 when the forward
    profit-vs-MA exceeds ``pos_ma`` AND the forward max drawdown stays
    below ``pos_mdw``; it is demoted to 0 when the profit drops below
    ``neg_ma`` OR the drawdown exceeds ``neg_mdw``.

    :param df: DataFrame containing the ``Y_PMA_open_next_N`` and
        ``Y_MAXDRAWDOWN_open_next_N`` columns
    :param args: dict of thresholds (N, pos_mdw, neg_mdw, pos_ma, neg_ma)
    :return: df, with the new target column added
    """
    fc_name = utils.get_func_name()
    target_name = 'Y_' + fc_name
    df[target_name] = 1  # neutral default

    N = args.get('N', 20)
    pos_mdw = args.get('pos_mdw', 5)
    neg_mdw = args.get('neg_mdw', 10)
    pos_ma = args.get('pos_ma', 5)
    neg_ma = args.get('neg_ma', -5)

    pma_col = 'Y_PMA_open_next_{}'.format(N)
    mdw_col = 'Y_MAXDRAWDOWN_open_next_{}'.format(N)

    bullish = (df[pma_col] > pos_ma) & (df[mdw_col] < pos_mdw)
    df.loc[:, target_name] = np.where(bullish, 2, df[target_name])

    bearish = (df[pma_col] < neg_ma) | (df[mdw_col] > neg_mdw)
    df.loc[:, target_name] = np.where(bearish, 0, df[target_name])

    print('describe target distribution===========')
    print(df[target_name].value_counts())
    return df
def market(raw):
    """Tag the quotes with a market id derived from the stock code prefix.

    sh:1 sz:2 cy:3 — Shanghai codes start with '60', Shenzhen with '00',
    ChiNext with '30'.

    :param raw: dict with 'code' (str) and 'quotes' (column-assignable,
        presumably a DataFrame — confirm against caller) keys
    :return: raw, with raw['quotes'][<this func name>] set when the
        prefix is recognised; unchanged (code printed) otherwise
    """
    fc_name = utils.get_func_name()
    code = raw['code']
    # sh:1 sz:2 cy:3
    if code.startswith('60'):
        market = 1
    elif code.startswith('00'):
        market = 2
    elif code.startswith('30'):
        market = 3
    else:
        # Unknown prefix.  The original left `market` unbound here and let
        # a bare `except:` swallow the NameError; make the skip explicit.
        print(code)
        return raw
    try:
        raw['quotes'][fc_name] = market
    except Exception:
        # Preserve the original best-effort behavior: report and move on.
        print(code)
    return raw
def _log_func_call(func, use_time, *func_args, **func_kwargs):
    # Log a finished call as "name(param=value, ...) <N>ms".
    # Python 2 only: relies on func.func_code / func.func_defaults and on
    # zip() returning a list (params.append below would fail on Python 3).
    #
    # Declared positional parameter names, in order.
    arg_names = func.func_code.co_varnames[:func.func_code.co_argcount]
    # Positional values actually supplied for those parameters.
    args = func_args[:len(arg_names)]
    defaults = func.func_defaults or ()
    # Fill in trailing parameters the caller omitted from their defaults.
    args = args + defaults[len(defaults) - (func.func_code.co_argcount - len(args)):]
    params = zip(arg_names, args)
    # Any extra positionals beyond the declared parameters (i.e. *args).
    args = func_args[len(arg_names):]
    if args:
        params.append(('args', args))
    if func_kwargs:
        params.append(('kwargs', func_kwargs))
    func_name = utils.get_func_name(func)
    # use_time is in seconds; report milliseconds.
    func_call = u'{func_name}({params}) {use_time}ms'.format(
        func_name=func_name,
        params=', '.join('%s=%r' % p for p in params),
        use_time=use_time * 1000)
    app_logger.info(func_call)
def visit_FunctionDef(self, node):
    '''Visitor for AST FunctionDef nodes

    Add relevant information about the node to the context for use in
    tests which inspect function definitions.  Add the function name to
    the current namespace for all descendants.

    :param node: The node that is being inspected
    :return: -
    '''
    # Expose the raw AST node to the test plugins via the shared context.
    self.context['function'] = node
    # Fully-qualified name = current namespace + this function's name.
    qualname = self.namespace + '.' + b_utils.get_func_name(node)
    name = qualname.split('.')[-1]

    self.context['qualname'] = qualname
    self.context['name'] = name

    # For all child nodes and any tests run, add this function name to
    # current namespace
    self.namespace = b_utils.namespace_path_join(self.namespace, name)
    # NOTE(review): unlike the other visit_FunctionDef variant in this
    # file, the namespace is not popped here — presumably the caller (or a
    # shared post-visit path) restores it; confirm against the full class.
    self.update_scores(self.tester.run_tests(self.context, 'FunctionDef'))
def wrapper(*func_args, **func_kwargs):
    # Caching wrapper; closes over decorator state not visible in this
    # chunk: func, namespace, tag, expire, cached_over_ms, plus module
    # globals settings/request/redis/app_logger — TODO confirm at the
    # enclosing decorator.
    #
    # Global kill-switch: bypass the cache entirely.
    if not settings.CACHED_CALL:
        return func(*func_args, **func_kwargs)
    if namespace == 'views':
        # Only GET views are cacheable; key on the request path + query.
        if request.method == 'GET':
            url = urlparse.urlsplit(request.url)
            key = _generate_key(namespace, tag, url_path=url.path, url_query=url.query)
        else:
            return func(*func_args, **func_kwargs)
    elif namespace == 'funcs':
        # Plain functions: key on the repr of their args and kwargs.
        params = '%s&%s' % (str(func_args), str(func_kwargs))
        funcname = get_func_name(func)
        key = _generate_key(namespace, tag, funcname=funcname, params=params)
    try:
        data = redis.get(key)
    except Exception as e:
        # Redis unavailable: log and fall through to the real call.
        app_logger.exception(e)
        return func(*func_args, **func_kwargs)
    else:
        if data is not None:
            # Cache hit — return the unpickled cached result.
            app_logger.debug(u'data from cache:%r' % key)
            return cPickle.loads(data)
        else:
            # Cache miss — time the call; only cache results that were
            # slow enough (> cached_over_ms milliseconds) to be worth it.
            start_time = time.time()
            result = func(*func_args, **func_kwargs)
            exec_time = (time.time() - start_time) * 1000
            if exec_time > cached_over_ms:
                try:
                    redis.set(key, cPickle.dumps(result), ex=expire)
                    app_logger.debug(u'cached:%r' % key)
                except Exception as e:
                    # Best-effort: a failed cache write must not fail the call.
                    app_logger.exception(e)
            return result
def test_cached_call_funcs(app, utils, redis, caplog):
    # End-to-end test of @cached_call in the 'funcs' namespace (Python 2:
    # print statements, cPickle).  Fixtures app/utils/redis/caplog are
    # provided by the test suite's conftest — not visible in this chunk.
    from utils.cache import cached_call
    from utils.cache import _generate_key
    from utils import get_func_name

    # run fast, dont need to cache
    @cached_call(cached_over_ms=app.config['CACHED_OVER_EXEC_MILLISECONDS'],
                 expire=app.config['CACHED_EXPIRE_SECONDS'],
                 tag='', namespace='funcs')
    def no_cache():
        time.sleep(0)
        return "no_cache"

    rv = no_cache()
    funcname = get_func_name(no_cache)
    params = '()&{}'
    key = _generate_key('funcs', '', funcname=funcname, params=params)
    print 'key:', key
    data = redis.get(key)
    # Fast call must not have been written to the cache.
    assert data is None

    # over time need to cache
    @cached_call(cached_over_ms=app.config['CACHED_OVER_EXEC_MILLISECONDS'],
                 expire=app.config['CACHED_EXPIRE_SECONDS'],
                 tag='', namespace='funcs')
    def cache_it():
        # Sleep just past the caching threshold so the result is stored.
        time.sleep(app.config['CACHED_OVER_EXEC_MILLISECONDS'] / 1000. + 1)
        return "cache_it"

    rv = cache_it()
    funcname = get_func_name(cache_it)
    params = '()&{}'
    key = _generate_key('funcs', '', funcname=funcname, params=params)
    print 'key:', key
    data = redis.get(key)
    assert data is not None
    # Cached payload must round-trip to the original return value.
    cached_rv = cPickle.loads(data)
    print 'func rv:', rv, 'cached rv:', cached_rv
    assert rv == cached_rv
    # Second call should be served from the cache (check the log).
    cache_it()
    log = caplog.text.strip()
    assert 'data from cache' in log
    redis.delete(key)

    # cache with func params
    @cached_call(cached_over_ms=app.config['CACHED_OVER_EXEC_MILLISECONDS'],
                 expire=app.config['CACHED_EXPIRE_SECONDS'],
                 tag='', namespace='funcs')
    def cache_with_params(x, y, z):
        time.sleep(app.config['CACHED_OVER_EXEC_MILLISECONDS'] / 1000. + 1)
        return u"cache_with_params %s + %s = %s %s" % (x, y, x + y, z)

    rv = cache_with_params(1, 2, z='zzz')
    funcname = get_func_name(cache_with_params)
    # Key embeds the repr of positional args and kwargs.
    params = "(1, 2)&{'z': 'zzz'}"
    key = _generate_key('funcs', '', funcname=funcname, params=params)
    print 'key:', key
    data = redis.get(key)
    assert data is not None
    cached_rv = cPickle.loads(data)
    print 'func rv:', rv, 'cached rv:', cached_rv
    assert rv == cached_rv
    cache_with_params(1, 2, z='zzz')
    log = caplog.text.strip()
    assert 'data from cache' in log
    redis.delete(key)
def pma20(raw):
    """Apply the 20-period profit/MA labeler; output column is 'Y_' + name."""
    fc_name = utils.get_func_name()
    target_col = 'Y_' + fc_name
    labeler = __profit_maN(20, target_col)
    return labeler(raw)
def mdw20(raw):
    """Apply the 20-period max-drawdown labeler; output column is 'Y_' + name."""
    fc_name = utils.get_func_name()
    target_col = 'Y_' + fc_name
    labeler = __max_drawdownN(20, target_col)
    return labeler(raw)
def next_open_p(raw):
    """Add the next-day-open vs. today-close gap, in percent, as float16.

    :param raw: dict whose 'quotes' DataFrame has 'open_next' and 'close'
    :return: raw, with the new column (named after this function) added
    """
    fc_name = utils.get_func_name()
    quotes = raw['quotes']
    gap_pct = (quotes['open_next'] / quotes['close'] - 1) * 100
    quotes.loc[:, fc_name] = gap_pct.astype('float16')
    raw['quotes'] = quotes
    return raw
def open_next(raw):
    """Add the next row's open price (open shifted up by one) as a column.

    :param raw: dict whose 'quotes' DataFrame has an 'open' column
    :return: raw, with the new column (named after this function) added
    """
    fc_name = utils.get_func_name()
    quotes = raw['quotes']
    quotes.loc[:, fc_name] = quotes['open'].shift(-1)
    raw['quotes'] = quotes
    return raw
def range(raw):
    """Add the high-low span as a percentage of the open, stored as float16.

    NOTE: the name shadows the builtin ``range`` within this module; kept
    because callers reference this feature function by its name.

    :param raw: dict whose 'quotes' DataFrame has 'high', 'low' and 'open'
    :return: raw, with the new column (named after this function) added
    """
    fc_name = utils.get_func_name()
    quotes = raw['quotes']
    span_pct = (quotes['high'] - quotes['low']) / quotes['open'] * 100
    quotes.loc[:, fc_name] = span_pct.astype('float16')
    raw['quotes'] = quotes
    return raw