Пример #1
0
def db_connect():
    """Open a MySQL connection from the [database] config section.

    Autocommit is enabled so every statement is committed immediately.
    Returns the open connection object.
    """
    section = 'database'
    settings = {
        'host': config.get_config(section, 'db_host'),
        'user': config.get_config(section, 'db_user'),
        'passwd': config.get_config(section, 'db_password'),
        'db': config.get_config(section, 'db_name'),
        # Port is stored as a string in the config file.
        'port': int(config.get_config(section, 'db_port')),
    }
    connection = MySQLdb.connect(charset='utf8', **settings)
    connection.autocommit(True)
    return connection
Пример #2
0
def _connect_mysql():
    """Connect to MySQL on the default port (3306) and return a cursor.

    Host/user/password/database come from the [database] config section;
    autocommit is switched on before the cursor is created.
    """
    connection = MySQLdb.connect(
        host=config.get_config('database', 'db_host'),
        user=config.get_config('database', 'db_user'),
        passwd=config.get_config('database', 'db_password'),
        db=config.get_config('database', 'db_name'),
        port=3306,
        charset='utf8',
    )
    connection.autocommit(True)
    return connection.cursor()
Пример #3
0
def redis_init():
    """Flush the configured Redis database.

    Builds a ``redis-cli`` command from the [redis] config section, lists
    every key and pipes them back into DEL via xargs.
    """
    redis_host = config.get_config('redis', 'host')
    redis_db = config.get_config('redis', 'database')
    redis_port = int(config.get_config('redis', 'port'))
    redis_shell = 'redis-cli -h %s -p %s -n %s' % (redis_host, redis_port, redis_db)
    # KEYS "*" lists everything; xargs feeds the names into a DEL call.
    init_shell = redis_shell + ' KEYS "*" | xargs ' + redis_shell + ' DEL'
    os.system(init_shell)
    # print() form works on both Python 2 and 3 for a single argument.
    print("这里面redis初始化")
Пример #4
0
def mongo_init():
    """Drop and re-seed the configured Mongo database.

    Drops the whole database, then recreates the ``counter`` collection
    with the ``apply_id`` and ``serial_no`` counters reset to 0.
    """
    mongo_host = config.get_config('mongo', 'db_host')
    mongo_db = config.get_config('mongo', 'db_name')
    mongo_port = int(config.get_config('mongo', 'db_port'))
    mongo_conn = 'mongo %s:%s/%s' % (mongo_host, mongo_port, mongo_db)
    init_shell = mongo_conn + " --quiet --eval 'db.dropDatabase();db.counter.insert({_id:\"apply_id\",req:NumberLong(0)});db.counter.insert({_id:\"serial_no\",req:NumberLong(0)})'"
    os.system(init_shell)
    # print() form works on both Python 2 and 3 for a single argument.
    print("这里面mongo初始化")
Пример #5
0
def perform_chi2test(analysis, pdf_family, scenario='all', **kwargs):
    """Run a chi2 test of *analysis* against every PDF set of *pdf_family*.

    For each scale in the analysis config, computes alphas/chi2/ndof for
    every PDF set (after applying the configured cuts) and caches the
    result arrays to an .npz file under config.cache_chi2.

    kwargs must contain 'pdf_sets'; that list is extended in place with
    the members of the requested family.
    """

    lhapdf_config = config.get_config('lhapdf2')
    ana_config = config.get_config(analysis)

    # Extend the caller-provided PDF sets with the whole family.
    kwargs['pdf_sets'] += lhapdf_config['pdf_families'][pdf_family]

    # Uncertainty sources for the requested scenario (None if unknown).
    scenario_uncerts = ana_config['scenarios'].get(scenario, None)

    # One result array (and cache file) per scale.
    for scale in ana_config['theory'].as_list('scales'):

        npdf_sets = len(kwargs['pdf_sets'])
        results = ArrayDict(**{'alphas': numpy.zeros(npdf_sets),
                               'chi2': numpy.zeros(npdf_sets),
                               'ndof': numpy.zeros(npdf_sets)})

        cache_filepath = os.path.join(config.cache_chi2,
                                      analysis,
                                      '{}_{}_{}.npz'.format(
                                          pdf_family,
                                          scenario, scale))
        for i, pdf_set in enumerate(kwargs['pdf_sets']):
            meas = get_measurement(analysis, pdf_set, scale,
                                   scenario=scenario_uncerts)
            # Get data mask
            mask = meas.get_mask().copy()
            for cut in ana_config['cuts']:
                cut_arr = meas.get_source(cut).get_arr()
                min_val = float(ana_config['cuts'][cut]['min'])
                max_val = float(ana_config['cuts'][cut]['max'])
                # Keep points with min_val <= value < max_val.
                cut_mask = ((cut_arr < max_val)
                                & (cut_arr >= min_val))
                mask = mask*cut_mask
            meas.set_mask(mask)

            alphas = float(lhapdf_config['pdf_sets'][pdf_set]['alphas'])
            chi2nuis = Chi2Nuisance(meas)
            results['alphas'][i] = alphas
            results['chi2'][i] = chi2nuis.get_chi2()
            results['ndof'][i] = chi2nuis.get_ndof()
            # print() form works on both Python 2 and 3 for one argument.
            print(results)
            print(chi2nuis.get_nuisance_parameters())
        results.save(filename=cache_filepath)
Пример #6
0
def db_init():
    """Truncate every table of the configured MySQL database and reload
    the city seed data.

    Builds mysql CLI command lines from the [database] config section and
    runs them through os.system.
    """
    host = config.get_config('database', 'db_host')
    user = config.get_config('database', 'db_user')
    passwd = config.get_config('database', 'db_password')
    name = config.get_config('database', 'db_name')
    port = config.get_config('database', 'db_port')
    # passwd is only needed by the password-authenticated variant below:
    # mysql_conn = 'mysql -h %s -u%s -p%s -P%s ' %(host,user,passwd,port)
    mysql_conn = 'mysql -h %s -u%s -P%s ' % (host, user, port)
    # Generate one TRUNCATE statement per table and pipe them back into mysql.
    init_shell = mysql_conn + ' -N -s information_schema -e ' + '\"SELECT CONCAT(\'TRUNCATE TABLE \',TABLE_NAME,\';\') FROM TABLES WHERE TABLE_SCHEMA=\'' + name + '\'\"' + '|' + mysql_conn + ' -f ' + name
    os.system(init_shell)
    # Reload the city seed data shipped next to this package.
    city_init = mysql_conn+'-D'+name+' -e \'source '+ os.path.dirname(__file__)+"/../config/clb_city.sql\'"
    os.system(city_init)
    # print() form works on both Python 2 and 3 for a single argument.
    print("这里面数据库初始化")
Пример #7
0
    def __init__(self, analysis):
        """Load the per-analysis config and the cached data arrays.

        analysis: analysis identifier; data is read from
        <config.data_dir>/<analysis>.npz.
        """
        super(DataProvider, self).__init__()
        self._ana_config = config.get_config(analysis)
        self._data_file = os.path.join(config.data_dir, analysis + '.npz')

        self._array_dict = arraydict.ArrayDict(self._data_file)

        # Turn the raw arrays into this provider's sources.
        self.parse_arraydict()
Пример #8
0
def mongo_cur(request):
    # pytest-style fixture: returns a Mongo database handle taken from the
    # module-level `mongo_client`, and registers a finalizer that closes
    # the client after the test.
    def fin():
        if mongo_client:
            mongo_client.close()

    request.addfinalizer(fin)
    mongo_db = config.get_config('mongo', 'db_name')
    mongo_conn = mongo_client[mongo_db]  # database object (not a cursor, despite the name)
    return mongo_conn
Пример #9
0
def _init(db_cur):
    """Prepare test fixtures: pin our user to company 777 and reset cost centers.

    db_cur: a DB-API cursor (e.g. MySQLdb) used to execute the statements.
    """
    # Move our own user (looked up by phone) into company 777.
    phone = config.get_config('app', 'phone')
    # Parameter binding instead of % string interpolation — avoids SQL
    # injection and quoting problems with the phone value.
    db_cur.execute(
        'update clb_user set company_id = 777 where telephone = %s', (phone,))
    # Allow company 777 to have up to 99 cost centers.
    db_cur.execute(
        "insert into clb_company_configuration (company_id, type, value) "
        "values (777, 'cost_center_num', 99)")
    # Remove cost-center rows belonging to any other company.
    db_cur.execute('delete from clb_cost_center where company_id != 777')
Пример #10
0
def get_measurement(analysis, pdf_set, scale, scenario='all'):
    """Build a Measurement combining theory and data sources.

    Looks up the per-PDF-set settings from the 'lhapdf2' config and merges
    the sources of a TheoryProvider and a DataProvider for *analysis*.
    """
    data_provider = DataProvider(analysis)
    theory_provider = TheoryProvider(analysis, pdf_set, scale)
    pdf_config = config.get_config('lhapdf2')['pdf_sets'][pdf_set]
    return Measurement(sources=theory_provider.sources + data_provider.sources,
                       scenario=scenario,
                       pdf_set=pdf_set,
                       analysis=analysis,
                       pdf_config=pdf_config)
Пример #11
0
def batch_processing(**kwargs):
    """Run perform_chi2test for every analysis x PDF family x scenario.

    kwargs is forwarded to perform_chi2test. The 'pdf_family' and
    'analysis' keys are removed first because those values are supplied
    positionally and would otherwise be passed twice.
    """
    # Get all Analyses
    analyses = ['qcd11004v2']
    # All PDF Families
    lhapdf_config = config.get_config('lhapdf2')
    pdf_families = lhapdf_config['pdf_families'].keys()

    # Remove from kwargs to avoid ambiguous duplicate arguments.
    kwargs.pop('pdf_family')
    kwargs.pop('analysis')

    for analysis in analyses:
        ana_config = config.get_config(analysis)
        scenarios = ana_config['scenarios'].keys()
        for pdf_family in pdf_families:
            for scenario in scenarios:
                perform_chi2test(analysis,
                                 pdf_family,
                                 scenario=scenario,
                                 **kwargs)
Пример #12
0
    def __init__(self, analysis, pdf_set, scale):
        """Set up paths/config for cached theory predictions.

        analysis: analysis identifier used to look up its config.
        pdf_set: PDF set name, keyed into the 'lhapdf2' config.
        scale: underscore-separated string like "10_10"; each part is
            divided by 10 to obtain the actual scale factors.
        """
        super(TheoryProvider, self).__init__()
        self._analysis = analysis
        self._pdf_set = pdf_set
        self._scale = [float(item) / 10. for item in scale.split('_')]
        self._ana_config = config.get_config(analysis)
        self._table = self._ana_config['theory']['table']
        self._table_filepath = os.path.join(config.table_dir,
                                            self._ana_config['theory'][
                                                'table'] + '.tab')

        # Per-PDF-set settings (e.g. the LHgrid file) from the lhapdf2 config.
        self._lhapdf_config = config.get_config('lhapdf2')['pdf_sets'][pdf_set]
        self._lhgrid_filename = self._lhapdf_config['lhgrid_file']

        # scale_str = '_'.join(str(x).replace('.','') for x in scale)
        self._cache_filepath = os.path.join(config.cache_theory,
                                            self._table,
                                            '{}_{}.npz'.format(pdf_set, scale))

        self._array_dict = None

        # Populate self._array_dict from the cache file.
        self._read_cached()
Пример #13
0
# -*- coding: utf-8 -*-

import numpy as np
from keras.models import load_model
import cv2
import config.config as config
import config.globalvar as var

# Load the prediction image as grayscale (flag 0 = IMREAD_GRAYSCALE).
img1 = cv2.imread(config.get_config("pred_image_path", "pred_image"), 0)

cv2.imshow("img1", img1)
img1 = cv2.resize(img1, (var.IMAGE, var.IMAGE))  # resize to the model's input size

img1 = np.reshape(img1, var.IMAGE_RESHAPE).astype('float32')
# Invert the grayscale values (255 -> 0); presumably matches the
# preprocessing used at training time — TODO confirm.
img1 = abs(img1 - 255.0)
# print(img1.shape)

# Load the trained model and predict (print() works on Python 2 and 3).
model = load_model(config.get_config("result", "model"))
preds = model.predict(img1, verbose=0)
print(preds[0])
labels = range(var.CATEGORY)
name = labels[np.argmax(preds)]
print(name)
cv2.waitKey(0)
Пример #14
0
import logging
import time

import torch
import torch.nn as nn

from fme.models.LGR.LGR import LGR
from fme.models.LGR.loss import LGR_loss
from fme.utils.checkpoint import Checkpointer
# from fme.data.dataloader import train_dataloder, val_dataloader 
# from data.dataloader import train_dataloader

from data.pickle_loader import train_dataloader 
from config.config import get_config

config, unused = get_config()

def train_model(model, loss, dataloader, optimizer, log_period):
    """
    train model in a single training epoch
    """

    # Put both the model and the (trainable) loss module into training mode.
    model.train()
    loss.train()

    end = time.time()
    for iteration, data_batch in enumerate(dataloader):
        data_time = time.time() - end  # time spent loading this batch
        # Move every tensor of the batch to the GPU as float.
        data_batch = {k: v.cuda(non_blocking=True).float() for k, v in data_batch.items()}
        # Reorder axes for the network; assumes x_in/y_in come in as
        # (batch, N, C) and become (B, C, 1, N) / (B, C, N) — TODO confirm.
        data_batch["x_in"] = data_batch["x_in"].transpose(1, 2).unsqueeze(2)
        data_batch["y_in"] = data_batch["y_in"].transpose(1, 2)
Пример #15
0
def build_asset():
    """Enumerate the reachable inventory-state graph via DFS and build a
    tabular transition model T, then save everything as an asset file.

    Relies on module-level names: CoreEnv, S2xy_mapper, get_config,
    inventory_list, inven_max, log_num_list, option_list, log_pro.
    """

    global place_sr
    place_sr = 1.0
    state_graph = {}  # state -> {successor state -> option index}
    vis = set()  # states already expanded
    env = CoreEnv()

    def dfs(u, fa, faop):

        # Record the edge fa --(faop)--> u before the visited check so edges
        # into already-visited states are still captured.
        if fa!=None and fa!=u:
            if fa not in state_graph.keys():
                state_graph[fa] = {}
            state_graph[fa][u] = faop

        if u in vis:
            return
        else:
            vis.add(u)
            print(u)

        # Do not expand states whose inventory exceeds the cap; they stay in
        # the graph as border states.
        if np.where(np.array(u,dtype=np.int)>inven_max)[0].shape[0] > 0:
            return

        # option 0 : get log
        for ln in log_num_list:
            env.set_state(u)
            env.step(0, log_num=ln)
            v = env.get_state()
            dfs(v, u, 0)

        # Remaining options are applied one by one from the same state.
        for i in range(1, len(option_list)):
            env.set_state(u)
            env.step(i)
            v = env.get_state()
            dfs(v, u, i)

    # Start the search from the empty inventory.
    root = tuple([0]*len(inventory_list))
    dfs(root, None, None)

    # Map every inventory state to 2-D coordinates; split valid states (V)
    # from over-cap border states (border_V).
    s2xy = S2xy_mapper(list(vis))
    V = []
    border_V = []
    for s in vis:
        if np.where(np.array(s,dtype=np.int)>inven_max)[0].shape[0]>0:
            border_V.append(s2xy(s))
        else:
            V.append(s2xy(s))
    # Re-key the state graph by 2-D coordinates.
    G = {}
    for s in state_graph.keys():
        G[s2xy(s)] = {}
        for s1 in state_graph[s].keys():
            G[s2xy(s)][s2xy(s1)] = state_graph[s][s1]

    imsize = get_config()['imsize']
    S_size = get_config()['imsize']**2
    A_size = get_config()['A_size']
    # Dense transition tensor T[s, a, s'] over flattened grid states.
    T = np.zeros([S_size, A_size, S_size], dtype=np.float32)
    for s in G.keys():
        for s1 in G[s].keys():
            sn = s[0]*imsize + s[1]
            s1n = s1[0]*imsize + s1[1]
            if G[s][s1]==0:
                # Option 0 is stochastic: probability taken from log_pro,
                # indexed by the inventory delta (delta[0] presumably counts
                # logs gained — TODO confirm); staying put gets log_pro[0].
                delta = np.array(s2xy.inven_set[s1n],dtype=np.int) - np.array(s2xy.inven_set[sn],dtype=np.int)
                dlog = delta[0]
                T[sn][0][s1n] = log_pro[dlog]
                T[sn][0][sn] = log_pro[0]
            elif G[s][s1]==4 or G[s][s1]==5:
                # Options 4 and 5 succeed with probability 0.8.
                T[sn][G[s][s1]][s1n] = 0.8
                T[sn][G[s][s1]][sn] = 0.2
            else:
                T[sn][G[s][s1]][s1n] = 1.0
    # Rows with no outgoing probability become self-loops so every (s, a)
    # row of T sums to 1.
    for s in range(T.shape[0]):
        for a in range(T.shape[1]):
            sump = np.sum(T[s,a,:])
            assert(np.isclose(sump,0) or np.isclose(sump,1))
            if sump==0:
                T[s,a,s] = 1.0

    torch.save({'state_graph':G,'V':V,'inven_set':s2xy.inven_set,'T':T,'border_V':border_V}, 'environment/asset/'+get_config()['task_name'])
Пример #16
0
 def __init__(self):
     # Load the config and the precomputed environment asset (state graph,
     # valid states, transition tensor) saved by build_asset().
     self.cf = get_config()
     ckp = torch.load('environment/asset/'+self.cf['task_name'])
     self.s2xy = S2xy_mapper(ckp['inven_set'])
     self.mg = MapGenerator(ckp['state_graph'], ckp['V'], ckp['T'], self.s2xy, ckp['border_V'])
     self.core = CoreEnv()
Пример #17
0
 def __init__(self):
     # Variant that only needs the state graph from the saved asset; the
     # map generator is built from it alone.
     self.cf = get_config()
     self.mg = MapGenerator(
         G=torch.load('environment/asset/' +
                      self.cf['task_name'])['state_graph'])
     self.core = CoreEnv()
Пример #18
0
 def __init__(self, G):
     # G: state graph (state -> {successor -> option index}) to generate from.
     self.cf = get_config()
     self.G = G
Пример #19
0
# coding: utf-8

import cv2
import os
import numpy as np
from keras.models import load_model
import config.config as config
import config.globalvar as var

#数据处理
X = []
Y = []
path = config.get_config("1679", "path")


def getImg(path):
    """Recursively walk *path*, appending sub-directory names to the
    module-level list Y and grayscale, resized images to X."""
    for entry in os.listdir(path):
        full_path = os.path.join(path, entry)
        if os.path.isdir(full_path):
            # Each sub-directory name acts as a label.
            Y.append(entry)
            getImg(full_path)
        else:
            image = cv2.imread(full_path, cv2.IMREAD_GRAYSCALE)
            X.append(cv2.resize(image, (var.IMAGE, var.IMAGE)))
Пример #20
0
def lambda_handler(event, context):
    """Fetch new tweets for configured users and hashtags and store them
    in the DynamoDB table ``TweetSecond``.

    For each user/tag the id of the latest stored tweet is looked up so
    only newer tweets are fetched; duplicates across queries are filtered
    via their id_str.
    """
    since_date = "2019-01-01"
    fetcher = Fetcher()
    dynamodb = boto3.resource('dynamodb')
    table = dynamodb.Table('TweetSecond')

    queries = get_config(config_file_path="queries/queries.yml")
    screen_names = queries["users"]
    tags = queries["tags"]

    # shuffle lists
    random.shuffle(tags)
    random.shuffle(screen_names)

    fetched_tweets = []
    unique_ids = []

    for screen_name in screen_names:
        latest_user_tweet_id = None
        try:
            result = table.query(
                IndexName="user_screen_name-id_str-index",
                KeyConditionExpression=Key('user_screen_name').eq(screen_name),
                ScanIndexForward=False,
                Limit=1)
            latest_user_tweet_id = int(result["Items"][0]["id_str"])
        except Exception as e:
            # No tweet stored yet (or the query failed): fetch from scratch.
            print(e)

        print("Latest user tweet id: " + str(latest_user_tweet_id))

        currently_fetched_tweets = fetcher.get_user_timeline_tweets(
            screen_name=screen_name, since_id=latest_user_tweet_id)
        for tweet in currently_fetched_tweets:
            tweet_dict = tweet._json
            tweet_dict["user_screen_name"] = screen_name
            tweet_dict["hashtag"] = "None"
            # Round-trip through JSON so floats become Decimal — DynamoDB
            # rejects Python floats.
            tweet_dict = json.loads(json.dumps(tweet_dict),
                                    parse_float=Decimal)
            tweet_dict = remove_nones(tweet_dict)

            if tweet_dict["id_str"] not in unique_ids:
                fetched_tweets.append(tweet_dict)
                unique_ids.append(tweet_dict["id_str"])

    for tag in tags:
        latest_tag_tweet_id = None
        try:
            result = table.query(IndexName="hashtag-id_str-index",
                                 KeyConditionExpression=Key('hashtag').eq(tag),
                                 ScanIndexForward=False,
                                 Limit=1)
            latest_tag_tweet_id = int(result["Items"][0]["id_str"])
        except Exception as e:
            print(e)
        # (A dead, never-assigned `results` variable and its print were
        # removed here.)
        print("Latest tag tweet id: " + str(latest_tag_tweet_id))

        currently_fetched_tweets = fetcher.get_tweets_by_term(
            term=tag, since_id=latest_tag_tweet_id, since=since_date)
        for tweet in currently_fetched_tweets:
            tweet_dict = tweet._json
            tweet_dict["user_screen_name"] = "None"
            tweet_dict["hashtag"] = tag
            tweet_dict = json.loads(json.dumps(tweet_dict),
                                    parse_float=Decimal)
            tweet_dict = remove_nones(tweet_dict)

            if tweet_dict["id_str"] not in unique_ids:
                fetched_tweets.append(tweet_dict)
                unique_ids.append(tweet_dict["id_str"])

    print("Writing to DynamoDb")
    # write to DynamoDB
    with table.batch_writer() as batch:
        for tweet in fetched_tweets:
            batch.put_item(Item=tweet)
    print("Finished writing to DynamoDb")
Пример #21
0
def redis_connect():
    """Create a StrictRedis client from the [redis] config section."""
    return redis.StrictRedis(
        host=config.get_config('redis', 'host'),
        port=int(config.get_config('redis', 'port')),
        db=config.get_config('redis', 'database'),
    )
Пример #22
0
def mongo_connect():
    """Create a MongoClient from the [mongo] config section.

    The port is read as a string from the config and converted once —
    the original converted it to int twice.
    """
    mongo_host = config.get_config('mongo', 'db_host')
    mongo_port = int(config.get_config('mongo', 'db_port'))
    return pymongo.MongoClient(host=mongo_host, port=mongo_port)
Пример #23
0
    # set random seed
    if cfg.random_seed == 0:
        cfg.random_seed = random.randint(1, 10000)
        print('random seed set to {}'.format(cfg.random_seed))
        utils.flush()
    random.seed(cfg.random_seed)
    np.random.seed(cfg.random_seed)
    torch.manual_seed(cfg.random_seed)

    # set device as cuda or cpu
    if cfg.device.lower() == 'cuda' and torch.cuda.is_available():
        # reproducibility using cuda
        torch.cuda.manual_seed(cfg.random_seed)
        cudnn.deterministic = True
        cudnn.benchmark = False
    else:
        if cfg.device.lower() == 'cuda':
            print(
                'device option was set to <cuda>, but no cuda device was found'
            )
            utils.flush()
            cfg.device = 'cpu'

    trainer = Trainer(cfg)
    trainer.train()


if __name__ == '__main__':
    cfg, unparsed = get_config()
    main(cfg)
Пример #24
0
def configure_app(app):
    """Apply the object-based config, then overlay the env-specific pyfile.

    Returns the same app instance for chaining.
    """
    base_config = get_config()
    env_file = get_env()
    app.config.from_object(base_config)
    app.config.from_pyfile(env_file)
    return app
Пример #25
0
import sqlite3
from loguru import logger
import json
import traceback
from config import config
from models.bakchod import Bakchod
from models.group import Group

api_config = config.get_config()

chaddi_db = sqlite3.connect(api_config["db"]["uri"], check_same_thread=False)
c = chaddi_db.cursor()


def get_all_quotes():
    # Fetch every row of the quotes table via the module-level cursor `c`
    # and build a list of dicts. Returns None if nothing was fetched.
    # NOTE(review): this definition appears truncated in this chunk.

    all_quotes = None

    try:
        c.execute("""SELECT * FROM quotes""")
        query_result = c.fetchall()

        if query_result is not None:

            all_quotes = []

            for q in query_result:
                quote = {}
                quote["id"] = q[0]
                quote["message"] = json.loads(q[1])  # message is stored as JSON text
                quote["user"] = q[2]
Пример #26
0
from agent import optimization_agent
from agent import rollout_master_agent
from config.config import get_config
from util import csv_util
from util import parallel_util
from util import logger
import multiprocessing
import time
from environments import register
init_path.bypass_frost_warning()

if __name__ == '__main__':
    # get the configuration
    logger.info('New environments available : {}'.format(
        register.get_name_list()))
    args = get_config()
    args.max_pathlength = gym.spec(args.task).timestep_limit
    learner_env = gym.make(args.task)

    if args.write_log:
        logger.set_file_handler(
            path=args.output_dir,
            prefix='mujoco_' + args.task, time_str=args.time_id
        )

    learner_tasks = multiprocessing.JoinableQueue()
    learner_results = multiprocessing.Queue()
    learner_agent = optimization_agent.optimization_agent(
        args,
        learner_env.observation_space.shape[0],
        learner_env.action_space.shape[0],
Пример #27
0
 def __init__(self, target1, target2):
     # Store the two targets and look up the CSRF token field names for
     # query string, headers and request body from csrf.property.
     self.target1 = target1
     self.target2 = target2
     self.csrf_query = get_config('csrf.property', 'query', 'csrf_name')
     self.csrf_header = get_config('csrf.property', 'headers', 'csrf_name')
     self.csrf_body = get_config('csrf.property', 'body', 'csrf_name')
Пример #28
0
def user_init():
    # End-to-end test bootstrap: request a captcha, create a trial company,
    # log in, insert a back-office admin user, open the company through the
    # admin account, then log the regular user back in. Asserts on every
    # HTTP response and stores the token in global_params.

    # Request a captcha SMS for the configured phone number.
    phone = config.get_config('app', 'phone')
    url = config.get_config('app', 'host') + '/ucenter/captcha/company_trial'
    data = {'telephone': phone}
    headers = {'Encryption': 'CLB_NONE', 'Agent': '(IOS;1.0.0;IPhone)', 'VersionCode': '5.0.0'}
    res = requests.post(url, data=data, headers=headers)
    expact_data = {
        "status": 0,
        "message": "验证码已发送",
        "data": []
    }
    assert byteify(json.loads(res.text)) == expact_data
    # Read the captcha code back out of Redis.
    captcha = redis_con.hgetall('COMPANY_TRIAL' + phone)
    print "验证码:" + captcha['code']
    # Create the trial company with the captcha code.
    url = config.get_config('app', 'host') + '/ucenter/company/create'
    principal = "丁守坤"
    data = {'fullname_zh': "全程费控公司", 'principal': principal, 'telephone': phone, 'code': captcha['code'],
            'email': '*****@*****.**', 'shortname': '费控', 'license': ''}
    headers = {'Encryption': 'CLB_NONE', 'Agent': '(IOS;1.0.0;IPhone)', 'VersionCode': '5.0.0', 'X-From': 'www'}
    company = requests.post(url, data=data, headers=headers)
    company_id = json.loads(company.text)['data']['company_id']
    assert company_id == 1
    print  "休息下让队列跑一会"
    # Give the async queue time to process the company creation.
    time.sleep(15)
    # Log in as the new user; password is the last 6 digits of the phone.
    url = config.get_config('app', 'host') + '/ucenter/login'
    data = {'username': str(phone), "password": str(phone)[-6:]}
    headers = {'Encryption': 'CLB_NONE', 'Agent': '(IOS;1.0.0;IPhone)', 'VersionCode': '5.0.0'}
    res = requests.post(url, data=data, headers=headers)
    assert byteify(json.loads(res.text))['status'] == 0
    global_params.token = byteify(json.loads(res.text))['data']['token']
    print "token:" + global_params.token
    print "插入管控后台用户并设置给与权限"
    # Insert the back-office (admin) user directly into the DB and grant
    # it the admin role on the new company.
    with db:
        db_cur = db.cursor()
        db_cur.execute(
            "INSERT INTO `clb_user` VALUES  (null, '18616369918', '27345eea93fa977403c9f0e4471638b8', '18616369918', '*****@*****.**', '组长', '丁守坤管控', '1', null, '0', '2', '0', '0', null, '2016-06-24 14:21:00', '2016-06-24 14:20:53', null, '1', null, '12')")
        op_uid = int(db_cur.lastrowid)
        db_cur.execute('INSERT INTO `clb_role`(`user_id`,`role`, `item_id`,`item_type`) VALUES(%s,%s,%s,%s)',[op_uid,16,company_id,15])

    print '管控用户登录'
    # Log in as the admin user.
    data = {'username': str('18616369918'), "password": str(phone)[-6:]}
    res = requests.post(url, data=data, headers=headers)
    assert byteify(json.loads(res.text))['status'] == 0
    token = byteify(json.loads(res.text))['data']['token']

    print '管控用户开通企业'
    # Open (activate) the company through the admin account.
    url = config.get_config('app', 'host') + '/ucenter/company/open'
    headers['token'] = token
    data = {'company_id': company_id,'principal':principal,'telephone':phone,'email':'*****@*****.**','fullname_zh':'全程费控公司','surl':'abc','paying_type':1,\
    'enable_book_flight':2,'hotel_book_status':0,'cost_num':3,'expire_time':'2017-06-23','address':'233','phone':'323','header':'qcfk','license':'',\
    'shortname':'qcfk','fullname_en':'','origin':0,'attachment':'','company_certify':1,'status':0,'certified':0,'manager':'','create_at':'',\
    'domain':'http://img.qccost.com/'}
    res = requests.post(url, data=data, headers=headers)
    assert byteify(json.loads(res.text))['status'] == 0
    print '企业开通成功'
    log('test', "正式用户开始登陆")
    # Log the regular user back in and store the final token globally.
    url = config.get_config('app', 'host') + '/ucenter/login'
    data = {'username': str(phone), "password": str(phone)[-6:]}
    headers = {'Encryption': 'CLB_NONE', 'Agent': '(IOS;1.0.0;IPhone)', 'VersionCode': '5.0.0'}
    res = requests.post(url, data=data, headers=headers)
    assert byteify(json.loads(res.text))['status'] == 0
    global_params.token = byteify(json.loads(res.text))['data']['token']
    log('test', "token:" + global_params.token)
    log('test', "正式用户登陆成功")
Пример #29
0
context_processors.apply(app)

#REGISTER ALL BLUEPRINTS
messages.add_routes(app)
deals.add_routes(app)
signup.add_routes(app)
upload.add_routes(app)
api.add_routes(app)
faq.add_routes(app)

#ADD ERROR HANLDING CAPABILITY
error_handlers.apply_error_handlers(app)

auth0 = oauth.register(
    'auth0',
    client_id=config.get_config("auth_client_id"),
    client_secret=config.get_config("auth_client_secret"),
    api_base_url=config.get_config("auth_api_base_url"),
    access_token_url=config.get_config("auth_access_token_url"),
    authorize_url=config.get_config("auth_authorize_url"),
    client_kwargs={
        'scope': 'openid profile',
    },
)
app.config.update(
    MAIL_SERVER=config.get_config("MAIL_SERVER"),  #''smtp.gmail.com',
    MAIL_PORT=int(config.get_config("MAIL_PORT")),  #465,
    MAIL_USE_SSL=bool(config.get_config("MAIL_USE_SSL")),
    MAIL_USERNAME=config.get_config(
        "MAIL_USERNAME"),  #''*****@*****.**',
    MAIL_PASSWORD=config.get_config("MAIL_PASSWORD"),
Пример #30
0
# labels maps class names to indices, e.g. {'1': 0, '9': 3, '7': 2, '6': 1}
# (print() form works on both Python 2 and 3 for a single argument).
print(labels)

# Load the previously trained base model and attach a fresh
# fully-connected classification head on top of its output.
base_model = load_model(config.get_config("result","model_1"))
x = base_model.output
x = Dense(var.FILTER_4, activation='relu', name='fc1')(x)
predictions = Dense(var.CATEGORY_4, activation='softmax',name='dense_x')(x)
model = Model(inputs=base_model.input, outputs=predictions)
# Show the resulting layer stack for inspection.
for i, layer in enumerate(model.layers):
    print(i, layer.name, layer.output_shape)
Пример #31
0
def login():
    """Redirect the browser to Auth0 for authentication."""
    server = config.get_config("server")
    server_port = config.get_config("server_port")
    redirect_uri = 'http://%s:%s/callback' % (server, server_port)
    audience = config.get_config("auth_api_base_url") + '/userinfo'
    return auth0.authorize_redirect(redirect_uri=redirect_uri,
                                    audience=audience)
Пример #32
0
 def __init__(self):
     # Load the config once, then bring the instance to its initial state.
     self.cf = get_config()
     self.reset()
Пример #33
0
 def __init__(self):
     # Load configuration at start-up and keep the project root plus a
     # freshly created browser session on the instance.
     self.Config = config.get_config() # loads the config at start
     self.RootDir = RootDir
     self.browser = Browser.create()
import torch.utils.data as data_utils
import torchvision.transforms as transforms

# local imports
from config import config
from data import data

from util import logger
from util import setup
from util import train
from util import visdom_util


if __name__ == '__main__':

    args = config.get_config()

    # set up visdom
    visdom_util.visdom_initialize(args)

    # set up dataset loader
    trn_loader, val_loader, tst_loader = setup.setup_dataset(args)

    # set up model
    model = setup.setup_model(args)

    # set up criterions
    crit = setup.setup_criterion(args)

    # set up optimizer
    optimizer = setup.setup_optimizer(model, args)
 def __init__(self):
     # Cache the number of output classes from the global config.
     config = get_config()
     self.n_class = config.n_class
Пример #36
0
 def __init__(self, inven_set):
     # inven_set: list of inventory states to map; imsize is the side
     # length of the 2-D grid the states are mapped onto.
     self.inven_set = inven_set
     self.imsize = get_config()['imsize']
Пример #37
0
        else:
            node_param['joint'][i - 1] = np.array([
                node_attr['a_size'], node_attr['b_size'], node_attr['c_size'],
                node_attr['u'], node_attr['v'], node_attr['axis_x'],
                node_attr['axis_y'], node_attr['joint_range']
            ])

    return node_param, param_size_dict


if __name__ == '__main__':
    pass

    # Smoke test: build a small N-node test model and run gen_gnn_param.
    N = 5
    from config.config import get_config
    args = get_config(evolution=True)

    # Generate a test adjacency matrix and node attributes for the task.
    adj_mat = model_gen.gen_test_adj_mat(task=args.task, shape=(N, N))
    node_attr = model_gen.gen_test_node_attr(task=args.task, node_num=N)
    # Emit the task-specific debug XML model.
    if 'fish' in args.task:
        xml_struct = model_gen.fish_xml_generator(adj_mat,
                                                  node_attr,
                                                  options=None,
                                                  filename='debug_gnn_param')
    elif 'walker' in args.task:
        xml_struct = model_gen.walker_xml_generator(adj_mat,
                                                    node_attr,
                                                    options=None,
                                                    filename='debug_gnn_param')
    res = gen_gnn_param(args.task, adj_mat, node_attr)
Пример #38
0
def plot(analysis, **kwargs):
    """Produce ratio / alphas-sensitivity / chi2 plots for an analysis.

    kwargs drives the mode: 'plot' selects the plot type ('ratio',
    'asratio' or 'chi2'); 'pdf_sets' lists the PDF sets, optionally
    extended by 'pdf_family'; 'scenario' is used by the chi2 plot.
    """

    lhapdf_config = config.get_config('lhapdf2')
    ana_config = config.get_config(analysis)

    if kwargs['pdf_family'] is not None:
        kwargs['pdf_sets'] += lhapdf_config['pdf_families'][
            kwargs['pdf_family']]

    for scale in ana_config['theory'].as_list('scales'):
        # Different type of plots
        if kwargs['plot'] == 'ratio':
            measurements = []
            for pdf_set in kwargs['pdf_sets']:
                measurements.append(
                    get_measurement(analysis, pdf_set, scale))

            # Split into the unique 'y' bins and plot each one separately.
            bin1 = measurements[0].get_bin('y')
            bin1_unique = measurements[0].get_unique_bin('y')
            for y_bin in bin1_unique:
                mask = (bin1 == y_bin).T[0]
                # Apply mask to all measurements. Plain loop instead of
                # map(): under Python 3 map() is lazy, so the set_mask
                # side effect would never run.
                for measurement in measurements:
                    measurement.set_mask(mask)
                output_fn = '{}/ratio/{}_{}_{}_{}_{}'.format(config.output_plots,
                                                             analysis,
                                                             measurements[0].pdf_set,
                                                             scale, *y_bin)
                dt_plot = DataTheoryRatio(measurements, output_fn=output_fn,
                                          output_ext=['pdf', 'png'])
                dt_plot.do_plot()

        elif kwargs['plot'] == 'asratio':
            measurements = []
            for pdf_set in kwargs['pdf_sets']:
                measurements.append(
                    get_measurement(analysis, pdf_set, scale))

            # Split according the y bins
            bin1 = measurements[0].get_bin('y')
            bin1_unique = measurements[0].get_unique_bin('y')
            for y_bin in bin1_unique:
                mask = (bin1 == y_bin).T[0]
                # Apply mask to all measurements (see note above on map()).
                for measurement in measurements:
                    measurement.set_mask(mask)
                output_fn = '{}/asratio/{}_{}_{}_{}_{}'.format(config.output_plots,
                                                               analysis,
                                                               kwargs['pdf_family'],
                                                               scale,
                                                               *y_bin)
                as_plot = AlphasSensitivityPlot(measurements,
                                                output_fn=output_fn,
                                                output_ext=['pdf', ])
                as_plot.do_plot()

        elif kwargs['plot'] == 'chi2':
            output_fn = '{}/chi2/{}_{}_{}_{}.pdf'.format(
                config.output_plots,
                analysis,
                kwargs['pdf_family'],
                kwargs['scenario'],
                scale)
            chi2_plot = Chi2Distribution(analysis,
                                         kwargs['pdf_family'],
                                         kwargs['scenario'],
                                         scale,
                                         output_fn=output_fn,
                                         output_ext=['png', ])
            chi2_plot.do_plot()
Пример #39
0
# Convolutional tail: pool -> batch-norm -> conv -> pool, then flatten
# into a fully-connected head with dropout between the dense layers.
model.add(MaxPooling2D(strides=var.STRIDES, padding='same'))
model.add(BatchNormalization())
model.add(
    Conv2D(var.FILTER_3, (var.KERNEL_SIZE, var.KERNEL_SIZE),
           padding='same',
           activation='relu'))
model.add(MaxPooling2D(strides=var.STRIDES, padding='same'))
model.add(Flatten())
model.add(Dense(var.FILTER_5, activation="relu"))
model.add(Dropout(var.RATE))
model.add(Dense(var.FILTER_4, activation="relu"))
model.add(Dropout(var.RATE))
model.add(Dense(var.CATEGORY, activation="softmax"))
print(model.summary())
'''
三个参数:损失函数   优化器     指示指标
'''
# Compile with: loss function, optimizer, reported metric.
model.compile(loss='categorical_crossentropy',
              optimizer='sgd',
              metrics=['accuracy'])
'''
训练模型
'''

# Train on the training split, validating against the held-out test split.
model.fit(X_train, y_train, verbose=1, validation_data=(X_test, y_test))

# Persist the trained model to the configured path, then report test score.
model.save(config.get_config("result", "model"))

score = model.evaluate(X_test, y_test)
print('score:', score)
Пример #40
0
# Open a MySQL connection using credentials from the project config loader.
from mysql import connector
from config.config import get_config
sql_conn = connector.connect(
    user=get_config('mysql', 'username'),
    passwd=get_config('mysql', 'password'),
    host=get_config('mysql', 'sql_host'),
    database=get_config('mysql', 'database'),
    # NOTE(review): port is passed through as-is; if get_config returns a
    # string it may need int(...) — confirm against the config loader.
    port=get_config('mysql', 'port')
)
# buffered=True makes cursors fetch result sets eagerly, avoiding
# "unread result found" errors when queries are interleaved.
cursor = sql_conn.cursor(buffered=True)
Пример #41
0
from config.config import get_config
from db.connector.connection import connect_to_db
from db.models.user import User
from db.models.category import Category
from db.models.products import Products
from db.models.discount import Discount
from db.models.orders import Orders
from db.models.userCart import User_cart
from db.models.cartInfo import Cart_info
from datetime import datetime
import uuid

# Absolute path to the app's JSON configuration file.
# NOTE(review): machine-specific hard-coded path — should come from an
# environment variable or CLI argument.
config_path = "/home/diksha/Interview/Work/MyCart-App/config/configuration.json"

if __name__ == "__main__":
    # Load the configuration and open the database connection it describes.
    config = get_config(config_path)
    db_conn = connect_to_db(config["db"], config["host"], config["user"],
                            config["password"])

    # User Operations
    # CREATE
    # NOTE(review): sample email is missing '@' ('saavisirsatgmail.com') and
    # the password looks like a scrubbed placeholder — verify before reuse.
    user = User(db_conn,
                first_name="Saavi",
                last_name='Sirsat',
                email='saavisirsatgmail.com',
                contact_number=1234567890,
                address='Pune',
                postal_code='411031',
                password='******',
                user_type=1)
    user.create_user()
from data.dataloader import get_loader
from src.train import Trainer


def main(config):
    """Prepare output directories, build the data loader, and run training.

    Args:
        config: parsed run configuration; must expose checkpoint_dir,
            sample_dir, data_path, batch_size, image_size and workers.
    """
    # Fall back to a local 'checkpoints' directory when none was supplied.
    if config.checkpoint_dir is None:
        config.checkpoint_dir = 'checkpoints'
    os.makedirs(config.checkpoint_dir, exist_ok=True)
    os.makedirs(config.sample_dir, exist_ok=True)

    # NOTE: manual RNG seeding and cudnn.benchmark were present but disabled
    # in the original source.

    loader = get_loader(config.data_path,
                        config.batch_size,
                        config.image_size,
                        shuffle=True,
                        num_workers=int(config.workers))
    Trainer(config, loader).train()


if __name__ == "__main__":
    # Build the run configuration and hand it to main() to start training.
    config = get_config()
    main(config)
Пример #43
0
def cb_getconf():
    """Return the full configuration rendered as an HTML fragment.

    The whole config (get_config(None)) is JSON-dumped into a <p> element
    under a "Configuration" heading.
    """
    dumped = json.dumps(config.get_config(None))
    return '<h2>Configuration</h2><p>%s</p>' % dumped
Пример #44
0
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright © 2016 Clay L. McLeod <*****@*****.**>
#
# Distributed under terms of the MIT license.

from __future__ import print_function

from config import config
from tools.datatools import datatools

# Load the shared configuration and run the generated-to-output conversion
# over the configured data directory.
_config = config.get_config()
data_dir = _config['data_dir']
# NOTE(review): convert_gen_to_out semantics live in tools.datatools —
# presumably it converts "gen" files under data_dir into "out" files; confirm.
datatools.convert_gen_to_out(data_dir)
Пример #45
0
 def __init__(self, app):
     """Cache the shared config and start an app session for the jiuyin package.

     Args:
         app: driver/application handle providing a session(package_name) factory.
     """
     self.config_dc = cfg.get_config()  # shared configuration mapping
     self.app = app
     self.session = app.session(
         self.config_dc["jiuyin_pkg_name"])  # launch the app and obtain a session
Пример #46
0
def _init_config(app):
    """Initialize the Flask app's configuration for the current environment.

    The environment name is read from FLASK_ENV, defaulting to the DEV
    configuration when the variable is unset.
    """
    env_name = os.environ.get("FLASK_ENV", ConfigName.DEV.value)
    app.config.from_object(get_config(env_name))
Пример #47
0
 def __init__(self, main_net, eps=True):
     self.cf = get_config()
     self.main_net = main_net
     if eps:
         self.eps = self.cf['epsilon_max']
         self.eps_delta = (self.cf['epsilon_max']-self.cf['epsilon_min'])/self.cf['epsilon_decrease_step']