def __init__(self):
    self.logger = log.SysLogger().log
    # Read the 'wyt' credentials once instead of re-parsing the config per field
    wyt_config = Config().get_config('wyt')
    self.platform = wyt_config['platform']
    self.app_key = wyt_config['app_key']
    self.token = wyt_config['token']
    self.client_id = wyt_config['client_id']
    self.client_secret = wyt_config['client_secret']
def login_session():
    base_url = 'http://139.196.109.214/index.php/myibay/login/redirect/%252Findex.php%252Fmyibay'
    config = Config()
    # Plain string key; the original f-string had no placeholders
    payload = config.get_config('ibay_user_info')
    session = requests.Session()
    session.post(base_url, data=payload)
    return session
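# Example use of login_session (a minimal sketch; assumes the 'ibay_user_info'
# config section holds the login form fields and that auth is cookie-based):
def fetch_my_ibay_page():
    session = login_session()
    # Authenticated cookies ride along on subsequent requests
    resp = session.get('http://139.196.109.214/index.php/myibay')
    return resp.text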
def __init__(self): super().__init__() self.base_url = "https://oms.goodcang.net/default/svc/web-service" self.app_key = Config().get_config('gucang')['app_key'] self.token = Config().get_config('gucang')['token'] self.base_name = 'mssql' self.cur = self.base_dao.get_cur(self.base_name) self.con = self.base_dao.get_connection(self.base_name)
def __init__(self): super().__init__() self.base_url = "http://openapi.winit.com.cn/openapi/service" self.app_key = Config().get_config('gucang')['app_key'] self.token = Config().get_config('gucang')['token'] self.base_name = 'mssql' self.cur = self.base_dao.get_cur(self.base_name) self.con = self.base_dao.get_connection(self.base_name)
def __init__(self, account):
    self.logger = log.SysLogger().log
    ali_config = Config().get_config('ali')
    self.api_name = ali_config['api_name']
    self.app_key = ali_config['app_key']
    self.app_secret_key = ali_config['app_secret_key']
    self.refresh_token = ali_config['refresh_token'][account]
    self.token = self._get_access_token()
def __init__(self, rule_id=None):
    super().__init__()
    self.rule_id = rule_id
    self.headers = headers
    config = Config()
    self.haiying_info = config.get_config('haiying')
    # Use the async motor client; the synchronous MongoClient assignment was
    # immediately overwritten, so it has been dropped.
    self.mongo = motor.motor_asyncio.AsyncIOMotorClient('192.168.0.150', 27017)
    self.mongodb = self.mongo['product_engine']
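# Companion lookup sketch for the motor client above (the 'products'
# collection name is hypothetical; motor queries must be awaited):
async def find_product(self, product_id):
    return await self.mongodb['products'].find_one({'_id': product_id})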
def __init__(self):
    print('Starting Classifier')
    self.vocabulary = []
    self.modules = {}
    self.input_vector = []
    self.output_vector = []
    self.config = Config()
    self.translator = YandexTranslate(self.config.get_token_yandex())
    self.dao = Mongo_DAO('localhost', 27017, 'classifier')
def __init__(self, tupianku_name=1):
    super().__init__()
    config = Config()
    self.tupianku_name = tupianku_name
    self.tupianku_info = config.get_config(f'tupianku{tupianku_name}')
    # self.proxy_url = "http://127.0.0.1:1080"
    self.proxy_url = None
    self.session = aiohttp.ClientSession()
    self.base_name = 'mssql'
    self.cur = self.base_dao.get_cur(self.base_name)
    self.con = self.base_dao.get_connection(self.base_name)
def main():
    env_names = [
        'FetchReach-v1',
        'FetchPush-v1',
        'FetchPickAndPlace-v1',
        'FetchSlide-v1'
    ]
    env = gym.make(env_names[1])
    # env.render()
    config = Config()
    config.render = False
    config.max_steps = env.spec.timestep_limit
    env_params = get_env_params(env)
    print(env_params)
    agent = DDPGAgent(config, env, env_params, her_sampler)
    agent.learn()
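# Sketch of the get_env_params helper referenced above, following the usual
# HER setup for gym robotics envs (the field names are an assumption; the
# actual keys depend on what DDPGAgent expects):
def get_env_params(env):
    obs = env.reset()  # Fetch envs return dict observations
    return {
        'obs': obs['observation'].shape[0],
        'goal': obs['desired_goal'].shape[0],
        'action': env.action_space.shape[0],
        'action_max': env.action_space.high[0],
        'max_timesteps': env.spec.timestep_limit,
    }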
def __init__(self):
    super().__init__()
    self.config = Config().get_config('ebay.yaml')
    self.mongo = MongoClient('192.168.0.150', 27017)
    self.mongodb = self.mongo['operation']
    self.col = self.mongodb['ebay_description_template']
    self.col1 = self.mongodb['ebay_description_group']
def __init__(self):
    super().__init__()
    self.config = Config().get_config('ebay.yaml')
    # Date string (YYYY-MM-DD) for seven days ago
    self.batch_id = str(datetime.datetime.now() - datetime.timedelta(days=7))[:10]
    self.base_name = 'mssql'
    self.cur = self.base_dao.get_cur(self.base_name)
    self.con = self.base_dao.get_connection(self.base_name)
def make_config(self, z_dim=256, rec_x_weight=300, evaluation_verbose=False):
    config = {
        "privater": {
            "type": "vae",
            "z_dim": z_dim,
            "rec_x_weight": rec_x_weight,
            "encrypt_with_noise": True,
            "optimizer": {
                "type": "adam",
                "lr": 0.0003,
            }
        },
        "dataset": {
            "type": "ferg"
        },
        "trainer": {
            "type": "keras",
            "epochs": 50,
            "save_model": True
        },
        "evaluaters": [{
            "type": "utility",
            "z_dim": z_dim,
            "verbose": evaluation_verbose
        }, {
            "type": "private",
            "z_dim": z_dim,
            "verbose": evaluation_verbose
        }]
    }
    return Config(config)
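# Example caller (a sketch; run_experiment is a hypothetical entry point):
def sweep_z_dim(self, dims=(64, 128, 256)):
    for d in dims:
        cfg = self.make_config(z_dim=d)
        run_experiment(cfg)  # hypothetical: train/evaluate one config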
def main(args):
    if len(args) == 1:
        # TODO: launch GUI
        print("GUI Launch (TODO)")
    else:
        # Parse command arguments
        argdict = {}
        for i in range(1, len(args)):
            if args[i] == '-run':
                if i == len(args) - 1:
                    print("! Error: no input task.")
                    return
                task = args[i + 1]
                if not os.path.exists(task):
                    print("! Error: task does not exist.")
                    return
                argdict['run'] = task
                continue
            if args[i] == '-load':
                if i == len(args) - 1:
                    print("! Error: no asm testcase input.")
                    return
                testcase = args[i + 1]
                if not os.path.exists(testcase):
                    print("! Error: asm testcase does not exist.")
                    return
                argdict['load'] = testcase
        if 'load' in argdict:
            Testcase.importAsm(argdict['load'], Config.getValue('configs/global.json', 'defaultTestcasePath'))
        if 'run' in argdict:
            jg = Judger(argdict['run'])
            jg.judge()
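# Entry-point sketch: main expects argv-style input (flags are read from
# index 1 onward, matching the parser above):
if __name__ == '__main__':
    import sys
    main(sys.argv)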
class Jira_Api:
    def __init__(self):
        print('Starting Jira_Api')
        self.config = Config()

    def get_text(self, key):
        print('Starting lookup for ' + key)
        url = self.config.get_url_issue().replace('{issueIdOrKey}', key)
        response = requests.get(url, auth=HTTPBasicAuth(self.config.get_email(), self.config.get_password()))
        jiras = response.json()
        description = jiras["fields"]["description"] if jiras["fields"]["description"] is not None else ""
        return jiras["fields"]["summary"] + " " + description
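# Usage sketch (the issue key 'PROJ-123' is hypothetical; the URL template and
# credentials come from Config):
if __name__ == '__main__':
    api = Jira_Api()
    print(api.get_text('PROJ-123'))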
def make_config(self, NAME): config = { "privater": { "type": "ad_vae", "z_dim": z_dim, "rec_x_weight": 64 * 64 * 3, "prior_weight": 1, "encrypt_with_noise": True, "optimizer": { "type": "adam", "lr": 0.0003, } }, "dataset": { "type": "ferg" }, "trainer": { "type": "adv", "d_iter": 2, "epochs": 100 }, "evaluaters": [{ "type": "utility", "z_dim": z_dim, "verbose": evaluation_verbose }, { "type": "private", "z_dim": z_dim, "verbose": evaluation_verbose }, { "type": "reconstruction", "base_dir": NAME }] } return Config(config)
def __init__(self):
    super().__init__()
    self.config = Config().get_config('ebay.yaml')
    self.base_name = 'mssql'
    self.cur = self.base_dao.get_cur(self.base_name)
    self.con = self.base_dao.get_connection(self.base_name)
    self.col = self.get_mongo_collection('operation', 'ebay_product_list')
def __init__(self):
    super().__init__()
    self.config = Config().get_config('ebay.yaml')
    self.task = self.get_mongo_collection('operation', 'wish_off_shelf_task')
    self.product_list = self.get_mongo_collection('operation', 'wish_products')
def make_config(self, NAME): config = { "privater": { "type": "cvae_mi", "z_dim": z_dim, "global_weight": 1, "rec_x_weight": 10, "local_weight": 1, "encrypt_with_noise": True, "optimizer": { "type": "adam", "lr": 0.0003, } }, "dataset": { "type": "ferg" }, "trainer": { "type": "keras", "epochs": 100 }, "evaluaters": [{ "type": "private", "z_dim": z_dim, "verbose": evaluation_verbose }, { "type": "reconstruction", "base_dir": NAME }] } return Config(config)
def __init__(self):
    super().__init__()
    self.config = Config().get_config('ebay.yaml')
    self.batch_id = '2020-08-01'
    self.base_name = 'mssql'
    self.cur = self.base_dao.get_cur(self.base_name)
    self.con = self.base_dao.get_connection(self.base_name)
def create_app():
    app = Flask(__name__)
    app.config.from_object(Config)
    # Configure logging
    Config.init_app(app)
    # Initialize db with the app
    db.init_app(app)
    # Register the main blueprint
    from .api_www import api_bp as api_www_v1
    app.register_blueprint(api_www_v1, url_prefix='/wwwapi/v1')
    # Additional routes and custom pages go here
    return app
class iVerilog_Runner(Runner):
    config = Config.getConfig("configs/simulator/iverilog.json")

    def __init__(self, src, path):
        super().__init__(src, path)
        if "iVerilog-Path" in self.config:
            self.iv_path = self.config["iVerilog-Path"]
        else:
            self.iv_path = None

    def compile(self):
        if self.iv_path is None:
            IO.writestr("! Runner(iverilog).compile: iverilog not found.")
            return False
        iverilog = (self.iv_path + os.sep if self.iv_path else "") + "iverilog"
        src_unzip = self.path + "/src_unzip"
        mips = " ".join(self.v_list)
        r = os.system("cd {src} && {iverilog} -o mips.vvp tb.v {mips}".format(src=src_unzip, iverilog=iverilog, mips=mips))
        if r != 0:
            IO.writestr("! Runner(iverilog).compile: Error occurred during iVerilog compilation")
            return False
        return True

    def run(self, testcase, out):
        r = super().run(testcase, out)
        if not r:
            return False
        # Run the compiled design with vvp, capturing output under out/
        vvp = (self.iv_path + os.sep if self.iv_path else "") + "vvp"
        r = os.system("cd {path} && {vvp} src_unzip/mips.vvp > out/{out}".format(path=self.path, vvp=vvp, out=out))
        if r != 0:
            IO.writestr("! Error occurred during iVerilog run")
            return False
        return True
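# Usage sketch (the src/path arguments and testcase path are hypothetical;
# compile once, then run each testcase into out/):
runner = iVerilog_Runner('submissions/student.zip', 'work/job1')
if runner.compile():
    testcases = [Testcase.loadFrom('testcases/add.json')]  # hypothetical path
    for i, tc in enumerate(testcases):
        runner.run(tc, 'out%d.txt' % i)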
def loadTestSet(self):
    globalTestcases = Config.getValue('configs/testcase.json', 'testcases')
    self.testcaseSet = []
    for test in self.task['testcases']:
        for stdtest in globalTestcases:
            if test == stdtest['name']:
                self.testcaseSet.append(Testcase.loadFrom(stdtest['path']))
                break
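# Shape of configs/testcase.json implied by loadTestSet (a sketch; names and
# paths are hypothetical):
# {
#     "testcases": [
#         {"name": "add", "path": "testcases/add.json"},
#         {"name": "branch", "path": "testcases/branch.json"}
#     ]
# }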
def __init__(self):
    super().__init__()
    self.config = Config().get_config('ebay.yaml')
    self.base_name = 'mssql'
    # Shift by 8 hours (apparently a UTC+8 timezone adjustment)
    self.today = datetime.datetime.today() - datetime.timedelta(hours=8)
    # 1: "list product", 2: "add variations" (values kept in Chinese as stored)
    self.log_type = {1: "刊登商品", 2: "添加多属性"}
    self.cur = self.base_dao.get_cur(self.base_name)
    self.con = self.base_dao.get_connection(self.base_name)
    self.tokens = self.get_tokens()
def __init__(self, model_name, use_bert, bert_type=1, max_len_bert=None,
             bert_trainable=False, bert_config_file=None, bert_model_file=None,
             feature=False, swa=True, seed=42, columns='title', computers=False):
    self.config = Config()
    self.model_name = model_name
    self.use_bert = use_bert
    self.bert_type = bert_type
    self.bert_trainable = bert_trainable
    self.feature = feature
    self.store_name = model_name
    if self.use_bert:
        if max_len_bert > 100:
            self.config.batch_size = 16
        else:
            self.config.batch_size = 32
        self.store_name += '_bert%d' % self.bert_type
        if not bert_trainable:
            self.store_name += '_fix'
    if self.feature:
        self.store_name += '_f'
    if computers:
        self.store_name += '_computers'
    self.seed = seed
    if isinstance(columns, list):
        columns = '_'.join(columns)
    self.columns = columns
    self.config.checkpoint_dir = os.path.join(self.config.checkpoint_dir, columns)
    if not os.path.exists(self.config.checkpoint_dir):
        os.makedirs(self.config.checkpoint_dir)
    self.store_name = os.path.join(self.config.checkpoint_dir, self.store_name)
    print(self.store_name)
    if not os.path.exists(self.store_name):
        os.mkdir(self.store_name)
    self.config.max_len_bert = max_len_bert
    if bert_trainable:
        self.optimizer = Adam(lr=2e-5)
    else:
        self.optimizer = 'adam'
    self.bert_config_file = bert_config_file
    self.bert_model_file = bert_model_file
    self.callbacks = []
    self.swa = swa
def __init__(self):
    super().__init__()
    self.config = Config().get_config('ebay.yaml')
    self.base_name = 'mssql'
    self.warehouse = 'mysql'
    self.cur = self.base_dao.get_cur(self.base_name)
    self.con = self.base_dao.get_connection(self.base_name)
    self.warehouse_cur = self.base_dao.get_cur(self.warehouse)
    self.warehouse_con = self.base_dao.get_connection(self.warehouse)
def train_parse_fn(example):
    """
    :param example: serialized input Example
    :return: feature dict and label dict for training
    """
    config = Config()
    features = tf.parse_single_example(
        serialized=example,
        features={
            'img_name': tf.FixedLenFeature([], tf.string),
            'img_height': tf.FixedLenFeature([], tf.int64),
            'img_width': tf.FixedLenFeature([], tf.int64),
            'img': tf.FixedLenFeature([], tf.string),
            'gtboxes_and_label': tf.FixedLenFeature([], tf.string)
        })
    img_name = features['img_name']
    img_height = tf.cast(features['img_height'], tf.int32)
    img_width = tf.cast(features['img_width'], tf.int32)
    img = tf.decode_raw(features['img'], tf.uint8)
    img = tf.reshape(img, shape=[img_height, img_width, 3])
    img = tf.cast(img, tf.float32)
    gt_boxes_and_label = tf.decode_raw(features['gtboxes_and_label'], tf.int32)
    gt_boxes_and_label = tf.reshape(gt_boxes_and_label, [-1, 5])
    # Shape of img is (1024, 1024, 3); image_window is (4,) [y1, x1, y2, x2]
    img, gt_boxes_and_label, image_window = image_preprocess.image_resize_pad(
        img_tensor=img,
        gtboxes_and_label=gt_boxes_and_label,
        target_side=config.TARGET_SIDE)
    img, gt_boxes_and_label = image_preprocess.random_flip_left_right(
        img_tensor=img, gtboxes_and_label=gt_boxes_and_label)
    # Sample or pad so the number of gt boxes equals FAST_RCNN_MAX_INSTANCES
    num_objects = tf.shape(gt_boxes_and_label)[0]
    object_index = tf.range(num_objects)
    object_index = tf.random_shuffle(object_index)
    object_index = object_index[:config.FAST_RCNN_MAX_INSTANCES]
    gt_boxes_and_label = tf.gather(gt_boxes_and_label, object_index)
    anchor = make_anchor.generate_pyramid_anchors(config)
    minibatch_indices, minibatch_encode_gtboxes, rpn_objects_one_hot = \
        boxes_utils.build_rpn_target(gt_boxes_and_label[:, :4], anchor, config)
    # Pad to (FAST_RCNN_MAX_INSTANCES, 5) [y1, x1, y2, x2, label]
    num_padding = config.FAST_RCNN_MAX_INSTANCES - tf.shape(gt_boxes_and_label)[0]
    num_padding = tf.maximum(num_padding, 0)
    gt_box_label_padding = tf.zeros((num_padding, 5), dtype=tf.int32)
    gt_boxes_and_label = tf.concat([gt_boxes_and_label, gt_box_label_padding], axis=0)
    return ({"image_name": img_name, "image": img, "image_window": image_window},
            {"gt_box_labels": gt_boxes_and_label,
             "minibatch_indices": minibatch_indices,
             "minibatch_encode_gtboxes": minibatch_encode_gtboxes,
             "minibatch_objects_one_hot": rpn_objects_one_hot})
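# Input-pipeline sketch built on train_parse_fn (TF1-style, matching the
# tf.parse_single_example calls above; the tfrecord path is hypothetical):
import tensorflow as tf

def make_train_dataset(tfrecord_path, batch_size=1):
    dataset = tf.data.TFRecordDataset(tfrecord_path)
    dataset = dataset.map(train_parse_fn, num_parallel_calls=4)
    return dataset.shuffle(256).batch(batch_size).prefetch(1)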
def __init__(self):
    super().__init__()
    config = Config().config
    self.token = config['ur_center']['token']
    self.base_name = 'mssql'
    self.warehouse = 'mysql'
    self.cur = self.base_dao.get_cur(self.base_name)
    self.con = self.base_dao.get_connection(self.base_name)
    self.warehouse_cur = self.base_dao.get_cur(self.warehouse)
    self.warehouse_con = self.base_dao.get_connection(self.warehouse)
def main():
    config = Config()
    config.actor_layers = [100, 20]
    config.critic_layers = [100]
    env = gym.make('Humanoid-v2')
    max_steps = env.spec.timestep_limit
    print(max_steps)
    # Read sizes from the space shapes; np.shape on a gym Space object fails
    obs_size = env.observation_space.shape[0]
    action_size = env.action_space.shape[0]
    agent = A2CAgent(config, obs_size, action_size)
    returns = []
    observations = []
    actions = []
    for i in range(10000):
        print('iter', i)
        obs = env.reset()
        obs = np.array(obs[None, :], dtype=float)
        done = False
        totalr = 0.
        steps = 0
        while not done:
            action = agent.act(obs)
            observations.append(obs)
            actions.append(action)
            next_obs, r, done, _ = env.step(action)
            # print('reward', r)
            totalr += r
            steps += 1
            env.render()
            agent.step(obs, action, r, next_obs, done)
            obs = np.array(next_obs)
            if steps % 100 == 0:
                print("%i/%i" % (steps, max_steps))
            if steps >= max_steps:
                break
        returns.append(totalr)
        # print('returns', returns)
    print('mean return', np.mean(returns))
    print('std of return', np.std(returns))
def process_profile(**opts):
    """
    Execute each provider listed for the desired profile.
    :param opts: keyword options; must include 'profile' and 'output'
    :return: None
    """
    configs = Config()
    profile = opts['profile'].lower()
    opts['output'] += '/' + profile
    profile_dict = configs.get_config(
        configs.profile_config[profile]['user-input-config'])
    for provider, user_input_config in profile_dict.items():
        logging.info(
            f'START processing :: profile {profile} for provider {provider}')
        opts['provider'] = provider
        opts['user_input_config'] = user_input_config
        process(**opts)
        logging.info(
            f'END processing :: profile {profile} for provider {provider}')
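# Usage sketch (the profile name and output directory are hypothetical; opts
# must carry at least 'profile' and 'output'):
process_profile(profile='aws', output='/tmp/reports')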
def create_app(default_config_name):
    app = Flask(__name__)
    if default_config_name == 'TestConfig':
        app.config.from_object(TestConfig)
    else:
        app.config.from_object(Config)
    # Workaround: install the custom response class
    app.response_class = MyResponse
    Config.init_app(app)
    # Initialize db with the app
    db.init_app(app)
    # Register the main blueprint
    from .api_www import api_bp as api_www_v1
    app.register_blueprint(api_www_v1, url_prefix='/wwwapi/v1')
    # Additional routes and custom pages go here
    return app
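# Usage sketch: pick the test config for local runs; any other name falls
# back to Config.
app = create_app('TestConfig')
if __name__ == '__main__':
    app.run(debug=True)  # dev server only; use a WSGI server in production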
        if best_weights is not None:
            self.model.set_weights(best_weights)
        scores = self.run_epoch(dev_split, False, return_pred=True)
        assert scores[compare] == best_scores[compare]
        best_scores = scores
        return best_scores


if __name__ == '__main__':
    from docopt import docopt
    from pprint import pprint
    args = docopt(__doc__)
    pprint(args)

    config = Config.default() if args['--config'] == 'default' else Config.load(args['--config'])
    for spec in args['--options'].split(','):
        spec = spec.split(':')
        assert len(spec) == 2, 'invalid option specified: %s' % spec
        k, v = spec
        if isinstance(config[k], int):
            v = int(v)
        if isinstance(config[k], float):
            v = float(v)
        config[k] = v

    if 'data' in config and os.path.isdir(os.path.join(mydir, 'data', 'saves', config.data)):
        dataset = Dataset.load(os.path.join(mydir, 'data', 'saves', config.data))
    else:
        config.data = '_'.join([config.train, config.dev, config.featurizer, 'corrupt' + str(config.num_corrupt)])
        datasets = {
            'supervised': SupervisedDataAdaptor(),
            'kbp_eval': KBPEvaluationDataAdaptor(),
        for e in self.examples:
            if e.length not in lens:
                lens[e.length] = []
            lens[e.length] += [e]
        return lens

    def batches(self):
        lens = self.group_by_len()
        for l, ex in lens.items():
            yield l, ex


mydir = os.path.dirname(os.path.abspath(__file__))

if __name__ == '__main__':
    root = os.path.join(mydir, 'experiments', 'deploy')
    config = Config.load(os.path.join(root, 'config.json'))
    # Pickle files must be opened in binary mode
    with open(os.path.join(root, 'featurizer.pkl'), 'rb') as f:
        featurizer = pkl.load(f)
    typechecker = TypeCheckAdaptor(os.path.join(mydir, 'data', 'raw', 'typecheck.csv'), featurizer.vocab)
    model = get_model(config, featurizer.vocab, typechecker)
    model.load_weights(os.path.join(root, 'best_weights'))
    dev_generator = KBPDataAdaptor().online_to_examples(disable_interrupts='victor' != os.environ['USER'])
    cache = Cache()
    max_cache_size = 2 ** 15
    log = open(os.path.join(mydir, 'kbp.log'), 'wb')

    def process_cache(cache):
        for length, examples in cache.batches():
            X, Y, types = featurizer.to_matrix(examples)