def _define_conf(self):
    self._test_host = self._get_host_from_url(self.driver.current_url)
    parms = get_conf('parms', self._test_host)
    self._TIMEOUT_WAIT = parms['TIMEOUT_WAIT']
    self._TIMEOUT_LOCATE = parms['TIMEOUT_LOCATE']
    self._SHORT_WAIT = parms['SHORT_WAIT']
    self._INTERVAL = parms['INTERVAL']
    self._BY = parms['BY']
    authen = get_conf('authen', self._test_host)
    self._username = authen['username']
    self._password = authen['password']
    self._conn_type = authen['conn_type']
def main():
    args = parse_args()
    conf = config.get_conf(args.devel)
    if args.post is not None:
        post(args.post, conf)
    if args.deploy:
        deploy(conf)
def create(self, model_dir, conf_name, device):
    self.model_dir = model_dir
    self.conf = get_conf(conf_name)
    self.stage = STAGE_GENERATING
    self.iteration = 0
    self.step = 0
    # randomly initialize a network and let it be the best network
    self.network = ZetaGoNetwork(self.conf)
    self.best_network = ZetaGoNetwork(self.conf)
    self.best_network.load_state_dict(self.network.state_dict())
    self.network.to(device)
    self.best_network.to(device)
    # set up the optimizer; notice that the L2 regularization is
    # implemented by introducing a weight decay
    self.optimizer = optim.SGD(
        self.network.parameters(),
        lr=0.01,
        momentum=0.9,
        weight_decay=2 * self.conf.L2_REG,
    )
    # create an example pool
    self.example_pool = ExamplePool(self.model_dir, self.conf)
    self.example_pool.prepare_generation()
    # create a comparator to compare two networks
    self.comparator = Comparator(self.conf)
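# Note on the optimizer above (explanatory comment, not from the original
# source): PyTorch's SGD weight_decay adds weight_decay * w to each gradient,
# while an explicit L2 penalty L2_REG * ||w||^2 contributes gradient
# 2 * L2_REG * w, so weight_decay=2 * conf.L2_REG makes the two match.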
def build_stc_ipv4():
    traffic_config = get_conf()
    #logging.info(traffic_config)
    # L3 packet construction using Stc traffic parameters in traffic_config
    p3 = IP()
    p3.version = 4  # ip.version must be 4 in this function
    if "ip_hdr_length" in traffic_config:
        p3.ihl = int(traffic_config["ip_hdr_length"])
    if "ip_tos_field" in traffic_config:
        p3.tos = int(traffic_config["ip_tos_field"])
    if "l3_length" in traffic_config:
        p3.len = int(traffic_config["l3_length"])
    if "ip_id" in traffic_config:
        p3.id = int(traffic_config["ip_id"])
    if "ip_precedence" in traffic_config:
        p3.flags = int(traffic_config["ip_precedence"])
    if "ip_fragment_offset" in traffic_config:
        p3.frag = int(traffic_config["ip_fragment_offset"])
    if "ip_ttl" in traffic_config:
        p3.ttl = int(traffic_config["ip_ttl"])
    if "ip_protocol" in traffic_config:
        p3.proto = int(traffic_config["ip_protocol"])
    if "ip_dst_addr" in traffic_config:
        p3.dst = traffic_config["ip_dst_addr"]
    if "ip_src_addr" in traffic_config:
        p3.src = traffic_config["ip_src_addr"]
    return p3
def build_stc_tcp():
    """L4 TCP packet construction using Stc traffic parameters in traffic_config.

    :returns: p4, a TCP instance
    :raises: None
    """
    # TODO: tcp flags should be an int, e.g. p4.flags = 7 means "FSR"
    traffic_config = get_conf()
    p4 = TCP()
    if "tcp_src_port" in traffic_config:
        p4.sport = int(traffic_config["tcp_src_port"])
    if "tcp_dst_port" in traffic_config:
        p4.dport = int(traffic_config["tcp_dst_port"])
    if "tcp_seq_num" in traffic_config:
        p4.seq = int(traffic_config["tcp_seq_num"])
    if "tcp_ack_num" in traffic_config:
        p4.ack = int(traffic_config["tcp_ack_num"])
    if "tcp_data_offset" in traffic_config:
        p4.dataofs = int(traffic_config["tcp_data_offset"])
    if "tcp_reserved" in traffic_config:
        p4.reserved = int(traffic_config["tcp_reserved"])
    if "tcp_window" in traffic_config:
        p4.window = int(traffic_config["tcp_window"])
    if "tcp_urgent_ptr" in traffic_config:
        p4.urgptr = int(traffic_config["tcp_urgent_ptr"])
    return p4
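# Hedged sketch for the TODO above, not part of the original module: scapy's
# TCP.flags accepts either an int bitmask or a flag string, with FIN=0x01,
# SYN=0x02, RST=0x04, so 7 is equivalent to "FSR". The "tcp_flags" config key
# is an assumed name, not one confirmed by the source.
def _example_set_tcp_flags(p4, traffic_config):
    if "tcp_flags" in traffic_config:
        p4.flags = int(traffic_config["tcp_flags"])
    return p4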
def main(): """ The real meat henny """ drive_api_creds = get_conf("drive_api_creds") maps_api_key = get_conf("maps_api_key") home = get_conf("home").replace(' ', '+') work = get_conf("work").replace(' ', '+') book = GspreadHelper(drive_api_creds, "commute_times") time, day_of_week = get_time_and_day() week_sheet = 'Week {}'.format(str(datetime.now().isocalendar()[1])) # Get time in traffic from A to B in minutes travel_time = gmh.time_in_traffic_sec(home, work, maps_api_key) travel_time = int(travel_time/60) # See if sheet already exists, of not make it worksheet_list = [] for sheet in book.list_worksheets: worksheet_list.append(sheet.title) if week_sheet in worksheet_list: book.current_sheet = book.open_sheet(week_sheet) else: book.duplicate_worksheet("TEMPLATE", week_sheet) book.current_sheet = book.open_sheet(week_sheet) # pull all values in sheet to determine row and index (minimize requests) #sheet_values = sheet.get_all_records(head=1) sheet_values = book.get_all_records() sheet_vals = book.get_all_values() #print(list(l for l in sheet_vals)) # generate lists of row and col indexes to find target cell coordinates row_indices = [d['Time'] for d in sheet_values] col_indices = sheet_vals[0] # Now find the target cell from our current time and day target_row = row_indices.index(time) + 2 target_col = col_indices.index(calendar.day_name[day_of_week]) + 1 #pdb.set_trace() # Get time in traffic from A to B #book.current_sheet.update_cell(target_row, target_col, travel_time) book.update_cell(target_row, target_col, travel_time)
def admit(self, patient, pat_type='in'):
    self._root_path = self._get_root_path(pat_type)
    labels = get_conf('admit_label', self._test_host)
    for key in labels.keys():
        if getattr(patient, key) != '':
            self._enter_value(labels[key], getattr(patient, key))
    self._click_button('Admit')
def get_yml_file():
    """Return the path of the yml file for the configured dsn."""
    conf = get_conf()
    run_dir = get_run_dir()
    dsn = conf['dsn']
    yml_file = os.path.join(run_dir, dsn + ".yml")
    return yml_file
def set_log(filename):
    try:
        cf = config.get_conf()
        log_format = '%(asctime)s - %(levelname)s - %(message)s'
        logging.basicConfig(filename=cf.get('log', 'logpath') + filename,
                            level=logging.INFO,
                            format=log_format,
                            filemode='a')
    except Exception, e:
        logging.info(e)
def make_body_return_success(datas, share_key, nonce):
    result_dict = {
        "code": "00000",
        "desc": "success",
        "msg": "成功",  # Chinese for "success"
        "data": datas,
    }
    if get_conf('env') == 'dev' and not get_conf('aes_to_client'):
        return result_dict
    if share_key is None or nonce is None:
        return result_dict
    b_data = json.dumps(result_dict)
    send_data = binascii.hexlify(
        AES(b_data, sha256(share_key), binascii.unhexlify(nonce)))
    return {"data": str(send_data, encoding='utf-8')}
def make_body_return_encode_error(error_code, error_desc, error_msg,
                                  share_key, nonce):
    result_dict = {
        "code": str(error_code),
        "desc": error_desc,
        "msg": error_msg,
        "data": {},
    }
    if get_conf('env') == 'dev' and not get_conf('aes_to_client'):
        return result_dict
    if share_key is None or nonce is None:
        return result_dict
    b_data = json.dumps(result_dict)
    send_data = binascii.hexlify(
        AES(b_data, sha256(share_key), binascii.unhexlify(nonce)))
    return {"data": str(send_data, encoding='utf-8')}
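# Hedged refactoring sketch, not part of the original source: the two
# make_body_return_* functions above share the same encrypt-and-wrap tail,
# which could be factored into one helper. This reuses only the AES, sha256
# and binascii calls already present above; the helper name is hypothetical.
def _encrypt_result_dict(result_dict, share_key, nonce):
    b_data = json.dumps(result_dict)
    send_data = binascii.hexlify(
        AES(b_data, sha256(share_key), binascii.unhexlify(nonce)))
    return {"data": str(send_data, encoding='utf-8')}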
def main():
    # Load base options, initialize parser object
    print "Getting args parser"
    parser = get_args_parser()
    # Build the initial configuration dictionary
    config_dict = get_conf(sys.argv)
    # Run glimpse if the argument was specified
    if '--glimpse' in sys.argv:
        run_glimpse(config_dict)
    # Load the subparsers
    parser = load_subparsers(config_dict, parser)
    args = parser.parse_args()
    # Import the right cloud provider
    provider = config_dict['provider']['name']
    if args.smoke:
        print "Initializing {} provider instance maker object..".format(provider)
    InstanceMaker = __import__(
        "cloud_providers.{}.instance_maker".format(provider),
        fromlist=["blah"]).InstanceMaker
    # Get the meta information for the subcommand being run
    deployment_tool_name = vars(args)['deployment_tool_name']
    deployment_tool_meta_info = _get_meta_info(
        config_dict['deployment_tools'], deployment_tool_name)
    # Import the right cloud_init_generator plugin
    if args.smoke:
        print "Initializing the {} cloud init config generator".format(
            deployment_tool_name)
    CloudInitGenerator = __import__(
        "deployment_tools.{}.cloud_init_generator".format(deployment_tool_name),
        fromlist=["blah"]).CloudInitGenerator
    # Instantiate a CloudInitGenerator object
    cloud_init_generator = CloudInitGenerator(config_dict, args,
                                              deployment_tool_meta_info)
    # Generate the cloud_init config string representation
    cloud_init_config_string = cloud_init_generator.generate_cloud_init()
    if args.smoke:
        print "----CLOUD INIT CONFIG-----"
        print cloud_init_config_string
        print "--------------------------"
    instance_maker = InstanceMaker(
        args.instance_name,
        cloud_init_config_string,
        config_dict['provider']['auth_info'],
        vm_image=args.image,
        **config_dict['provider']['instance_info'])
    # Create the instance
    if not args.smoke:
        instance_maker.create_instance()
def send_stc_pkt(f):
    """Send IPv4 packets according to the configuration file.

    :param f: configuration file
    :returns: None
    :raises: None
    """
    init_conf(f)
    traffic_config = get_conf()
    dst_ip_list = []
    global traffic_results_ret
    p2 = build_stc_eth()
    if "l3_protocol" in traffic_config:
        if traffic_config["l3_protocol"] == "ipv4":
            p3 = build_stc_ipv4()
        elif traffic_config["l3_protocol"] == "ipv6":
            p3 = build_stc_ipv6()
        else:
            logging.error("layer 3 version must be 4 or 6")
            traffic_results_ret['status'] = '0'
    if "ip_protocol" in traffic_config:
        if traffic_config["ip_protocol"] == '6':
            p4 = build_stc_tcp()
        elif traffic_config["ip_protocol"] == '17':
            p4 = build_stc_udp()
        else:
            logging.error("layer 4 protocol must be 6 (TCP) or 17 (UDP)")
            traffic_results_ret['status'] = '0'
    # For layers beyond TCP, StcPacket() should only use default input
    # parameters, since the StcPacket layer is created according to the
    # *traffic_config.xml
    p5 = StcPacket()
    # TODO: should fetch burst_loop_count from *_p1_tx.py
    burst_loop_count = 1000
    dst_ip = traffic_config["ip_dst_addr"]
    dst_ip_list.append(dst_ip)
    for i in range(burst_loop_count - 1):
        dst_ip = get_next_valid_ip(dst_ip)
        dst_ip_list.append(dst_ip)
    p3.dst = dst_ip_list
    p = p2 / p3 / p4 / p5
    packetList = sendp(p, return_packets=True)
    traffic_results_ret['status'] = '1'
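# Usage note (hedged; the file name is illustrative): send_stc_pkt calls
# init_conf(f) itself, so a single call drives the whole burst, e.g.
#     send_stc_pkt("stc_traffic_config.xml")
# Because p3.dst is set to a list, scapy expands the packet into one packet
# per destination address, so sendp() emits burst_loop_count packets per call.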
def build_stc_eth():
    """L2 Ethernet frame construction using Stc traffic parameters in traffic_config."""
    traffic_config = get_conf()
    p2 = Ether()
    if "mac_dst" in traffic_config:
        p2.dst = traffic_config["mac_dst"]
    if "mac_src" in traffic_config:
        p2.src = traffic_config["mac_src"]
    #if "l2_encap" in traffic_config:
    #    p2.type = int(traffic_config["l2_encap"])
    return p2
def check_register_signature(self, register_signature, direct_error=True):
    env = get_conf('env')
    if register_signature == "111111" and env == 'dev':
        return True
    redis_tools = RedisTools()
    if redis_tools.exists(register_signature):
        redis_tools.delete(register_signature)
        return True
    if direct_error:
        self.return_aes_error(30058)
    else:
        return False
def main(args):
    conf = load_conf(args)
    mysqlInfo = MysqlInfo(conf['dsn'])
    tab_list = mysqlInfo.get_table_list(conf.get('include_table', []),
                                        conf.get('exclude_table', []))
    create_yml(tab_list)
    yml_file = get_yml_file()
    run_zr(yml_file, get_conf())
def _locate(self, value, by=get_conf('parms')['BY'],
            timeout=get_conf('parms')['TIMEOUT_LOCATE'], locate_all=False):
    """Locate element(s), by xpath by default.

    Note: the get_conf() defaults are evaluated once, at import time.
    """
    # Wait until the DOM stops growing and all load masks are hidden.
    load_complete = False
    while not load_complete:
        load_complete = True
        elements1 = self.driver.find_elements_by_tag_name('*')
        sleep(self._INTERVAL)
        elements2 = self.driver.find_elements_by_tag_name('*')
        if len(elements2) != len(elements1):
            load_complete = False
            continue
        try:
            loadmasks = self.driver.find_elements_by_xpath(
                "//div[contains(@class, 'MyMask')]")
        except (self.e.NoSuchElementException, self.e.TimeoutException):
            pass
        else:
            for el in loadmasks:
                if el.get_attribute('style').find('display: none;') < 0:
                    load_complete = False
                    break
    try:
        if locate_all:
            element = WebDriverWait(self.driver, timeout).until(
                EC.presence_of_all_elements_located((by, value))
            )
        else:
            element = WebDriverWait(self.driver, timeout).until(
                EC.presence_of_element_located((by, value))
            )
        self._delay(self._INTERVAL)
    except (self.e.NoSuchElementException, self.e.TimeoutException):
        self.logger.info('Finding the element(s) "' + value + '" by "' + by +
                         '" failed.')
        raise
    else:
        return element
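# Hedged usage sketch for _locate above (selectors are illustrative; By is
# selenium.webdriver.common.by.By):
#     button = self._locate('//button[text()="Admit"]')  # default: configured BY
#     rows = self._locate('tr.result', by=By.CSS_SELECTOR, locate_all=True)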
def __init__(self, table, fields):
    conf = config.get_conf()
    self.connexion = sqlite3.connect(conf['db_path'])
    self.connexion.row_factory = sqlite3.Row
    self.table = table
    self.fields = fields
    self.prefix = self.table + '_'
    self.namespaces = {
        'itunes': 'http://www.itunes.com/dtds/podcast-1.0.dtd',
        'podcastRF': 'http://radiofrance.fr/Lancelot/Podcast#',
    }
def get_app(devel=True):
    # make app object
    app = Flask(__name__)
    conf = config.get_conf(devel)
    app.config.from_object(conf)
    # database stuff
    db.init_app(app)
    db.database.create_tables([Entry, FTSEntry], safe=True)
    # views stuff
    app.register_blueprint(bp)
    return app
def check_authentication(code):
    env = get_conf('env')
    if env == 'dev' and code == '111111':
        return True
    result = False
    redis_tool = RedisTools()
    authentication_value = str(redis_tool.get('authentication'), encoding='utf8')
    if authentication_value == code:
        result = True
        # rotate the authentication code after a successful check
        authentication_value = generate_str()
        redis_tool.set('authentication', authentication_value)
    return result
def post(self, path):
    env = get_conf('env')
    if env != 'dev':
        return 'Hello'
    result = path
    # api_type = _USER_TYPE_ADMIN
    #
    # argument_dict, aes_share_key, aes_nonce = self.get_argument_dict(
    #     must_keys=['user_id', 'refresh_token'], check_token=False,
    #     api_type=api_type, request_type=_REQUEST_TYPE_LOGIN,
    #     decode_by_inner=_DECODE_TYPE_INNER)
    return result
def create_photo_path(format):
    date = now_date()
    year = date["year"]
    month = date["month"]
    day = date["day"]
    uid = str(uuid.uuid4())
    suid = ''.join(uid.split('-'))
    conf = get_conf()
    env = conf["env"]
    if env == "dev" or env == "test":
        prefix = "test/"
    elif env == "pd":
        prefix = "image/"
    else:
        # guard against an unknown env leaving prefix undefined
        raise ValueError("unknown env: " + str(env))
    path = (prefix + year + "-" + month + "/" + year + "-" + month + "-" +
            day + "/" + suid + "." + format)
    return path
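# Hedged usage sketch (assumes now_date() returns zero-padded strings and the
# "pd" env): create_photo_path("jpg") yields a path of the shape
#     image/<year>-<month>/<year>-<month>-<day>/<hex uuid>.jpg
# i.e. prefix + year-month/ + year-month-day/ + dashless uuid + "." + format.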
def main():
    # Option mappings from config file
    cfg = config.get_conf()
    site = cfg['site']
    query = construct_query(cfg)
    print '........Searching for flights........'
    print ('%s -- %s' % (config.depart, config.dest))
    print ('%s -- %s' % (config.from_date, config.to_date))
    content = get_page(site + '?' + query)
    f = open('r.html', 'wb')
    f.write(content)
    f.close()
    print extract_lowest(content)
def middleware(request):
    # Code to be executed for each request before
    # the view (and later middleware) are called.
    config = get_conf()
    request.aircraft_rankings_installed = False
    for mod in config['stats']['mods'].split(','):
        mod = mod.strip().lower()
        if mod == 'mod_stats_by_aircraft':
            request.aircraft_rankings_installed = True
    response = get_response(request)
    # Code to be executed for each request/response after
    # the view is called.
    return response
def recheck_data_integrity_count_failures(self, source_adaptor,
                                          destination_adaptor, cur_run_id,
                                          service_name, table_name,
                                          process_type):
    # For loads (teradata) perform the actual select count(*) at this point.
    # For merge (hive) get the count task descriptor string.
    table_online_state = "SUCCESS"
    conf = config.get_conf()
    max_failures = int(conf.get('max_failures', 2))
    # INCREMENT CHECK: find the incident record containing the maximum
    # number of increment failures
    (run_id, min_pkid, max_pkid, cnt_failures) = \
        self.get_recheck_increment_count_task(
            service_name, table_name, process_type)
    if cnt_failures > 0:
        (src_bounded_count, dest_bounded_count) = self.recompute_bounded_counts(
            source_adaptor, destination_adaptor, run_id, service_name,
            table_name, process_type, min_pkid, max_pkid, cnt_failures)
        if src_bounded_count > dest_bounded_count:
            table_online_state = "FAILURE:INCREMENTAL_COUNT"
    # RANDOMIZED WINDOW CHECK: find the incident record containing the
    # maximum number of randomized window count failures
    (run_id, min_pkid, max_pkid, cnt_failures) = \
        self.get_recheck_random_window_count_task(
            service_name, table_name, process_type)
    if cnt_failures > 0:
        (src_bounded_count, dest_bounded_count) = self.recompute_bounded_counts(
            source_adaptor, destination_adaptor, run_id, service_name,
            table_name, process_type, min_pkid, max_pkid, cnt_failures,
            isWindow=True)
        if src_bounded_count > dest_bounded_count:
            table_online_state = "FAILURE:WINDOW_COUNT"
    return table_online_state
def detect_file(find_file_name, dir_path=None):
    """Recursively search the project directory for a file.

    On success, return a dict like {"root": "/test", "file": "/test/t.py"};
    return False if the file is not found.

    :param dir_path: if empty, search under the project's main directory;
        otherwise search under dir_path, which must be a real filesystem
        path, not a relative one
    :param find_file_name: the file name to look for, including its suffix
    :return:
    """
    if not dir_path:
        server_name = get_conf('bases')['server_name']
        real_path = os.path.realpath(__file__)
        file_index = real_path.rfind(server_name)
        dir_path = os.path.abspath(
            os.path.join(real_path[:file_index], server_name))
    for root, dirs, files in os.walk(dir_path):
        if find_file_name in files:
            return dict(root=root,
                        file=os.path.abspath(os.path.join(root, find_file_name)))
    return False
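# Hedged usage sketch (the file name is illustrative):
#     hit = detect_file("settings.yml")
#     if hit:
#         print(hit["root"], hit["file"])  # e.g. "/test", "/test/t.py"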
def check_vcode(self, vcode, code_type, mobile, user_type=_USER_TYPE_INVEST,
                register_by=None, direct_return_error=True):
    if register_by in [_REGISTER_BY_MOBILE]:
        self.check_mobile_all(mobile)
    elif register_by in [_REGISTER_BY_EMAIL]:
        self.check_email(mobile)
    elif register_by in [
        _REGISTER_AUTHEN_MOBILE,
        _REGISTER_SET_PAYPD_MOBILE,
        _RESET_PWD_MOBILE,
        _REGISTER_AUTHEN_EMAIL,
        _REGISTER_SET_PAYPD_EMAIL,
        _RESET_PWD_EMAIL,
        _REGISTER_SET_EMAIL,
        _REGISTER_SET_MOBILE,
        _REGISTER_RESET_PAYPD_MOBILE,
        _REGISTER_RESET_PAYPD_EMAIL,
    ]:
        pass
    else:
        self.check_mobile(mobile)
    env = get_conf("env")
    if vcode == "111111" and env == 'dev':
        return True
    redis = RedisTools()
    redis_key = self.get_sms_key(mobile, code_type, user_type)
    check_result = redis.get(redis_key)
    if check_result:
        redis.delete(redis_key)
        return True
    else:
        if direct_return_error:
            self.return_aes_error(30059)
        else:
            return False
def test_model(model, test_x, test_y, tok_ori, tok_tra,
               filename=config.output_file, full=False):
    # Test the model.
    log.info('Testing model...')
    # Get predicted translations from the trained model.
    if full:
        preds = model.predict_classes(
            test_x.reshape((test_x.shape[0], test_x.shape[1])))
    else:
        preds = [model.predict_classes(instance) for instance in test_x]
    # Convert integer sequences to texts.
    original_bokmaal_sentences = convert_text(test_x, tok_ori, full=full)
    original_nynorsk_sentences = convert_text(test_y, tok_tra, full=full)
    preds_text = convert_text(preds, tok_tra, full=full)
    # Open file for printing predicted translations to.
    file = open(filename, 'w', encoding='utf-8')
    file.write(config.get_conf())
    # Write original bokmaal, nynorsk and predicted translated sentences to file.
    for i in range(len(preds_text)):
        file.write('\nBokmål:\n{}'.format(original_bokmaal_sentences[i]))
        file.write('\nNynorsk:\n{}'.format(original_nynorsk_sentences[i]))
        file.write('\nGenerated nynorsk:\n{}\n'.format(preds_text[i]))
    file.close()
    log.info('Generated nynorsk sentences were successfully saved to file: '
             '"{}".'.format(filename))
    print('Generated nynorsk sentences were successfully saved to file: '
          '"{}".'.format(filename))
def load(self, checkpoint_file, model_dir, conf_name, device):
    checkpoint = torch.load(checkpoint_file)
    self.model_dir = model_dir
    if conf_name != '':
        self.conf = get_conf(conf_name)
    else:
        self.conf = checkpoint['conf']
    self.stage = checkpoint['stage']
    self.iteration = checkpoint['iteration']
    self.step = checkpoint['step']
    self.network = ZetaGoNetwork(self.conf)
    self.network.load_state_dict(checkpoint['network'])
    self.best_network = ZetaGoNetwork(self.conf)
    self.best_network.load_state_dict(checkpoint['best_network'])
    self.network.to(device)
    self.best_network.to(device)
    self.optimizer = optim.SGD(
        self.network.parameters(),
        lr=0.01,
        momentum=0.9,
        weight_decay=2 * self.conf.L2_REG,
    )
    self.optimizer.load_state_dict(checkpoint['optimizer'])
    self.example_pool = ExamplePool(self.model_dir, self.conf)
    if self.stage == STAGE_GENERATING:
        self.example_pool.load(
            torch.load(
                f'{self.model_dir}/'
                f'example_pool_checkpoint_[iter_{self.iteration}].pt'))
    else:
        self.example_pool.load(checkpoint['example_pool'])
    self.comparator = Comparator(self.conf)
def check_vcode_picture(self, vcode_str, vcode_key='',
                        vcode_type=_REQUEST_TYPE_REGISTER,
                        user_type=_USER_TYPE_INVEST):
    env = get_conf('env')
    if vcode_str == "111111" and env == 'dev':
        return True
    if vcode_key == '':
        redis_key = (self.vcode_picture_key + str(vcode_type) + ':' +
                     str(user_type) + ':' + str(vcode_str).lower())
    else:
        redis_key = (self.vcode_picture_key + str(vcode_type) + ':' +
                     str(user_type) + ':' + str(vcode_key))
    redis_tools = RedisTools()
    redis_result = redis_tools.get(redis_key)
    raise_logger('check vcode' + ' ' + str(redis_key))
    if redis_result is None:
        self.return_aes_error(30052)
    if str(redis_result, encoding='utf-8').lower() != str(vcode_str).lower():
        self.return_aes_error(30060)
    redis_tools.delete(redis_key)
    return True
type = "string", default = "5.4", help = "version of the Satellite API", ) parser.add_option( "-l", "--softwarechannel-label", action = "callback", callback = config.parse_string, dest = "softwarechannel_label", type = "string", default = None, help = "softwarechannel label" ) (options, args) = config.get_conf(parser) if options.softwarechannel_label is None: parser.error('Error: specify label, -l or --softwarechannel-label') sys.exit(1) # Get session key via auth namespace. client = xmlrpclib.ServerProxy(options.satellite_url, verbose=0) key = client.auth.login(options.satellite_login, options.satellite_password) try: rpms = client.channel.software.listAllPackages( key, options.softwarechannel_label, ) except xmlrpclib.Fault, e:
default = "5.4", help = "version of the Satellite API", ) parser.add_option( "-n", "--snippet-name", action = "callback", callback = config.parse_string, dest = "snippet_name", type = "string", default = None, help = "snippet name option" ) (options, args) = config.get_conf(parser) if options.satellite_url is None: parser.error('Error: specify URL, -u or --satellite-url') if options.satellite_login is None: parser.error('Error: specify login, -l or --login') if options.satellite_password is None: parser.error('Error: specify password, -p or --password') if options.snippet_name is None: parser.error('Error: specify name, -n or --snippet-name') # Get session key via auth namespace. client = xmlrpclib.ServerProxy(options.satellite_url, verbose=0) key = client.auth.login(options.satellite_login, options.satellite_password) # create kickstart profile
# coding=utf-8
from bs4 import BeautifulSoup
import multiprocessing
import logging

import handle_mysqldb
import config

cf = config.get_conf()
db = handle_mysqldb.mysqldb()
config.set_log('parse_link.log')


def parse_link(page_source, id):
    try:
        soup = BeautifulSoup(page_source, 'lxml')
        jsonurls = soup.find('a', class_='js-image-size__link lazy-anchor')
        if jsonurls is not None:
            hrefurl = jsonurls['href']
            jsonurl = cf.get('web', 'basic_url') + hrefurl
            insert_sql = "insert into json_urls(link_id,json_url) values(%s,'%s')" % (
                id, jsonurl)
            db.insert_mysql(insert_sql)
            update_sql = "update link_urls set status=2 where id=%s" % (id)
            db.update_mysql(update_sql)
    except Exception, e:
        logging.info(e)


if __name__ == '__main__':
    pool = multiprocessing.Pool(processes=cf.getint('web', 'pro_num'))
    select_sql = "select id,page_source from link_urls where status=1"
def __init__(self):
    self.conf = config.get_conf()