def yolo_init(label_path='./cfg/yolov3.txt', config_path='./cfg/yolov3.cfg', weights_path='./cfg/yolov3.weights'):
    """Load the YOLOv3 network and its class metadata into module globals.

    Args:
        label_path: path to the class-label file (one label per line).
        config_path: path to the Darknet model configuration file.
        weights_path: path to the Darknet model weights file.

    Side effects: sets the module globals LABELS, COLORS, net, ln and
    yolo_inited.
    """
    global LABELS
    global COLORS
    global net
    global ln
    global yolo_inited
    # Load the class-label file; `with` closes the handle deterministically
    # (the original open() leaked it).
    with open(label_path) as f:
        LABELS = f.read().strip().split("\n")
    nclass = len(LABELS)
    # Assign one random (seed=42, hence reproducible) colour per class for
    # drawing bounding boxes.
    np.random.seed(42)
    COLORS = np.random.randint(0, 255, size=(nclass, 3), dtype='uint8')
    # Load the model configuration and weights.
    logger.logger().info('从硬盘加载YOLO......')
    net = cv2.dnn.readNetFromDarknet(config_path, weights_path)
    # Resolve the names of YOLO's unconnected output layers.
    # OpenCV < 4.5.4 returns Nx1 arrays ([[i], ...]) from
    # getUnconnectedOutLayers(); >= 4.5.4 returns a flat array ([i, ...]).
    # Flattening handles both layouts (the original `i[0] - 1` breaks on the
    # newer, flat layout).
    ln = net.getLayerNames()
    out_idx = np.asarray(net.getUnconnectedOutLayers()).flatten()
    ln = [ln[i - 1] for i in out_idx]
    yolo_inited = True
def SetRelayConcurrencyNew(begTime):
    """Count sessions active at `begTime`, grouped per relay IP.

    Reads the module globals db_path, entName1, RelayAll, RelayAllLen,
    RelayAllList and appends [relay-name-or-ip, count] arrays to the module
    global RelayResultList.

    Returns:
        (RelayResultList, begTime) on success; None if an exception was
        logged.
    """
    try:
        conn = sqlite3.connect(db_path)
        cur = conn.cursor()
        # Parameterised query (previously built with str.format, which is
        # SQL-injection prone and breaks on quotes in the inputs).
        sql = ("select count(relayIp),relayIp from confReport "
               "where cnfEntName=? and (userId not in (55,59,63,75,77,79)) "
               "and begTS<? and LastTS>? GROUP BY relayIp")
        cur.execute(sql, (entName1, begTime, begTime))
        RelayConcurrencyNumber = cur.fetchall()
        cur.close()
        conn.close()
        RelayConcurrencyNumberLen = len(RelayConcurrencyNumber)
        for i in range(RelayConcurrencyNumberLen):
            for j in range(RelayAllLen):
                RelayConcurrencyNumberIPaddr = RelayConcurrencyNumber[i][1]
                RelayIPaddr = RelayAll[j][1]
                if RelayConcurrencyNumberIPaddr == RelayIPaddr:
                    # Known relay: pair its name with the concurrency count.
                    RelayArray = numpy.array(
                        [RelayAll[j][0], RelayConcurrencyNumber[i][0]])
                elif RelayConcurrencyNumberIPaddr not in RelayAllList:
                    # Unknown relay: fall back to the raw IP address.
                    RelayArray = numpy.array([
                        RelayConcurrencyNumberIPaddr,
                        RelayConcurrencyNumber[i][0]
                    ])
                # NOTE(review): appended once per inner-loop iteration,
                # matching the original flat layout — this can append the
                # same array repeatedly (and NameError if neither branch ran
                # on the first pass). Confirm the intended nesting.
                RelayResultList.append(RelayArray)
        return (RelayResultList, begTime)
    except Exception:
        # Narrowed from a bare `except` (which also swallowed SystemExit /
        # KeyboardInterrupt); errors are logged, None is returned.
        s = traceback.format_exc()
        logger().error(s)
def SetRelayIntoInfluxdb(json_body):
    """Write relay-concurrency data points into the local InfluxDB.

    Args:
        json_body: list of point dicts in the influxdb write_points format.
    """
    try:
        # NOTE: the port is passed as the string '8086', as in the original.
        client = influxdb.InfluxDBClient('localhost', '8086', '', '',
                                         'RelayConcurrencyNumber')
        client.write_points(json_body)
    except Exception:
        # Narrowed from a bare `except` (which also swallowed SystemExit /
        # KeyboardInterrupt); the failure is logged and swallowed.
        s = traceback.format_exc()
        logger().error(s)
def UserConcurrencyTimeStamp():
    """Return the most recent sampling timestamp at or before now.

    Aligns the current time onto the TimeInterval grid anchored at today's
    midnight, then shifts back by TimeBefore. TimeInterval and TimeBefore
    are module globals — presumably in seconds; confirm at the definitions.

    Returns:
        int Unix timestamp on success; None if an exception was logged.
    """
    try:
        todaytime = TodayTimeStamp()
        startprogramtime = int(time.time())
        # Number of whole intervals elapsed since midnight.
        pointnumber = int((startprogramtime - todaytime) / TimeInterval)
        userconcurrencytimestamp = todaytime + (TimeInterval * pointnumber) - TimeBefore
        return userconcurrencytimestamp
    except Exception:
        # Narrowed from a bare `except`; errors are logged, None returned.
        s = traceback.format_exc()
        logger().error(s)
def TodayTimeStamp():
    """Return today's midnight (00:00:00 local time) as a Unix timestamp.

    Returns:
        int epoch seconds on success; None if an exception was logged.
    """
    try:
        # date.timetuple() already represents 00:00:00 of that day, so the
        # original no-op `+ timedelta()` and the strftime/strptime string
        # round-trip are unnecessary.
        return int(time.mktime(date.today().timetuple()))
    except Exception:
        # Narrowed from a bare `except`; errors are logged, None returned.
        s = traceback.format_exc()
        logger().error(s)
def __init__(self, use_sandbox, name='test', experiment_id=None, init_task_config=None):
    """Create (or attach to) an experiment record in MongoDB.

    Args:
        use_sandbox: whether the MTurk sandbox is being used.
        name: experiment name stored in a newly inserted record.
        experiment_id: if given, attach to this existing experiment instead
            of inserting a new document.
        init_task_config: initial task configuration dict (defaults to {}).
    """
    # Fix: the original default `init_task_config={}` was a mutable default
    # argument shared across all instances.
    if init_task_config is None:
        init_task_config = {}
    self.use_sandbox = use_sandbox
    self.name = name
    if experiment_id is None:  # was `== None`
        self.exp_id = db.experiments.insert({
            'name': name,
            'createTime': datetime.now(pytz.utc),
            'use_sandbox': use_sandbox,
            'tasks': [],
            'init_task_config': init_task_config
        })
    else:
        self.exp_id = experiment_id
    self.init_task_config = init_task_config
    self.logr = logger(fname=type(self).__name__)
    self.logr.debug(
        "Experiment created: %s, Sandbox = %s, init_task_config=%s" %
        (self.exp_id, self.use_sandbox, json_util.dumps(init_task_config)),
        class_name=type(self).__name__)
    # create tasks
    self.tasks = []
    # print params (parenthesised form works under both Python 2 and 3)
    print(self.get_task_conf())
def runTest(dataHand):
    """Run every test case in the data sheet and write results back.

    For each case: send the HTTP request, evaluate the assertion expression,
    optionally extract parameters for later cases, log a framed summary and
    write the outcome cells back through dataHand.

    Args:
        dataHand: data handler exposing testResultDataDir, getTestData(),
            data (sheet rows) and writeData().
    """
    log = logger(dataHand.testResultDataDir)
    testData = dataHand.getTestData()
    leng = len(testData)
    # Shared parameter dict; extracted values feed subsequent requests.
    paramDict = readConfig()
    for index, i in enumerate(testData):
        res = httpRequest(i, paramDict)
        ret = assertRes(i['断言/表达式'], res)
        getRet = {}
        if i['参数提取/表达式']:
            getRet = getParam(i['参数提取/表达式'], res)
            paramDict.update(getRet)
        log.info('\n{}\n{}\n{}\n\n{}\n{}\n\n'.format(
            f'╭─ 用例{i["用例编号"]}{"─"*14}request begin{"─"*13}{index+1}/{leng} ─╮',
            f'{res.request.method} {unquote(res.url)}',
            '\n'.join('{}: {}'.format(k, v) for k, v in res.request.headers.items()),
            type(res.request.body),
            f'╰─ {res.status_code}{"─"*16}request end{"─"*14}{ret[1]} ─╯'
        ))
        # Sheet row: case number offset by the two header rows.
        row = i['用例编号'] + 2
        # [row, column, value] triples to write back.
        # (renamed from `list`, which shadowed the builtin)
        cells = [
            [row, dataHand.data[0].index('实际(返回)结果') + 1, res.text],
            [row, dataHand.data[0].index('接口耗时(s)') + 1, int(res.elapsed.total_seconds() * 1000) / 1000],
            [row, dataHand.data[1].index('参考结果') + 1, ret[0]],
            [row, dataHand.data[0].index('测试结果') + 1, ret[1]],
            [row, dataHand.data[1].index('提取结果') + 1, json.dumps(getRet) if getRet else None]
        ]
        dataHand.writeData(cells)
def SetDayConcurrencyNew(begTime):
    """Count distinct users with a session active at `begTime`.

    Reads the module globals db_path and entName1.

    Returns:
        (count_as_string, begTime) on success; None if an exception was
        logged.
    """
    try:
        conn = sqlite3.connect(db_path)
        cur = conn.cursor()
        # Parameterised query (previously built with str.format, which is
        # SQL-injection prone and breaks on quotes in the inputs).
        sql = ("select count(distinct userId) from confReport "
               "where cnfEntName=? and (userId not in (55,59,63,75,77,79)) "
               "and begTS<? and LastTS>?")
        cur.execute(sql, (entName1, begTime, begTime))
        rows = cur.fetchall()
        cur.close()
        conn.close()
        # First column of the single result row, as a string. Replaces the
        # fragile str(tuple).strip('(').strip(')').strip(',') round-trip.
        UserConcurrencyNumber = str(rows[0][0])
        return (UserConcurrencyNumber, begTime)
    except Exception:
        # Narrowed from a bare `except`; errors are logged, None returned.
        s = traceback.format_exc()
        logger().error(s)
def __init__(self, config, dir_path):
    """Set up a Walkman simulation run: environment, agent, control, logging.

    Args:
        config: configuration object; frequencies, gains and joint lists are
            read from config.conf.
        dir_path: directory used for environment log files and the run logger.
    """
    self.dir_path = dir_path
    self.config = config
    self.config.print_configuration()
    # Loop frequencies: low-level PD control, physics stepping and the
    # high-level (network) controller.
    self.PD_freq = self.config.conf['LLC-frequency']
    self.Physics_freq = self.config.conf['Physics-frequency']
    self.network_freq = self.config.conf['HLC-frequency']
    # PD steps executed per network step.
    self.sampling_skip = int(self.PD_freq / self.network_freq)
    self.reward_decay = 1.0
    self.reward_scale = config.conf['reward-scale']
    self.reward_scale = self.reward_scale / float(
        self.sampling_skip)  # /10.0#normalizing reward to 1
    self.max_time = 10  #16
    self.max_step_per_episode = int(self.max_time * self.network_freq)
    self.env = Walkman(
        max_time=self.max_time,
        renders=True,
        initial_gap_time=0.1,
        PD_freq=self.PD_freq,
        Physics_freq=self.Physics_freq,
        Kp=config.conf['Kp'],
        Kd=config.conf['Kd'],
        bullet_default_PD=config.conf['bullet-default-PD'],
        controlled_joints_list=config.conf['controlled-joints'],
        logFileName=dir_path,
        isEnableSelfCollision=False)
    # The environment dictates the observation size.
    config.conf['state-dim'] = self.env.stateNumber
    self.agent = Agent(self.env, self.config)
    # self.agent.load_weight(dir_path+'/best_network')
    self.logging = logger(dir_path)
    # Bookkeeping for training progress / best results so far.
    self.episode_count = 0
    self.step_count = 0
    self.train_iter_count = 0
    self.best_reward = 0
    self.best_episode = 0
    self.best_train_iter = 0
    self.control = Control(self.config, self.env)
    # create new network
    # External disturbance forces (x, y, z components), initially zero.
    self.force = [0, 0, 0]
    self.force_chest = [0, 0, 0]  # max(0,force_chest[1]-300*1.0 / EXPLORE)]
    self.force_pelvis = [0, 0, 0]
    self.motion = Motion(config)
    self.image_list = []
def setUp(self):
    """Per-test fixture: load the shared ini config, set up logging and
    report bookkeeping, and create the LAN page object."""
    self.testcaseinfo = TestCaseInfo(id=22, name="DHCP 能否被正确关闭 ")
    config = configparser.ConfigParser()
    config.read(r"./common/data.ini")
    filepath = config.get("logfile", "logfile")
    self.log = logger(filepath)
    # Read the report row count and report file name.
    self.reportfile = config.get("report", "xlsfile")
    self.row = config.get("report", "line")
    self.teststarttime = config.get("teststarttime", "teststarttime")
    # Advance the report row for the next test case.
    config.set("report", "line", str(int(self.row) + 1))
    # Fix: close the ini file deterministically — the original
    # config.write(open(...)) leaked the handle and relied on GC to flush.
    with open("./common/data.ini", "w") as f:
        config.write(f)
    self.lanpage = LanPage()
def setUp(self):
    """Per-test fixture: load the shared ini config, set up logging and
    report bookkeeping, and create the wifi page object.

    Default-value check; assumes the default wifi name is frt_test and the
    password is iot23321 (translated from the original comment).
    """
    self.testcaseinfo = TestCaseInfo(id=27, name="不输入wifi密码时是否可以保存成功 ")
    config = configparser.ConfigParser()
    config.read(r"./common/data.ini")
    filepath = config.get("logfile", "logfile")
    self.log = logger(filepath)
    # Read the report row count and report file name.
    self.reportfile = config.get("report", "xlsfile")
    self.row = config.get("report", "line")
    self.teststarttime = config.get("teststarttime", "teststarttime")
    # Advance the report row for the next test case.
    config.set("report", "line", str(int(self.row) + 1))
    # Fix: close the ini file deterministically — the original
    # config.write(open(...)) leaked the handle and relied on GC to flush.
    with open("./common/data.ini", "w") as f:
        config.write(f)
    self.wifipage = WifiPage()
def setUp(self):
    """Per-test fixture: load the shared ini config, set up logging and
    report bookkeeping, and create the login page object."""
    self.baseurl = base_url()
    self.loginpage = LoginPage()
    self.testcaseinfo = TestCaseInfo(id=5, name="不填写密码时的提示信息是否正确 ")
    config = configparser.ConfigParser()
    config.read(r"./common/data.ini")
    filepath = config.get("logfile", "logfile")
    self.log = logger(filepath)
    # Read the report row count and report file name.
    self.reportfile = config.get("report", "xlsfile")
    self.row = config.get("report", "line")
    self.teststarttime = config.get("teststarttime", "teststarttime")
    # Advance the report row for the next test case.
    config.set("report", "line", str(int(self.row) + 1))
    # Fix: close the ini file deterministically — the original
    # config.write(open(...)) leaked the handle and relied on GC to flush.
    with open("./common/data.ini", "w") as f:
        config.write(f)
def setUp(self):
    """Per-test fixture: load the shared ini config, set up logging and
    report bookkeeping, and create the wifi page object."""
    self.testcaseinfo = TestCaseInfo(id=31, name="关闭SSID,查看电脑列表能否搜索到wifi名称 ")
    config = configparser.ConfigParser()
    config.read(r"./common/data.ini")
    filepath = config.get("logfile", "logfile")
    self.log = logger(filepath)
    # Read the report row count and report file name.
    self.reportfile = config.get("report", "xlsfile")
    self.row = config.get("report", "line")
    self.teststarttime = config.get("teststarttime", "teststarttime")
    # Advance the report row for the next test case.
    config.set("report", "line", str(int(self.row) + 1))
    # Fix: close the ini file deterministically — the original
    # config.write(open(...)) leaked the handle and relied on GC to flush.
    with open("./common/data.ini", "w") as f:
        config.write(f)
    self.wifipage = WifiPage()
def setUp(self):
    """Per-test fixture: load the shared ini config, set up logging and
    report bookkeeping, and create the setup page object."""
    self.baseurl = base_url()
    self.setuppage = SetupPage()
    config = configparser.ConfigParser()
    config.read(r"./common/data.ini")
    self.filepath = config.get("logfile", "logfile")
    # Read the report row count and report file name.
    self.reportfile = config.get("report", "xlsfile")
    self.row = config.get("report", "line")
    self.teststarttime = config.get("teststarttime", "teststarttime")
    # Advance the report row for the next test case.
    config.set("report", "line", str(int(self.row) + 1))
    # Fix: close the ini file deterministically — the original
    # config.write(open(...)) leaked the handle and relied on GC to flush.
    with open("./common/data.ini", "w") as f:
        config.write(f)
    print(self.filepath)
    self.log = logger(self.filepath)
    self.testcaseinfo = TestCaseInfo(id=36, name="原密码错误时提示信息检查 ")
def __init__(self):
    """Initialise a test run: create a timestamped log file and xls report,
    and persist both paths into the shared ini config."""
    self.testcaselistfile = "./common/testcases.txt"
    now = time.strftime("%Y-%m-%d %H_%M_%S")
    logpath = './log/' + now + '.log'
    config = configparser.ConfigParser()
    config.read(r"./common/data.ini")
    config.set("logfile", "logfile", logpath)
    # Fix: close the ini file deterministically — the original
    # config.write(open(...)) leaked the handle and relied on GC to flush.
    with open("./common/data.ini", "w") as f:
        config.write(f)
    self.log = logger(logpath)
    path = r"./report/%s" % now
    self.xls_file = r"%s.xls" % (path)
    # Write the report header row; returns the run start time.
    teststarttime = TT.write_title(self.xls_file)
    config.set('teststarttime', 'teststarttime', teststarttime)
    config.set("report", "xlsfile", self.xls_file)
    with open("./common/data.ini", "w") as f:
        config.write(f)
def __init__(self, config):
    """Set up a supervised-training run on a PyBullet gym environment.

    Args:
        config: configuration object; dimensions and run settings are stored
            in config.conf.
    """
    self.config = config
    env_name = 'Walker2DBulletEnv-v0'  #'AntBulletEnv-v0'#'Walker2DBulletEnv-v0'#'HumanoidBulletEnv-v0'
    self.env = gym.make(env_name)
    # Fixed state/action sizes for this experiment (not read from the env).
    self.config.conf['state-dim'] = 10
    self.config.conf['action-dim'] = 3
    self.agent = Agent(self.env, self.config)
    # Bookkeeping for training progress / best results so far.
    self.episode_count = 0
    self.step_count = 0
    self.train_iter_count = 0
    self.best_reward = 0
    self.best_episode = 0
    self.best_train_iter = 0
    # load weight from previous network
    # dir_path = 'record/2017_12_04_15.20.44/no_force' # '2017_05_29_18.23.49/with_force'
    # create new network
    # NOTE(review): '3D/' + '/' yields a doubled separator ('3D//') in the
    # record path — confirm this is intended.
    dir_path = 'TRPO/record/' + '3D/' + '/' + datetime.now().strftime(
        '%Y_%m_%d_%H.%M.%S')
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
    if not os.path.exists(dir_path + '/saved_actor_networks'):
        os.makedirs(dir_path + '/saved_actor_networks')
    if not os.path.exists(dir_path + '/saved_critic_networks'):
        os.makedirs(dir_path + '/saved_critic_networks')
    self.logging = logger(dir_path)
    config.save_configuration(dir_path)
    config.record_configuration(dir_path)
    config.print_configuration()
    self.agent.load_weight(dir_path)
    self.dir_path = dir_path
    #load test data and training data
    # Placeholder datasets: inputs all ones, targets all zeros.
    self.train_data = [np.ones((10000, 10)), np.zeros((10000, 3))]
    self.test_data = [np.ones((10000, 10)), np.zeros((10000, 3))]
    self.min_test_MSE = 10000000000
def __init__(self, config, dir_path):
    """Set up a Valkyrie run: environment, logging, control, reference motion.

    Args:
        config: configuration object (frequencies, gains, joints in .conf).
        dir_path: directory for environment log files and the run logger.
    """
    self.dir_path = dir_path
    self.config = config
    # Loop frequencies: low-level PD, physics stepping, high-level control.
    self.PD_freq = self.config.conf['LLC-frequency']
    self.Physics_freq = self.config.conf['Physics-frequency']
    self.network_freq = self.config.conf['HLC-frequency']
    # PD steps executed per network step.
    self.sampling_skip = int(self.PD_freq/self.network_freq)
    self.max_time = 6
    self.max_step_per_episode = int(self.max_time*self.network_freq)
    self.env = Valkyrie(
        max_time=self.max_time,
        renders=True,
        initial_gap_time=0.1,
        PD_freq=self.PD_freq,
        Physics_freq=self.Physics_freq,
        Kp=config.conf['Kp'],
        Kd=config.conf['Kd'],
        bullet_default_PD=config.conf['bullet-default-PD'],
        controlled_joints_list=config.conf['controlled-joints'],
        logFileName=dir_path,
        isEnableSelfCollision=False)
    # One extra dimension on top of the env's raw observation size —
    # presumably an auxiliary input (e.g. gait phase); confirm at the agent.
    config.conf['state-dim'] = self.env.stateNumber+1
    self.logging = logger(dir_path)
    # Bookkeeping for training progress / best results so far.
    self.episode_count = 0
    self.step_count = 0
    self.train_iter_count = 0
    self.best_reward = 0
    self.best_episode = 0
    self.best_train_iter = 0
    self.control = Control(self.config, self.env)
    # img = [[1,2,3]*50]*100
    # Blank 240x320 RGB canvas used to initialise the matplotlib preview.
    img = np.zeros((240,320,3))
    self.image = plt.imshow(img,interpolation='none',animated=True)
    self.ax=plt.gca()
    plt.axis('off')
    self.image_list = []
    self.ref_motion = Motion(config=self.config, dsr_gait_freq=0.6)
def __init__(self, config):
    """Set up a TRPO training run on a PyBullet gym locomotion environment.

    Args:
        config: configuration object; state/action dimensions and actor
            bounds are written into config.conf from the env's spaces.
    """
    self.config = config
    self.network_freq = 125  #self.config.conf['HLC-frequency']
    self.reward_decay = 1.0
    self.reward_scale = config.conf['reward-scale']
    # Episode limits, hard-coded to 10 s here (config keys kept as comments).
    self.max_time_per_train_episode = 10  #self.config.conf['max-train-time']
    self.max_step_per_train_episode = int(self.max_time_per_train_episode*self.network_freq)
    self.max_time_per_test_episode = 10  #self.config.conf['max-test-time']#16
    self.max_step_per_test_episode = int(self.max_time_per_test_episode*self.network_freq)
    env_name = 'Walker2DBulletEnv-v0'  #'AntBulletEnv-v0'#'Walker2DBulletEnv-v0'#'HumanoidBulletEnv-v0'
    self.env = gym.make(env_name)
    # self.env.render()
    print(self.env.observation_space)
    print(self.env.action_space)
    # State/action dimensions come from the env's spaces.
    self.config.conf['state-dim'] = self.env.observation_space.shape[0]
    self.config.conf['action-dim'] = self.env.action_space.shape[0]
    # Actor output bounds are [-1, 1] per action dimension; log-std starts
    # at log(1.0)=0 and is clipped to [log(0.2), log(1.0)].
    self.config.conf['actor-logstd-initial'] = np.zeros((1, self.config.conf['action-dim']))
    self.config.conf['actor-logstd-bounds'] = np.ones((2,self.config.conf['action-dim']))
    self.config.conf['actor-output-bounds'] = np.ones((2,self.config.conf['action-dim']))
    self.config.conf['actor-output-bounds'][0][:] = -1 * np.ones(self.config.conf['action-dim'],)
    self.config.conf['actor-output-bounds'][1][:] = 1* np.ones(self.config.conf['action-dim'],)
    self.config.conf['actor-logstd-initial'] *= np.log(1.0)  # np.log(min(std*0.25, 1.0))#0.5
    self.config.conf['actor-logstd-bounds'][0] *= np.log(0.2)
    self.config.conf['actor-logstd-bounds'][1] *= np.log(1.0)  # 0.6
    self.agent = Agent(self.env, self.config)
    # Bookkeeping for training progress / best results so far.
    self.episode_count = 0
    self.step_count = 0
    self.train_iter_count = 0
    self.best_reward = 0
    self.best_episode = 0
    self.best_train_iter = 0
    # load weight from previous network
    # dir_path = 'record/2017_12_04_15.20.44/no_force' # '2017_05_29_18.23.49/with_force'
    # create new network
    dir_path = 'TRPO/record/' + '3D/' + env_name +'/' + datetime.now().strftime('%Y_%m_%d_%H.%M.%S')
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
    if not os.path.exists(dir_path + '/saved_actor_networks'):
        os.makedirs(dir_path + '/saved_actor_networks')
    if not os.path.exists(dir_path + '/saved_critic_networks'):
        os.makedirs(dir_path + '/saved_critic_networks')
    self.logging = logger(dir_path)
    config.save_configuration(dir_path)
    config.record_configuration(dir_path)
    config.print_configuration()
    self.agent.load_weight(dir_path)
    self.dir_path = dir_path
    # Rollout storage and replay buffer.
    self.on_policy_paths = []
    self.off_policy_paths = []
    self.buffer = ReplayBuffer(self.config.conf['replay-buffer-size'])
    # External disturbance forces (x, y, z components), initially zero.
    self.force = [0,0,0]
    self.force_chest = [0, 0, 0]  # max(0,force_chest[1]-300*1.0 / EXPLORE)]
    self.force_pelvis = [0, 0, 0]
    # (Tail of a handler whose definition precedes this chunk: log the
    # failure and return a generic error message.)
    logr.error('get hit failed', class_name='get_hit')
    return 'Sorry, some error has occurred.'


@app.route('/unloaded', methods=['GET'])
def unloaded():
    """Mark a HIT instance as unloaded by the given worker."""
    hit_id = request.args.get('hitId', 'NoHitId')
    # NOTE(review): the workerId default reuses the string 'NoHitId' —
    # confirm this is intended rather than 'NoWorkerId'.
    worker_id = request.args.get('workerId', 'NoHitId')
    try:
        HitInstance.objects.get(hit_id=hit_id).isUnLoaded(worker_id=worker_id)
    except Exception as e:
        print "HitInstance not found, hitId = %s"%hit_id
    return "hit unloaded"


@app.route('/get_ipinfo', methods=['GET'])
@support_jsonp
def get_ipinfo():
    """Look up geo/ISP info for the caller's IP via ipinfo.io (JSONP)."""
    ip = get_header_ip()
    response = json.loads(urlopen('http://ipinfo.io/%s/json' % ip).read())
    return jsonify(response)


# Default to the MTurk sandbox; overridden below when run under a WSGI server.
use_sandbox = True
if __name__ == "__main__":
    # Development entry point: local logger, Flask debug server.
    logr = logger('./Logs', 'pcbc-mturk-dev', insertDate=False)
    app.run(host='0.0.0.0', port=4999, debug=True)
else:
    # init logger
    app.wsgi_app = ProxyFix(app.wsgi_app)
    logr = logger('./Logs', 'pcbc-mturk-server', insertDate=False)
    use_sandbox = False
import time
from random import randrange
import urllib3
import pandas as pd
import numpy as np
from app.constant import DISTANCE_API, DRIVING_API, WALKING_API, BICYCLING_API, KEY, EXCEL_FILE, NEW_EXCEL_FILE
# from app.constant import TXT_FILE, TMP_TXT_FILE
from common.logger import logger
from utils.excel import generate_excel_file, append_row, init_excel
from utils.amap import get_request_api, api_usability, lat_lon_decimal
from utils.text import excel_to_txt, text_line_append, text_to_excel

log = logger(app_name='app', file_name=__name__)
# Suppress warnings for unverified HTTPS requests made elsewhere.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# excel_file = new_excel_file = EXCEL_FILE
excel_file = 'latlon.xlsx'
# txt_file = TXT_FILE
txt_file = 'new.txt'
df = pd.read_excel(excel_file, engine='openpyxl')
# row_num, column_num = df.shape
df_rows = df.shape[0]
df_cols = df.shape[1]
# NOTE(review): the loop runs df_rows + 1 times — one more than the row
# count; confirm the off-by-one is intended. (Loop body continues beyond
# this chunk.)
for index in range(df_rows + 1):
    # NOTE(review): `global` at module level is a no-op — confirm `key` was
    # meant to be declared inside a function.
    global key
    # One key allows at most 30000 requests; each iteration issues 4.
    index_func_call_number = 30000 // 4 - 5
def __init__(self, config):
    """Set up a TRPO training run on Valkyrie with push disturbances.

    Args:
        config: configuration object; frequencies, gains, joints, reward
            scaling and buffer size are read from config.conf.
    """
    self.config = config
    # Loop frequencies: low-level PD, physics stepping, high-level control.
    self.PD_freq = self.config.conf['LLC-frequency']
    self.Physics_freq = self.config.conf['Physics-frequency']
    self.network_freq = self.config.conf['HLC-frequency']
    # PD steps executed per network step.
    self.sampling_skip = int(self.PD_freq / self.network_freq)
    self.reward_decay = 1.0
    self.reward_scale = config.conf['reward-scale']
    self.reward_scale = self.reward_scale / float(
        self.sampling_skip)  # /10.0#normalizing reward to 1
    self.max_time_per_train_episode = self.config.conf['max-train-time']
    self.max_step_per_train_episode = int(self.max_time_per_train_episode
                                          * self.network_freq)
    self.max_time_per_test_episode = self.config.conf['max-test-time']  #16
    self.max_step_per_test_episode = int(self.max_time_per_test_episode
                                         * self.network_freq)
    # Whether external pushes are applied; the flag also selects the record
    # sub-directory name.
    self.train_external_force_disturbance = True
    if self.train_external_force_disturbance == True:
        path_str = 'with_external_force_disturbance/'
    else:
        path_str = 'without_external_force_disturbance/'
    self.test_external_force_disturbance = True
    self.env = Valkyrie(
        max_time=self.max_time_per_train_episode,
        renders=False,
        initial_gap_time=0.5,
        PD_freq=self.PD_freq,
        Physics_freq=self.Physics_freq,
        Kp=config.conf['Kp'],
        Kd=config.conf['Kd'],
        bullet_default_PD=config.conf['bullet-default-PD'],
        controlled_joints_list=config.conf['controlled-joints'])
    # The environment dictates the observation size.
    config.conf['state-dim'] = self.env.stateNumber
    self.agent = Agent(self.env, self.config)
    # Bookkeeping for training progress / best results so far.
    self.episode_count = 0
    self.step_count = 0
    self.train_iter_count = 0
    self.best_reward = 0
    self.best_episode = 0
    self.best_train_iter = 0
    self.control = Control(self.config, self.env)
    # load weight from previous network
    # dir_path = 'record/2017_12_04_15.20.44/no_force' # '2017_05_29_18.23.49/with_force'
    # create new network
    dir_path = 'TRPO/record/' + '3D_push/' + path_str + datetime.now(
    ).strftime('%Y_%m_%d_%H.%M.%S')
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
    if not os.path.exists(dir_path + '/saved_actor_networks'):
        os.makedirs(dir_path + '/saved_actor_networks')
    if not os.path.exists(dir_path + '/saved_critic_networks'):
        os.makedirs(dir_path + '/saved_critic_networks')
    self.logging = logger(dir_path)
    config.save_configuration(dir_path)
    config.record_configuration(dir_path)
    config.print_configuration()
    self.agent.load_weight(dir_path)
    self.dir_path = dir_path
    # Rollout storage and replay buffer.
    self.on_policy_paths = []
    self.off_policy_paths = []
    self.buffer = ReplayBuffer(self.config.conf['replay-buffer-size'])
    # External disturbance forces (x, y, z components), initially zero.
    self.force = [0, 0, 0]
    self.force_chest = [0, 0, 0]  # max(0,force_chest[1]-300*1.0 / EXPLORE)]
    self.force_pelvis = [0, 0, 0]
import shutil
import pandas as pd
import zipfile
import os
import time
from pathlib import Path
from common.logger import logger

app_name = os.path.basename(__file__).split('.')[0]
# NOTE(review): rebinds the imported `logger` factory to a logger instance,
# shadowing the factory for the rest of the module.
logger = logger(app_name)
current_path = Path.cwd()
de_tmp_path = '../tmp'
# Source directory of raw waybill archives and output directory.
origin = r'C:\Users\ht\docs\地址清洗全国数据-运单数据'
save_path = r'C:\Users\ht\Desktop\waybill'
start_time = time.time()
logger.info('开始地址清洗数据切割')
# Collect all .zip files in the source directory.
all_zip_file = [f for f in Path(origin).glob('*') if str(f).endswith('.zip')]
logger.info(f'共计{len(all_zip_file)}个压缩文件')
for i, name in enumerate(all_zip_file):
    # NOTE(review): logger.info is called with two positional arguments; a
    # stdlib-style logger would treat `name` as a %-format argument for an
    # int message — confirm the custom logger accepts this.
    logger.info(i + 1, name)
# """
# Process the zip files one by one. (Loop body continues beyond this chunk.)
for zip_file in all_zip_file:
    # zip_file = all_zip_file[31]  # for testing with 38.zip, the largest archive
async def close_dialog(dialog):
    """close browser dialog windows"""
    # NOTE(review): `logger(dialog.message)` calls the logger object directly
    # rather than a level method such as .info() — confirm this is intended.
    logger(dialog.message)
    await dialog.dismiss()
# coding utf8 import codecs import random import re from pathlib import Path from datetime import datetime from retrying import retry import asyncio from common.constant import TOKEN_PATH from common.logger import logger logger = logger(app_name='chromium', file_name=__name__) def set_user_agent(): """choose one user-agent randomly for chromium""" user_agents = [ "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4092.1 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.117 Safari/537.36", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36", ] user_agent = random.choice(user_agents) return user_agent def generate_headers(user_agent, referer): """mock browser generate correct Request Headers parameters""" headers = { "Host": "fly.cainiao.com",
def __init__(self, config, dir_path):
    """Load a trained Valkyrie policy for evaluation/playback.

    Args:
        config: configuration object; the saved run configuration is loaded
            from dir_path before frequencies/gains are read.
        dir_path: run directory containing the saved configuration and the
            'best_network' weights.
    """
    self.dir_path = dir_path
    self.config = config
    self.config.load_configuration(dir_path)
    self.config.print_configuration()
    # Loop frequencies: low-level PD, physics stepping, high-level control.
    self.PD_freq = self.config.conf['LLC-frequency']
    self.Physics_freq = self.config.conf['Physics-frequency']
    self.network_freq = self.config.conf['HLC-frequency']
    # PD steps executed per network step.
    self.sampling_skip = int(self.PD_freq / self.network_freq)
    self.reward_decay = 1.0
    self.reward_scale = config.conf['reward-scale']
    self.reward_scale = self.reward_scale / float(
        self.sampling_skip)  # /10.0#normalizing reward to 1
    self.max_time = 10  #16
    self.max_step_per_episode = int(self.max_time * self.network_freq)
    self.env = Valkyrie(
        max_time=self.max_time,
        renders=False,
        initial_gap_time=0.5,
        PD_freq=self.PD_freq,
        Physics_freq=self.Physics_freq,
        Kp=config.conf['Kp'],
        Kd=config.conf['Kd'],
        bullet_default_PD=config.conf['bullet-default-PD'],
        controlled_joints_list=config.conf['controlled-joints'],
        logFileName=dir_path,
        isEnableSelfCollision=False)
    # Two extra dimensions on top of the env's raw observation size —
    # presumably auxiliary inputs; confirm against the agent's network.
    config.conf['state-dim'] = self.env.stateNumber + 2
    self.agent = Agent(self.env, self.config)
    self.agent.load_weight(dir_path + '/best_network')
    self.logging = logger(dir_path)
    # Bookkeeping for evaluation progress / best results so far.
    self.episode_count = 0
    self.step_count = 0
    self.train_iter_count = 0
    self.best_reward = 0
    self.best_episode = 0
    self.best_train_iter = 0
    self.control = Control(self.config, self.env)
    # create new network
    # External disturbance forces (x, y, z components), initially zero.
    self.force = [0, 0, 0]
    self.force_chest = [0, 0, 0]  # max(0,force_chest[1]-300*1.0 / EXPLORE)]
    self.force_pelvis = [0, 0, 0]
    # img = [[1,2,3]*50]*100
    # Blank 240x320 RGB canvas used to initialise the matplotlib preview.
    img = np.zeros((240, 320, 3))
    self.image = plt.imshow(img, interpolation='none', animated=True)
    self.ax = plt.gca()
    plt.axis('off')
    self.image_list = []
    self.ref_motion = Motion(config=self.config, dsr_gait_freq=0.6)
from common.dbclient import db
from models import Resource
from common.logger import logger
import numpy as np
import pandas as pd
import operator
import pytz, datetime

# Module-level logger shared by all Message instances.
logr = logger('pcbc-mturk.models.Message', verbose=False)


class Message(Resource.Resource):
    """A single crowd-sourcing message resource backed by MongoDB."""

    def __init__(self, experiment_id, text, message_id, shortname, task_id,
                 updated={}, collection="", write_enabled=True):
        # NOTE(review): `updated={}` is a mutable default argument, and
        # neither `updated` nor `write_enabled` is used in this constructor —
        # confirm whether methods outside this chunk rely on them.
        if collection != "":
            # Override the backing collection when one is named.
            self.C = db[collection]
        self.experiment_id = experiment_id
        self.text = text
        self.message_id = message_id
        self.shortname = shortname
        # task_id is normalised to a string.
        self.task_id = str(task_id)
        # Query document uniquely identifying this message in the collection.
        self.spec = {
            'experiment_id': self.experiment_id,
            'text': self.text,
            'message_id': self.message_id,
            'shortname': self.shortname,
            'task_id': self.task_id,
        }
        self.logr = logr
from random import shuffle, choice
from common.dbclient import db, client
from common.logger import logger
from dateutil import parser
from datetime import datetime

logr = logger(fname='pcbc-mturk-helpers', insertDate=False)


def pull_random_messages(requirements):
    """Pick messages for a HIT that the given worker has not voted on yet.

    requirements keys read: 'task_id', 'workerId', 'message_count_per_hit'
    and 'no_of_raters_per_message'. Prefers messages that still need votes;
    tops up from already-finished ones the worker hasn't seen.
    (Function continues beyond this chunk.)
    """
    task_id = requirements['task_id']
    worker_id = requirements['workerId']
    # 1 is the verification_message
    message_count_per_hit = requirements['message_count_per_hit'] - 1
    no_of_raters_per_message = requirements['no_of_raters_per_message']
    if client:
        # All messages for this task that the worker has not already rated.
        potential_messages = [x for x in db.message_votes.find(
            {'task_id': task_id, 'workers': {'$nin': [worker_id]}})]
        # Messages still short of the required number of raters.
        incomplete_messages = [
            m for m in potential_messages
            if len(m['votes']) < no_of_raters_per_message]
        # randomly choose message (n=message_count_per_hit)
        shuffle(incomplete_messages)
        chosen_messages = incomplete_messages[:message_count_per_hit]
        # If there only few messages in the chosen_messages, choose few from
        # finished ones that the worker hasn't seen.
        if len(chosen_messages) < message_count_per_hit:
            finished_messages = [
                m for m in potential_messages
                if len(m['votes']) == no_of_raters_per_message]
            shuffle(finished_messages)
            additional_messages = finished_messages[
                :(message_count_per_hit - len(chosen_messages))]
            chosen_messages.extend(additional_messages)
    # (Tail of a handler whose definition precedes this chunk.)
    # Summarise each flagged worker: their bad votes, the assignments those
    # votes came from, and their total response count.
    for w in bad_workers:
        bad_workers[w] = {'bad': bad_workers[w]}
        bad_workers[w]['badass'] = [
            x['assignmentId'] for x in bad_votes if x['workerId'] == w
        ]
        bad_workers[w]['all'] = db.responses.find({'workerId': w}).count()
    return render_template('experiment.html', **locals())


@app.route('/utils/<op>', methods=['GET'])
def utils(op):
    """Dispatch the named turk_utils operation with the request's query
    string and return its result as extended JSON."""
    query = dict(request.args.items())
    # aid = query.get('aid',False)#.strip()
    # f = getattr(turk_utils,op)
    # raise
    res = turk_utils.gen_operation(op, query)
    # if not aid:
    #     res = f()
    # else:
    #     res = f(aid)
    return json_util.dumps(res)


if __name__ == "__main__":
    # Development entry point: local logger, Flask debug server.
    logr = logger('./Logs', 'pcbc-mturk-analysis-dev')
    app.run(host='0.0.0.0', port=4998, debug=True)
else:
    # init logger
    logr = logger('./Logs', 'pcbc-mturk-analysis-server')
import common.mturk as mturk
import common.task_utils as task_utils
from common.logger import logger
from bson import ObjectId
import traceback
import time
from datetime import datetime
import pytz
from common.dbclient import db

logr = logger('./Logs', 'check_status_task', insertDate=False)
# logr.plog("Checking status...")

# NOTE(security): live AWS credentials are hard-coded and committed below.
# They should be revoked and loaded from environment variables or an AWS
# credentials profile instead of being stored in source control.
# Production MTurk client configuration.
conf_turk = {
    "use_sandbox": False,
    "stdout_log": False,
    "verify_mturk_ssl": True,
    "aws_key": "AKIAJLJ5F2MLV36GZKAA",
    "aws_secret_key": "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"
}
# Same credentials, pointed at the MTurk sandbox.
conf_sbox = {
    "use_sandbox": True,
    "stdout_log": False,
    "verify_mturk_ssl": True,
    "aws_key": "AKIAJLJ5F2MLV36GZKAA",
    "aws_secret_key": "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"
}
m_Turk = mturk.MechanicalTurk(conf_turk)
m_sandbox = mturk.MechanicalTurk(conf_sbox)
# coding utf8 import asyncio import uuid from pyppeteer import launch from common.logger import logger from common.constant import WIDTH, HEIGHT, EXPRESS_DOMAIN_URL from utils.chromium import set_user_agent, intercept_request, read_token, get_cookies, generate_headers logger = logger(app_name='cookies_token', file_name=__name__) async def get_cookies_token(username, password, name, stat_date): browser = await launch( headless=True, slowMo=15, autoClose=False, args=[ '--disable-infobars', '--disable-extensions', '--hide-scrollbars', '--mute-audio', '--no-sandbox', '--disable-gpu', '--disable-setuid-sandbox', '--disable-translate', '--safebrowsing-disable-auto-update', '--disable-bundled-ppapi-flash', '--window-size={},{}'.format(WIDTH, HEIGHT) ], dumpio=True) page = await browser.newPage() user_agent = set_user_agent() await page.setUserAgent(user_agent) await page.setViewport({'width': WIDTH, 'height': HEIGHT})