def facePlusPlus(filenPath):
    """Call the Face++ detect API on the image at *filenPath*.

    Returns the face_token of the first detected face.  When the API
    returns no faces, falls through to drawing (empty) face rectangles,
    as the original did.  Exits the process if the API reports an error.
    """
    http_url = "https://api-cn.faceplusplus.com/facepp/v3/detect"
    # NOTE(review): hard-coded API credentials — move to configuration/env.
    key = "JPofXVcqsEro1KO7pseh9H2KFXLuTjns"
    secret = "vL2SaCF41T78-S-iz0DI-v4rrGxn1cuP"
    filepath = filenPath
    data = {"api_key": key, "api_secret": secret, "return_landmark": "0"}
    # FIX: context manager — the original leaked the image file handle.
    with open(filepath, "rb") as image_file:
        response = requests.post(http_url, data=data,
                                 files={"image_file": image_file})
    req_con = response.content.decode('utf-8')
    req_dict = JSONDecoder().decode(req_con)
    print(req_dict)
    # Idiomatic membership test (was req_dict.__contains__(...)).
    if "error_message" in req_dict:
        print(req_dict.get("error_message"))
        exit(1)
    faces = req_dict.get("faces")
    # Return the token of the first face; the drawing code below is only
    # reached when the API returned an empty face list.
    for face in faces:
        return face.get("face_token")
    # Create a drawing instance and outline each face rectangle on the image.
    Draw = photoDraw()
    face_rectangle = req_dict.get("faces")
    for entry in face_rectangle:
        rect = entry.get("face_rectangle")
        Draw.Draw(rect.get("top"), rect.get("left"),
                  rect.get("width"), rect.get("height"))
    Draw.show()
def post(self, cluster_name):
    """Copy a jar directory to the cluster's master node, stage it under
    /home/hadoop/jars, then submit it as a Hadoop job as the hadoop user.

    Expects a JSON body with sshKeyPath, jarDir, jarName, jarClass and
    jarParams; aborts with 400 on bad parameters or any failure.
    """
    try:
        json_data = request.get_json(force=True)
        # Round-trip through a JSON string to normalize the parsed body
        # into a plain dict.
        json_str = json.dumps(json_data)
        dict_data = JSONDecoder().decode(json_str)
        project_name = request.args.get('project', "admin")
        # Resolve the master node IP of the named Sahara cluster.
        masterIP = shell.call( get_sahara_cluster_s_masterIP_cmd(project_name, cluster_name)).strip()
        sshKeyPath = dict_data.get('sshKeyPath')
        jar_dir = dict_data.get('jarDir')
        # NOTE(review): basename(None) raises TypeError when jarDir is
        # missing, before the 400 validation below runs — confirm intent.
        base_jar_dir = os.path.basename(jar_dir)
        jar_name = dict_data.get('jarName')
        jar_class = dict_data.get('jarClass')
        jar_params = dict_data.get('jarParams')
        if not masterIP or not sshKeyPath or not jar_dir or not jar_name or not jar_params:
            abort(400, message="bad parameter in request body")
        # Stage the jar directory on the master under /home/centos.
        shell.call("scp -i %s -r %s centos@%s:/home/centos" % (sshKeyPath, jar_dir, masterIP))
        if base_jar_dir == 'jars':
            # Directory is already named 'jars': copy it straight into
            # /home/hadoop and fix ownership.
            shell.call("ssh -i %s centos@%s \"sudo su - -c \'cp -R /home/centos/%s /home/hadoop && chown -R hadoop:hadoop /home/hadoop/jars\'\"" \
                % (sshKeyPath, masterIP, base_jar_dir))
        else:
            # Otherwise rename the copied directory to 'jars' first.
            shell.call("ssh -i %s centos@%s \"sudo su - -c \'cp -R /home/centos/%s /home/hadoop && mv /home/hadoop/%s /home/hadoop/jars && chown -R hadoop:hadoop /home/hadoop/jars\'\"" \
                % (sshKeyPath, masterIP, base_jar_dir, base_jar_dir))
        # Run the submit command on the master as the hadoop user and
        # return its output.
        output = shell.call("ssh -i %s centos@%s \"sudo su - -c \'%s\' hadoop\"" \
            %(sshKeyPath, masterIP, submit_job_on_hadoop_cmd(jar_name, jar_class, jar_params))).strip()
        return output_json(output, 200)
    except Exception:
        log.exception(traceback.format_exc())
        abort(400, message="Request failed")
def send_yunpian_message(phone_number, **data): """ 发送yunpian验证码 phone_number: 接受短信手机号 data: message_type: 验证码类别 test: 验证码类: from common import utils utils.send_yunpian_message('13521296223', code='888888') """ if getattr(settings, 'LOCAL', False) or getattr(settings, 'DEV', False): print "local....", data return True message_type = data.get('message_type', u'common') message_types = { u"member_register_count_warning": (YUNPIAN_API_KEY, u"今日新增用户:%(event_name)s,请登录到管理后台查询"), } text = message_types[message_type][1] % data params = { 'apikey': message_types[message_type][0].encode('utf-8'), 'mobile': str(phone_number).encode('utf-8'), 'text': text.encode('utf-8'), } print text host = settings.SMS_HOST url = settings.SMS_URL headers = { 'User-Agent': u'python', 'Content-Type': u'application/x-www-form-urlencoded; charset=UTF-8', } values = urllib.urlencode(params) try: conn = httplib.HTTPSConnection(host) conn.request("POST", url, values, headers) response = conn.getresponse() data = response.read() #print phone_number, 'Response: ', response.status, response.reason, 'Data:', data json_data = JSONDecoder().decode(data) if json_data.get('code') == 0 and json_data.get('msg') == 'OK': return True except Exception, e: print '[error SMS] : 发送短信失败: %s' % e
def post(self, request):
    """Log a user in (or register them) from a Facebook access token.

    Returns 200 with the username/user id on success, 401 when the token
    is rejected.
    """
    # BUG FIX: json.JSONDecoder has no .parse() method — the original
    # raised AttributeError on every request.  Decode the request body.
    data = JSONDecoder().decode(request.body.decode('utf-8'))
    access_token = data.get('access_token', '')
    try:
        app = SocialApp.objects.get(provider="facebook")
        token = SocialToken(app=app, token=access_token)
        # check token against facebook
        login = fb_complete_login(app, token)
        login.token = token
        login.state = SocialLogin.state_from_request(request)
        # add or update the user into users table
        ret = complete_social_login(request, login)
        # if we get here we've succeeded
        return HttpResponse(status=200, data={
            'success': True,
            'username': request.user.username,
            'user_id': request.user.pk,
        })
    except Exception:
        # FIX: narrowed from a bare except so SystemExit/KeyboardInterrupt
        # are not swallowed; any token/login failure still yields 401.
        return HttpResponse(status=401, data={
            'success': False,
            'reason': "Bad Access Token",
        })
def eod_market_risk_detail_default_close_pd_run(valuation_date):
    """Build the per-instrument market risk detail report for the default
    close pricing environment and upload it in one batch."""
    conn = utils.get_redis_conn(redis_ip)
    raw_positions = conn.get(EOD_CUSTOM_POSITION_ + PE_DEFAULT_CLOSE.lower())
    positions = JSONDecoder().decode(bytes.decode(raw_positions))
    if not positions:
        return
    auth_headers = utils.login(data_resource_ip, login_body)
    reports = eod_market_risk_detail_report_pd.eod_market_risk_detail_report(
        pd.DataFrame(positions))
    raw_contract_types = conn.get(EOD_BASIC_INSTRUMENT_CONTRACT_TYPE)
    contract_type_by_instrument = JSONDecoder().decode(
        bytes.decode(raw_contract_types))
    for report in reports:
        # Tag each row with the contract type of its underlyer.
        report['exfsid'] = contract_type_by_instrument.get(
            report.get('underlyerInstrumentId'))
    utils.call_request(
        report_ip, 'report-service',
        'rptMarketRiskDetailReportCreateBatch',
        {
            'reportName': '全市场分品种风险报告_交易-收盘-自然日',
            'valuationDate': valuation_date,
            'reports': reports,
        },
        auth_headers)
def eod_counter_party_market_risk_by_underlyer_default_close_pd_run(
        valuation_date):
    """Build the counterparty-by-underlyer market risk report for the
    default close pricing environment and upload it in one batch."""
    conn = utils.get_redis_conn(redis_ip)
    raw_positions = conn.get(EOD_CUSTOM_POSITION_ + PE_DEFAULT_CLOSE.lower())
    positions = JSONDecoder().decode(bytes.decode(raw_positions))
    if not positions:
        return
    raw_sub_companies = conn.get(EOD_BASIC_SUB_COMPANIES_FROM_ORACLE)
    sub_companies = JSONDecoder().decode(bytes.decode(raw_sub_companies))
    auth_headers = utils.login(data_resource_ip, login_body)
    reports = eod_market_risk_summary_report_pd.eod_counter_party_market_risk_by_underlyer_report(
        pd.DataFrame(positions), sub_companies)
    raw_contract_types = conn.get(EOD_BASIC_INSTRUMENT_CONTRACT_TYPE)
    contract_type_by_instrument = JSONDecoder().decode(
        bytes.decode(raw_contract_types))
    for report in reports:
        # Tag each row with the contract type of its underlyer.
        report['exfsid'] = contract_type_by_instrument.get(
            report.get('underlyerInstrumentId'))
    utils.call_request(
        report_ip, 'report-service',
        'rptCounterPartyMarketRiskByUnderlyerReportCreateBatch',
        {
            'reportName': '交易对手分品种风险报告_交易-收盘-自然日',
            'valuationDate': valuation_date,
            'reports': reports,
        },
        auth_headers)
def eod_market_risk_by_book_underlyer_pd_run(pricing_environment,
                                             valuation_date):
    """Build the book/underlyer market risk report for the given pricing
    environment and upload it in one batch."""
    conn = utils.get_redis_conn(redis_ip)
    raw_positions = conn.get(EOD_CUSTOM_POSITION_ + pricing_environment.lower())
    positions = JSONDecoder().decode(bytes.decode(raw_positions))
    if not positions:
        return
    auth_headers = utils.login(data_resource_ip, login_body)
    pe_description = utils.get_pricing_env_description(
        pricing_environment, data_resource_ip, auth_headers)
    reports = eod_market_risk_by_book_underlyer_report_pd.eod_market_risk_by_book_underlyer_report(
        pd.DataFrame(positions), pe_description)
    raw_contract_types = conn.get(EOD_BASIC_INSTRUMENT_CONTRACT_TYPE)
    contract_type_by_instrument = JSONDecoder().decode(
        bytes.decode(raw_contract_types))
    for report in reports:
        # Tag each row with the contract type of its underlyer.
        report['exfsid'] = contract_type_by_instrument.get(
            report.get('underlyerInstrumentId'))
    utils.call_request(
        report_ip, 'report-service',
        'rptMarketRiskBySubUnderlyerReportCreateBatch',
        {
            'reportName': MARKET_RISK_BY_BOOK_UNDERLYER_REPORT_ + pe_description,
            'valuationDate': valuation_date,
            'reports': reports,
        },
        auth_headers)
async def test_rest_verify(
    loop: AbstractEventLoop,
    aiohttp_client: pytest_aiohttp.TestClient,
    cf: T.Text,
    is_correct: bool,
    is_omocode: bool,
) -> None:
    """Test the /api/verify REST endpoint."""
    client = await aiohttp_client(get_app(loop))
    response = await client.get(f"/api/verify?cf={cf}")
    payload = JSONDecoder().decode(await response.text())
    expected = {"cf": cf, "isCorrect": is_correct, "isOmocode": is_omocode}
    for field, value in expected.items():
        assert payload.get(field) == value
def eod_classic_scenarios_pd_run(valuation_date):
    """Build the classic scenario market risk report for the default close
    pricing environment and upload it in one batch."""
    conn = utils.get_redis_conn(redis_ip)
    position = pd.read_msgpack(conn.get(EOD_BASIC_POSITIONS))
    if position.empty:
        return
    sub_companies = JSONDecoder().decode(
        bytes.decode(conn.get(EOD_BASIC_SUB_COMPANIES_FROM_ORACLE)))
    auth_headers = utils.login(data_resource_ip, login_body)
    pe_description = utils.get_pricing_env_description(
        PE_DEFAULT_CLOSE, data_resource_ip, auth_headers)
    reports = eod_classic_scenarios_report_pd.eod_classic_scenarios_report(
        position, sub_companies, data_resource_ip, auth_headers,
        valuation_date, PE_DEFAULT_CLOSE)
    contract_type_by_instrument = JSONDecoder().decode(
        bytes.decode(conn.get(EOD_BASIC_INSTRUMENT_CONTRACT_TYPE)))
    for report in reports:
        # Tag each row with the contract type of its underlyer.
        report['exfsid'] = contract_type_by_instrument.get(
            report.get('underlyerInstrumentId'))
    utils.call_request(
        report_ip, 'report-service',
        'rptMarketRiskDetailReportCreateBatch',
        {
            'reportName': CLASSIC_SCENARIO_MARKET_RISK_REPORT_ + pe_description,
            'valuationDate': valuation_date,
            'reports': reports,
        },
        auth_headers)
def lookup_direct(self, reference):
    """Look up a single album/artist/track reference via the web API.

    Returns the formatted result, or None for unsupported reference types
    (playlists), failed requests, malformed JSON, or API error responses.
    """
    if reference.type == u"album":
        endpoint = u"v1/albums/{id}"
    elif reference.type == u"artist":
        endpoint = u"v1/artists/{id}"
    elif reference.type == u"track":
        endpoint = u"v1/tracks/{id}"
    else:
        # Playlists and any other type are unsupported by this plugin.
        # FIX: the original assigned a playlist endpoint template
        # immediately before an unconditional `return None` — dead code,
        # removed.
        return None
    api_url = self.api_base_url + endpoint.format(id=reference.hash)
    response = utility.read_url(api_url)
    if not response:
        return None
    try:
        data = JSONDecoder().decode(response['data'])
    except ValueError:
        return None
    # A truthy "status" field indicates an API-level error payload.
    if data.get(u"status"):
        return None
    return self._format_result(reference.type, data)
def _validate_param(schema_param_name, schema_param_is_mandatory, schema_param_type, params): assert schema_param_name is not None assert schema_param_is_mandatory is not None assert schema_param_type is not None params_obj = JSONDecoder().decode(params) if params_obj.get(schema_param_name) is None: return False return True
def post(self, cluster_name):
    """Scale a Sahara cluster to *size* workers using the worker template
    named in the JSON body; aborts with 400 on bad input or failure."""
    try:
        project_name = request.args.get('project', "admin")
        json_data = request.get_json(force=True)
        # Round-trip through a JSON string to normalize into a plain dict.
        json_str = json.dumps(json_data)
        dict_data = JSONDecoder().decode(json_str)
        # FIX: removed a duplicated `project_name = request.args.get(...)`
        # statement that re-read the same query parameter.
        size = dict_data.get('size')
        worker_template_name = dict_data.get('workerTemplateName')
        if not size:
            abort(400, message="bad parameter in request body")
        output = shell.call(
            scale_sahara_cluster_cmd(project_name, cluster_name,
                                     worker_template_name, size)).strip()
        return output_json(output, 200)
    except Exception:
        log.exception(traceback.format_exc())
        abort(400, message="Request failed")
def dataReceived(self, data):
    """
    Parse data we've received and send to proof engine
    Expects JSON will be in the schema {"command":"", "userid":""}
    """
    req_data = JSONDecoder().decode(data)
    logging.debug("Got: %s", req_data)
    command = req_data.get('command')
    userid = req_data.get('userid')
    # Dispatch the command for this user to the proof engine.
    resp_data = self.handle_command(userid, command)
    # Default reply is the raw response payload.
    data = resp_data['response']
    if self.serialize:
        # NOTE(review): in serialized mode the response is parsed and the
        # whole envelope is JSON-encoded — placement of the encode inside
        # this branch matches the clients, which only JSON-decode when
        # serialize is set; confirm against the original layout.
        resp_data['response'] = self.do_parse(resp_data['response'])
        data = JSONEncoder().encode(resp_data)
    self.transport.write(data)
    self.transport.loseConnection()
def post(self, cluster_name):
    """Run a per-cluster setup script on the master node over SSH.

    JSON body: workDir (required), scriptName (optional, defaults to
    self.scriptName), sshKeyPath (required).  Aborts with 400 on bad
    parameters or any failure.
    """
    try:
        json_data = request.get_json(force=True)
        # Round-trip through a JSON string to normalize into a plain dict.
        json_str = json.dumps(json_data)
        dict_data = JSONDecoder().decode(json_str)
        workDir = dict_data.get('workDir')
        scriptName = dict_data.get('scriptName', self.scriptName)
        sshKeyPath = dict_data.get('sshKeyPath')
        # BUG FIX: project_name was never assigned in this handler (its
        # sibling handlers read it from the query string), so the next
        # call raised NameError and every request aborted with 400.
        project_name = request.args.get('project', "admin")
        masterIP = shell.call(
            get_sahara_cluster_s_masterIP_cmd(project_name,
                                              cluster_name)).strip()
        if not workDir or not sshKeyPath or not masterIP:
            abort(400, message="bad parameter in request body")
        output = shell.call("ssh -i %s centos@%s \"cd %s; /usr/bin/bash %s slave\"" \
            %(sshKeyPath, masterIP, workDir, scriptName))
        return output_json(output, 200)
    except Exception:
        log.exception(traceback.format_exc())
        abort(400, message="Request failed")
def post(self):
    """Create a Sahara cluster from a named template.

    JSON body: clusterName, template, keyPair, privateNetwork, image —
    all required.  Aborts with 400 on missing fields or any failure.
    """
    try:
        body = JSONDecoder().decode(json.dumps(request.get_json(force=True)))
        project_name = request.args.get('project', "admin")
        cluster_name = body.get('clusterName')
        template_name = body.get('template')
        key_pair = body.get('keyPair')
        neutron_private_network = body.get('privateNetwork')
        image = body.get('image')
        required = (project_name, cluster_name, template_name, key_pair,
                    neutron_private_network, image)
        if not all(required):
            abort(400, message="bad parameter in request body")
        cmd = create_sahara_cluster_from_template_cmd(
            project_name, cluster_name, template_name,
            key_pair, neutron_private_network, image)
        return output_json(shell.call(cmd).strip(), 200)
    except Exception:
        log.exception(traceback.format_exc())
        abort(400, message="Request failed")
def post(self, cluster_name):
    """Launch the optimizer script in the background against the
    cluster's slave IP file, SSH key and master IP."""
    try:
        ip_file = "/home/optimizer/%s/slave" % cluster_name
        project_name = request.args.get('project', "admin")
        body = JSONDecoder().decode(json.dumps(request.get_json(force=True)))
        sourceDir = body.get('sourceDir', self.sourceDir)
        scriptName = body.get('scriptName', self.scriptName)
        sshKeyPath = body.get('sshKeyPath')
        masterIP = shell.call(
            get_sahara_cluster_s_masterIP_cmd(project_name,
                                              cluster_name)).strip()
        if not sshKeyPath or not masterIP:
            abort(400, message="bad parameter in request body")
        # Run the script detached ('&') from its own source directory.
        command = "/usr/bin/bash %s/%s %s %s %s &" \
            %(sourceDir, scriptName, ip_file, sshKeyPath, masterIP)
        output = shell.call(command, workdir=sourceDir)
        return output_json(output, 200)
    except Exception:
        log.exception(traceback.format_exc())
        abort(400, message="Request failed")
def xml_prase(self, html_doc):
    # Extract keyword suggestions from a search result page: decode the
    # g_page_config JSON blob embedded in the 6th <script> tag, then
    # collect nav / sortbar / personalbar entries into self.words.
    # (Python 2 module; method name typo 'prase' kept for callers.)
    try:
        root = soupparser.fromstring(html_doc)
        # Slice the raw JSON out of the script body, between
        # 'g_page_config = ' and the g_srp_loadCss() call.
        sentence_group = root.xpath('//script')[5].xpath('text()')[0].split(';\n g_srp_loadCss();')[0].split('g_page_config = ')[1]
        g_page_config = JSONDecoder().decode(sentence_group)
        nav = g_page_config.get('mods').get('nav')
        if nav:
            nav_data = nav.get('data')
            # Common and advertising navigation entries plus sub-entries.
            if nav_data.get('common'):
                for common in nav_data.get('common'):
                    self.words.append(common.get('text'))
                    for word in common.get('sub'):
                        self.words.append(word.get('text'))
            if nav_data.get('adv'):
                for adv in nav_data.get('adv'):
                    self.words.append(adv.get('text'))
                    for word in adv.get('sub'):
                        self.words.append(word.get('text'))
        # Sort bar captions, tool-tips and filter titles.
        sortbar = g_page_config.get('mods').get('sortbar')
        for sortlist in sortbar.get('data').get('sortList'):
            self.words.append(sortlist.get('name'))
            self.words.append(sortlist.get('tip'))
        if sortbar.get('data').get('filter'):
            for filt in sortbar.get('data').get('filter'):
                self.words.append(filt.get('title'))
        # Personalized shop suggestions, when present.
        personalbar = g_page_config.get('mods').get('personalbar')
        if personalbar:
            personalbar_data = personalbar.get('data')
            if personalbar_data:
                if personalbar_data.get('metisData'):
                    for shopitems in personalbar_data.get('metisData').get('shopItems'):
                        self.words.append(shopitems.get('text'))
    except:
        # NOTE(review): bare except — any parse failure records the query
        # as failed and logs the traceback.
        self.err_words.append(self.query_text)
        print traceback.format_exc()
def do_login(self):
    # Log into the site with the addon's stored credentials; returns the
    # API's "success" flag, or True immediately when a session is already
    # valid.  Any failure is printed and treated as login failure.
    try:
        if self.check_login():
            return True
        data=(('email', self.addon.get_setting('username')),
              ('password', self.addon.get_setting('password')),
              ('forwardUrl', '/'))
        source = self.net.http_POST(self.base_url + '/doLogin.json?isCraftsyAjax=true', data).content
        response = JSONDecoder().decode(source)
        # Persist and re-apply session cookies for subsequent requests.
        self.net.save_cookies(self.cookie_file)
        self.net.set_cookies(self.cookie_file)
        return response.get("success")
    except Exception as e:
        print e
        return False
def send(command, serialize):
    # Send a command to the local coqd daemon (telnet, port 8003) as
    # userid 0 and print the reply.  (Python 2 module.)
    try:
        tn = telnetlib.Telnet('localhost', 8003)
        tn.write(JSONEncoder().encode(dict(userid=0, command=command)))
        if serialize:
            # Serialized mode: the daemon answers with a JSON envelope;
            # print only its 'response' field.
            proofst = JSONDecoder().decode(tn.read_all())
            proofst = proofst.get('response', None)
        else:
            # Raw mode: print the response bytes as-is.
            proofst = tn.read_all()
        print proofst
    except Exception:
        logging.error("Connection to coqd failed")
def post(self, cluster_name):
    """Reconfigure YARN resource limits (scheduler vcpus/memory and
    per-container vcpus/memory) across a Sahara cluster, then restart the
    selected services via the cluster_yarn.sh helper script."""
    try:
        project_name = request.args.get('project', "admin")
        json_data = request.get_json(force=True)
        # Round-trip through a JSON string to normalize into a plain dict.
        json_str = json.dumps(json_data)
        dict_data = JSONDecoder().decode(json_str)
        masterIP = shell.call(
            get_sahara_cluster_s_masterIP_cmd(project_name,
                                              cluster_name)).strip()
        sshKeyPath = dict_data.get('sshKeyPath')
        # Which services to bounce after reconfiguring; the
        # resourcemanager is always restarted.
        restartDataNode = dict_data.get('restartDataNode', False)
        restartNameNode = dict_data.get('restartNameNode', False)
        restartHistoryServer = dict_data.get('restartHistoryServer', False)
        restartNodeManager = dict_data.get('restartNodeManager', False)
        vcpuNum = dict_data.get('vcpuNum')
        memMB = dict_data.get('memMB')
        vcpuNumOfContainer = dict_data.get('vcpuNumOfContainer')
        memMBOfContainer = dict_data.get('memMBOfContainer')
        if not masterIP or not sshKeyPath or not vcpuNum or not memMB or not vcpuNumOfContainer or not memMBOfContainer:
            abort(400, message="bad parameter in request body")
        else:
            # NOTE(review): project_name is re-read here redundantly.
            project_name = request.args.get('project', "admin")
            slaveIPArray = shell.call(
                get_sahara_cluster_s_slavesIP_cmd(project_name,
                                                  cluster_name)).strip()
            slaveIPArrayStr = str(slaveIPArray).replace("\n", " ")
            restartServicesArrayStr = "resourcemanager%s%s%s%s" \
                % (" nodemanager" if to_bool(restartNodeManager) else "", \
                " namenode" if to_bool(restartNameNode) else "", \
                " datanode" if to_bool(restartDataNode) else "", \
                " historyserver" if to_bool(restartHistoryServer) else "")
            # Shell-style config consumed by cluster_yarn.sh; the escaped
            # quotes and backslashes must survive the `echo` below.
            script_setting_str = "master_ip=%s\nslave_ip_array=(%s)\ncluster_name=%s\nrestart_services_array=(%s)\nuser=centos\nssh_key=\\\"%s\\\"\nyarn_vcpu_new_value=\\\"<value>%s<\/value>\\\"\nyarn_mem_new_value=\\\"<value>%s<\/value>\\\"\nyarn_container_vcpu_new_value=\\\"<value>%s<\/value>\\\"\nyarn_container_mem_new_value=\\\"<value>%s<\/value>\\\"" \
                % (masterIP, slaveIPArrayStr, cluster_name, restartServicesArrayStr, sshKeyPath, str(vcpuNum), str(memMB), str(vcpuNumOfContainer), str(memMBOfContainer))
            work_path = "/home/optimizer/%s" % cluster_name
            if not os.path.exists(work_path):
                os.makedirs(work_path)
            # Write the config and a fresh copy of the helper script into
            # the per-cluster working directory, then run it.
            shell.call("echo \"%s\" > %s/cluster_yarn.conf" % (script_setting_str, work_path))
            shell.call(
                "/usr/bin/cp -f /home/optimizer/scripts/cluster_yarn.sh %s" % (work_path))
            output = shell.call(
                "/usr/bin/bash cluster_yarn.sh reconfigure",
                workdir=work_path)
            return output_json(output, 200)
    except Exception:
        log.exception(traceback.format_exc())
        abort(400, message="Request failed")
def listen_on_redis_pubsub():
    # Blocking worker: subscribe to the login channel on redis pub/sub
    # and, for each message, verify the auth token and broadcast a login
    # event back through redis.
    r = redis.StrictRedis(db=REDIS_PUBSUB_DB)
    p = r.pubsub(ignore_subscribe_messages=True)
    p.subscribe(REDIS_PUBSUB_TAG + ":->login")
    for m in p.listen():
        logger.info('receive message: {0}'.format(m))
        login_data = JSONDecoder().decode(m['data'])
        user_id = login_data.get('user_id')
        token = login_data.get('token')
        data = {
            'event': 'login',
            'login': True,
            'receiver_id': user_id,
        }
        if AuthService().check_auth_token(user_id, token):
            # Token valid: attach the serialized user to the event.
            user = UserService().get(id=user_id)
            data['user'] = UserService().serialize(user)
        else:
            data['login'] = False
        redis_tools.publish_redis_message(data)
        # NOTE(review): presumably closes a stale DB connection after each
        # message (long-lived worker loop) — confirm `connection` is the
        # ORM connection handle and that per-iteration close is intended.
        connection.close()
def detectiveFace(imgBase64):
    """Detect a single face in a base64-encoded image via the Baidu face
    detect API; on success crop the face, save it under student/tempImg
    and return {'state': 1, 'imgBase64': <cropped image base64>}.

    Returns {'state': 0} when exactly one face is not found; raises
    ConnectionError on an HTTP error from the API.
    """
    try:
        headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20100101 Firefox/23.0'}
        http_url = "https://aip.baidubce.com/rest/2.0/face/v3/detect"
        # NOTE(review): hard-coded access_token expires — move to config.
        data = {"access_token": "24.d29490bf4dafd261f7144c1b5f98c804.2592000.1561108982.282335-16318030",
                "Content-Type": "application/json",
                "image_type": "BASE64",
                "image": imgBase64}
        payload = urllib.parse.urlencode(data).encode('utf-8')
        api_request = urllib.request.Request(url=http_url, data=payload, headers=headers)
        # FIX: context manager closes the HTTP response on every path
        # (the original only closed it on the success path).
        with urllib.request.urlopen(api_request, timeout=5) as resp:
            content = resp.read().decode('utf-8')
        req_dict = JSONDecoder().decode(content)
        if req_dict["error_code"] == 0 and req_dict["error_msg"] == 'SUCCESS':
            if req_dict.get('result').get('face_num') == 1:
                location = req_dict.get('result').get('face_list')[0].get('location')
                left = location['left']
                top = location['top']
                width = location['width']
                height = location['height']
                # FIX: removed an unused `matplotlib.pyplot` import that
                # was executed here on every successful detection.
                img = base64_to_image(imgBase64)
                imgCrop = img.crop((left, top, left + width, top + height))
                savePath = os.path.join('student/tempImg/test.jpg')
                imgCrop = imgCrop.convert('RGB')
                imgCrop.save(savePath)
                base64_str = image_to_base64(savePath)
                return {'state': 1, 'imgBase64': base64_str}
        return {'state': 0}
    except urllib.error.HTTPError:
        raise ConnectionError
def post(self, cluster_name):
    """Install/restart Ganglia monitoring across a Sahara cluster by
    writing a per-cluster config and invoking cluster_yarn.sh."""
    try:
        project_name = request.args.get('project', "admin")
        json_data = request.get_json(force=True)
        # Round-trip through a JSON string to normalize into a plain dict.
        json_str = json.dumps(json_data)
        dict_data = JSONDecoder().decode(json_str)
        masterIP = shell.call(
            get_sahara_cluster_s_masterIP_cmd(project_name,
                                              cluster_name)).strip()
        sshKeyPath = dict_data.get('sshKeyPath')
        rpms_dir = dict_data.get('rpmsDir', '/home/optimizer/rpms/ganglia-rpms')
        if not masterIP or not sshKeyPath:
            abort(400, message="bad parameter in request body")
        else:
            # NOTE(review): project_name is re-read here redundantly.
            project_name = request.args.get('project', "admin")
            slaveIPArray = shell.call(
                get_sahara_cluster_s_slavesIP_cmd(project_name,
                                                  cluster_name)).strip()
            slaveIPArrayStr = str(slaveIPArray).replace("\n", " ")
            # Shell-style settings file consumed by cluster_yarn.sh; the
            # escaped quotes must survive the `echo` below.
            script_setting_str = "master_ip=%s\nslave_ip_array=(%s)\ncluster_name=%s\nuser=centos\nssh_key=\\\"%s\\\"\nganglia_rpms=\\\"%s\\\"\n" \
                % (masterIP, slaveIPArrayStr, cluster_name, sshKeyPath, rpms_dir)
            work_path = "/home/optimizer/%s" % cluster_name
            if not os.path.exists(work_path):
                os.makedirs(work_path)
            shell.call("echo \"%s\" > %s/cluster_yarn.conf" % (script_setting_str, work_path))
            shell.call(
                "/usr/bin/cp -f /home/optimizer/scripts/cluster_yarn.sh %s" % (work_path))
            output = shell.call(
                "/usr/bin/bash cluster_yarn.sh restart-ganglia",
                workdir=work_path)
            return output_json(output, 200)
    except Exception:
        log.exception(traceback.format_exc())
        abort(400, message="Request failed")
def get_next_page_comment(self, article_id):
    # Fetch the newest comments for a detik.com article from the comment
    # GraphQL endpoint and append unseen ones to data.csv, remembering
    # the newest comment id per article in history.csv.
    res = requests.get('https://newcomment.detik.com/graphql?query='
                       '{search(type:"comment",size:' + str(self.comment_count) + ',page:1,sort:"newest",'
                       'query:[{name:"news.artikel",terms:"' + article_id + '"},{name:"news.site",terms:"dtk"}]){'
                       'paging counterparent hits{'
                       'posisi results{'
                       'id content news create_date}}}}')
    if res.status_code != 200:
        self.req_error = True
        return
    else:
        self.req_error = False
        py_obj = JSONDecoder().decode(res.text)
        try:
            result = py_obj.get('data').get('search').get('hits').get(
                'results')
            for r in result:
                with open('data.csv', 'a', encoding='utf-8') as myfile:
                    if article_id in self.history:
                        # Already-seen article: write comments until we
                        # reach the one recorded as newest last time.
                        if r.get('id') != self.history[article_id]:
                            try:
                                myfile.write(article_id + ';' +
                                             r.get('news').get('date') + ';' +
                                             r.get('content') + ';' +
                                             r.get('create_date') + '\n')
                            except:
                                pass
                        else:
                            break
                    else:
                        # First time we see this article: write the
                        # comment and record its id as the newest seen.
                        try:
                            myfile.write(article_id + ';' +
                                         r.get('news').get('date') + ';' +
                                         r.get('content') + ';' +
                                         r.get('create_date') + '\n')
                            self.history[article_id] = r.get('id')
                        except:
                            pass
            self.req_error = False
        except:
            # NOTE(review): bare except — any missing key in the payload
            # marks the request as failed and skips persisting history.
            self.req_error = True
            return
        # Persist the per-article newest-comment-id history.
        with open('history.csv', 'w') as history_file:
            for key, value in self.history.items():
                history_file.write(key + ';' + value + ';\n')
def exec_cmd(command, session):
    """Send *command* to the coqd daemon over telnet on behalf of the
    session's user and return the reply (decoded 'response' field when
    serialization is configured, raw bytes otherwise).

    Raises ProofException when the daemon cannot be reached.
    """
    host = g.config.get('COQD_HOST')
    port = g.config.get('COQD_PORT')
    try:
        conn = telnetlib.Telnet(host, port)
        envelope = dict(userid=str(session['id']), command=command)
        conn.write(JSONEncoder().encode(envelope))
        raw = conn.read_all()
        if g.config.get('serialize'):
            return JSONDecoder().decode(raw).get('response', None)
        return raw
    except Exception:
        logging.error("Connection to coqd failed")
        raise ProofException()
class Config:
    """Simple JSON-backed key/value configuration store."""

    def __init__(self, path, values = None):
        # Remember the backing file and start from the supplied mapping
        # (or an empty one when none/empty is given).
        self.path = path
        self.values = values if values else {}

    def load(self):
        """Read and decode the backing file.

        Returns False on I/O or JSON parse errors, True otherwise.
        """
        try:
            settings = codecs.open(self.path, "r", "utf8").read()
        except IOError:
            return False
        try:
            self.values = JSONDecoder().decode(settings)
        except ValueError:
            return False
        return True

    def save(self):
        """Encode and write the current values; False on I/O error."""
        try:
            out = codecs.open(self.path, "w", "utf8")
            out.write(JSONEncoder().encode(self.values))
            out.close()
        except IOError:
            return False
        return True

    def get(self, key, default):
        """Return the value stored under *key*, or *default*."""
        return self.values.get(key, default)

    def set(self, key, value):
        """Store *value* under *key* (in memory only; call save())."""
        self.values[key] = value
def readConfigFile(self, fileName):
    """Load the JSON configuration file *fileName*, populate the derived
    settings (temperatures, base scheduler tasks, heating tasks) and
    return the raw file contents."""
    # FIX: context manager guarantees the handle is closed even when
    # read() raises (the original leaked it on error).
    with open(fileName, 'r') as configurationFile:
        configuration = configurationFile.read()
    content = JSONDecoder().decode(configuration)
    self.configuration = content.get('configuration')
    self.__logMsg(logging.INFO, "Reading configuration...")
    self.temperatures = self.configuration.get('temperatures')
    self.__logMsg(logging.INFO, "Temperatures: %s" % (self.temperatures))
    self.intervallSettings = self.configuration.get('scheduler').get('baseTasks')
    self.__logMsg(logging.INFO, "IntervallSettings: %s" % (self.intervallSettings))
    self.heatingTasks = self.configuration.get('scheduler').get('heatingTasks').get("task")
    self.__logMsg(logging.INFO, "HeatingTasks: %s" % (len(self.heatingTasks)))
    return configuration
def eod_spot_scenarios_by_market_pd_run(pricing_environment, valuation_date):
    """Build spot scenario reports per market, delete the day's existing
    rows, then upload the new rows in chunks of 1000."""
    conn = utils.get_redis_conn(redis_ip)
    position = pd.read_msgpack(conn.get(EOD_BASIC_POSITIONS))
    if position.empty:
        return
    sub_companies = JSONDecoder().decode(
        bytes.decode(conn.get(EOD_BASIC_SUB_COMPANIES_FROM_ORACLE)))
    auth_headers = utils.login(data_resource_ip, login_body)
    pe_description = utils.get_pricing_env_description(
        pricing_environment, data_resource_ip, auth_headers)
    reports = eod_spot_scenarios_by_market_report_pd.eod_spot_scenarios_by_market_report(
        pd.DataFrame(position), data_resource_ip, auth_headers,
        pe_description, sub_companies, valuation_date, pricing_environment)
    contract_type_by_instrument = JSONDecoder().decode(
        bytes.decode(conn.get(EOD_BASIC_INSTRUMENT_CONTRACT_TYPE)))
    for report in reports:
        # Tag each row with the contract type of its instrument.
        report['exfsid'] = contract_type_by_instrument.get(
            report.get('instrumentId'))
    # Replace any previously uploaded rows for this valuation date.
    utils.call_request(report_ip, 'report-service',
                       'rptDeleteSpotScenariosReportByDate',
                       {'valuationDate': valuation_date}, auth_headers)
    chunk_size = 1000
    for start in range(0, len(reports), chunk_size):
        utils.call_request(
            report_ip, 'report-service',
            'rptSpotScenariosReportCreateBatch',
            {
                'valuationDate': valuation_date,
                'reports': reports[start:start + chunk_size],
            },
            auth_headers)
import requests
import testDraw
from testDraw import photoDraw
from json import JSONDecoder

# Create a Face++ faceset (outer_id "faceName") via the API.
http_url = "https://api-cn.faceplusplus.com/facepp/v3/faceset/create"
# NOTE(review): hard-coded API credentials — move to configuration/env
# and rotate these keys.
key = "JPofXVcqsEro1KO7pseh9H2KFXLuTjns"
secret = "vL2SaCF41T78-S-iz0DI-v4rrGxn1cuP"
# Windows-specific sample image path; not used by this request.
filepath = "F:\\testPh\\liudehua.jpg"
outer = "faceName"
data = {"api_key": key, "api_secret": secret, "outer_id": outer}
# files = {"image_file": open(filepath, "rb")}
response = requests.post(http_url, data=data)
req_con = response.content.decode('utf-8')
req_dict = JSONDecoder().decode(req_con)
print(req_dict)
# Abort the script when the API reports an error.
if req_dict.__contains__("error_message"):
    print(req_dict.get("error_message"))
    exit(1)
class AttestationToken(Generic[T]):
    """ Represents a token returned from the attestation service.

    :keyword Any body: The body of the newly created token, if provided.
    :keyword azure.security.attestation.AttestationSigningKey signer: If specified, the key
        used to sign the token.
        If the `signer` property is not specified, the token created is unsecured.
    :keyword str token: If no body or signer is provided, the string representation of the token.
    :keyword Type body_type: The underlying type of the body of the 'token' parameter, used to deserialize
        the underlying body when parsing the token.
    """

    def __init__(self, **kwargs):
        token = kwargs.get('token')
        if token is None:
            # No serialized token supplied - build one from the body: signed
            # when a signing key was given, unsecured ("alg":"none") otherwise.
            body = kwargs.get('body')  # type: Any
            signer = kwargs.get('signer')  # type: AttestationSigningKey
            if signer:
                token = self._create_secured_jwt(body, signer)
            else:
                token = self._create_unsecured_jwt(body)
        self._token = token
        self._body_type = kwargs.get('body_type')  # type: Type
        token_parts = token.split('.')
        if len(token_parts) != 3:
            raise ValueError("Malformed JSON Web Token")
        self.header_bytes = Base64Url.decode(token_parts[0])
        self.body_bytes = Base64Url.decode(token_parts[1])
        self.signature_bytes = Base64Url.decode(token_parts[2])
        # An empty body section is legal (unsecured token with no payload).
        if len(self.body_bytes) != 0:
            self._body = JSONDecoder().decode(self.body_bytes.decode('ascii'))
        else:
            self._body = None
        self._header = JSONDecoder().decode(self.header_bytes.decode('ascii'))

    def __str__(self):
        return self._token

    @property
    def algorithm(self):
        #type:() -> Union[str, None]
        """ Json Web Token Header "alg".

        See `RFC 7515 Section 4.1.1 <https://www.rfc-editor.org/rfc/rfc7515.html#section-4.1.1>`_ for details.

        If the value of algorithm is "none" it indicates that the token is unsecured.
        """
        return self._header.get('alg')

    @property
    def key_id(self):
        #type:() -> Union[str, None]
        """ Json Web Token Header "kid".

        See `RFC 7515 Section 4.1.4 <https://www.rfc-editor.org/rfc/rfc7515.html#section-4.1.4>`_ for details.
        """
        return self._header.get('kid')

    @property
    def expiration_time(self):
        #type:() -> Union[datetime, None]
        """ Expiration time for the token.

        NOTE(review): converted with datetime.fromtimestamp, i.e. naive local
        time; all comparisons in this class use datetime.now() so they are
        consistent with each other - confirm before mixing with aware times.
        """
        exp = self._body.get('exp')
        if exp:
            return datetime.fromtimestamp(exp)
        return None

    @property
    def not_before_time(self):
        #type:() -> Union[datetime, None]
        """ Time before which the token is invalid.
        """
        nbf = self._body.get('nbf')
        if nbf:
            return datetime.fromtimestamp(nbf)
        return None

    @property
    def issuance_time(self):
        #type:() -> Union[datetime, None]
        """ Time when the token was issued.
        """
        iat = self._body.get('iat')
        if iat:
            return datetime.fromtimestamp(iat)
        return None

    @property
    def content_type(self):
        #type:() -> Union[str, None]
        """ Json Web Token Header "content type".

        See `RFC 7515 Section 4.1.10 <https://www.rfc-editor.org/rfc/rfc7515.html#section-4.1.10>`_ for details.
        """
        return self._header.get('cty')

    @property
    def critical(self):
        #type:() -> Optional[bool]
        """ Json Web Token Header "Critical".

        See `RFC 7515 Section 4.1.11 <https://www.rfc-editor.org/rfc/rfc7515.html#section-4.1.11>`_ for details.
        """
        return self._header.get('crit')

    @property
    def key_url(self):
        #type:() -> Union[str, None]
        """ Json Web Token Header "Key URL".

        See `RFC 7515 Section 4.1.2 <https://www.rfc-editor.org/rfc/rfc7515.html#section-4.1.2>`_ for details.
        """
        return self._header.get('jku')

    @property
    def x509_url(self):
        #type:() -> Union[str, None]
        """  Json Web Token Header "X509 URL".

        See `RFC 7515 Section 4.1.5 <https://www.rfc-editor.org/rfc/rfc7515.html#section-4.1.5>`_ for details.
        """
        return self._header.get('x5u')

    @property
    def type(self):
        #type:() -> Union[str, None]
        """ Json Web Token Header "typ".

        `RFC 7515 Section 4.1.9 <https://www.rfc-editor.org/rfc/rfc7515.html#section-4.1.9>`_ for details.
        """
        return self._header.get('typ')

    @property
    def certificate_thumbprint(self):
        #type:() -> Union[str, None]
        """ The "thumbprint" of the certificate used to sign the request.

        `RFC 7515 Section 4.1.7 <https://www.rfc-editor.org/rfc/rfc7515.html#section-4.1.7>`_ for details.
        """
        return self._header.get('x5t')

    @property
    def certificate_sha256_thumbprint(self):
        #type:() -> Union[str, None]
        """ The "thumbprint" of the certificate used to sign the request generated using the SHA256 algorithm.

        `RFC 7515 Section 4.1.8 <https://www.rfc-editor.org/rfc/rfc7515.html#section-4.1.8>`_ for details.
        """
        return self._header.get('x5t#256')

    @property
    def issuer(self):
        #type:() -> Union[str, None]
        """ Json Web Token "iss" claim.

        `RFC 7519 Section 4.1.1 <https://www.rfc-editor.org/rfc/rfc7519.html#section-4.1.1>`_ for details.
        """
        return self._body.get('iss')

    @property
    def x509_certificate_chain(self):
        #type:() -> Union[list[str], None]
        """ An array of Base64 encoded X.509 certificates which represent a certificate chain used to sign the token.

        See `RFC 7515 Section 4.1.6 <https://www.rfc-editor.org/rfc/rfc7515.html#section-4.1.6>`_ for details.
        """
        return self._header.get('x5c')

    def _json_web_key(self):
        #type:() -> Union[JSONWebKey, None]
        # Deserialize the "jwk" header, if any, into a JSONWebKey object.
        jwk = self._header.get('jwk')
        return JSONWebKey.deserialize(jwk)

    def serialize(self):
        """ Serialize the JSON Web Token to a string"""
        return self._token

    def validate_token(self, options=None, signers=None):
        # type: (TokenValidationOptions, list[AttestationSigner]) -> bool
        """ Validate the attestation token based on the options specified in the
        :class:`TokenValidationOptions`.

        :param azure.security.attestation.TokenValidationOptions options: Options to be used when validating
            the token.
        :param list[azure.security.attestation.AttestationSigner] signers: Potential signers for the token.
            If the signers parameter is specified, validate_token will only
            consider the signers as potential signatories for the token, otherwise
            it will consider attributes in the header of the token.
        :return bool: Returns True if the token successfully validated, False
            otherwise.

        :raises: azure.security.attestation.AttestationTokenValidationException
        """
        if (options is None):
            options = TokenValidationOptions(validate_token=True,
                                             validate_signature=True,
                                             validate_expiration=True)
        if not options.validate_token:
            # Token validation is disabled: still honor the static checks and
            # give the caller's callback a chance to inspect the token.
            self._validate_static_properties(options)
            if (options.validation_callback is not None):
                options.validation_callback(self, None)
            return True

        signer = None
        if self.algorithm != 'none' and options.validate_signature:
            # validate the signature for the token.
            candidate_certificates = self._get_candidate_signing_certificates(
                signers)
            signer = self._validate_signature(candidate_certificates)
            if (signer is None):
                raise AttestationTokenValidationException(
                    "Could not find the certificate used to sign the token.")
        self._validate_static_properties(options)

        if (options.validation_callback is not None):
            if options.validation_callback(self, signer):
                return True
            raise AttestationTokenValidationException(
                "User validation callback failed the validation request.")
        return True

    def get_body(self):
        # type: () -> T
        """ Returns the body of the attestation token as an object.
        """
        # EAFP: prefer the caller-supplied body type's deserializer, fall back
        # to the raw parsed JSON when no deserializer is available.
        try:
            return self._body_type.deserialize(self._body)
        except AttributeError:
            return self._body

    def _get_candidate_signing_certificates(self, signing_certificates):
        # type: (list[AttestationSigner]) -> list[AttestationSigner]
        """ Collect the AttestationSigner candidates which may have signed
        this token, preferring an exact "kid" match when one is present.
        """
        candidates = []
        desired_key_id = self.key_id
        if desired_key_id is not None:
            # BUG FIX: guard against signing_certificates being None - the
            # caller passes the `signers` argument through unchecked.
            for signer in (signing_certificates or []):
                if (signer.key_id == desired_key_id):
                    candidates.append(signer)
                    break
            # If we didn't find a matching key ID in the supplied certificates,
            # try the JWS header to see if there might be a corresponding key.
            if (len(candidates) == 0):
                jwk = self._json_web_key()
                if jwk is not None:
                    if jwk.kid == desired_key_id:
                        if (jwk.x5_c):
                            signers = jwk.x5_c
                            candidates.append(
                                AttestationSigner(signers, desired_key_id))
        else:
            # We don't have a signer, so we need to try every possible signer.
            # If the caller provided a list of certificates, use that as the exclusive source,
            # otherwise iterate through the possible certificates.
            if signing_certificates is not None:
                candidates.extend(signing_certificates)
            else:
                jwk = self._json_web_key()
                # BUG FIX: jwk may be None when the token carries no "jwk"
                # header; the original dereferenced it unconditionally.
                if jwk is not None and jwk.x5_c is not None:
                    candidates.append(AttestationSigner(jwk.x5_c, None))
                # BUG FIX: the original appended the raw x5c list (possibly
                # None) into a list that _validate_signature treats as
                # AttestationSigner objects; wrap it, and skip when absent.
                x5c = self.x509_certificate_chain
                if x5c is not None:
                    candidates.append(AttestationSigner(x5c, None))
        return candidates

    def _get_certificates_from_x5c(self, x5clist):
        # type:(list[str]) -> list[Certificate]
        """ Decode a JWS "x5c" list of base64 certificates into DER bytes. """
        return [base64.b64decode(b64cert) for b64cert in x5clist]

    def _validate_signature(self, candidate_certificates):
        # type:(list[AttestationSigner]) -> AttestationSigner
        """ Try each candidate signer against the token signature; return the
        first signer whose key verifies it, or None when none match.
        """
        signed_data = Base64Url.encode(
            self.header_bytes) + '.' + Base64Url.encode(self.body_bytes)
        for signer in candidate_certificates:
            cert = load_der_x509_certificate(signer.certificates[0],
                                             backend=default_backend())
            signer_key = cert.public_key()
            # Try to verify the signature with this candidate.
            # If it doesn't work, try the next signer.
            try:
                if isinstance(signer_key, RSAPublicKey):
                    signer_key.verify(self.signature_bytes,
                                      signed_data.encode('utf-8'),
                                      padding.PKCS1v15(), SHA256())
                else:
                    signer_key.verify(self.signature_bytes,
                                      signed_data.encode('utf-8'), SHA256())
                return signer
            except Exception:
                # BUG FIX: the original used a bare `except:` that immediately
                # re-raised, contradicting the comment above and making the
                # trailing `return None` unreachable. Move on to the next
                # candidate; the caller raises a descriptive validation error
                # when no candidate verifies.
                continue
        return None

    def _validate_static_properties(self, options):
        # type:(TokenValidationOptions) -> bool
        """ Validate the static properties in the attestation token.
        """
        if self._body:
            time_now = datetime.now()
            if options.validate_expiration and self.expiration_time is not None:
                if (time_now > self.expiration_time):
                    delta = time_now - self.expiration_time
                    if delta.total_seconds() > options.validation_slack:
                        # BUG FIX: this message previously said "Not Before"
                        # and formatted self.not_before_time, which reports
                        # the wrong claim and raises AttributeError when the
                        # token has no "nbf" claim.
                        raise AttestationTokenValidationException(
                            u'Token is expired. Now: {}, Expiration time: {}'.
                            format(time_now.isoformat(),
                                   self.expiration_time.isoformat()))
            if options.validate_not_before and hasattr(
                    self,
                    'not_before_time') and self.not_before_time is not None:
                if (time_now < self.not_before_time):
                    delta = self.not_before_time - time_now
                    if delta.total_seconds() > options.validation_slack:
                        raise AttestationTokenValidationException(
                            u'Token is not yet valid. Now: {}, Not Before: {}'.
                            format(time_now.isoformat(),
                                   self.not_before_time.isoformat()))
            if options.validate_issuer and hasattr(
                    self, 'issuer') and self.issuer is not None:
                if (options.issuer != self.issuer):
                    raise AttestationTokenValidationException(
                        u'Issuer in token: {} is not the expected issuer: {}.'.
                        format(self.issuer, options.issuer))
        return True

    @staticmethod
    def _create_unsecured_jwt(body):
        # type: (Any) -> str
        """ Return an unsecured JWT expressing the body.
        """
        # Base64Url encoded '{"alg":"none"}'. See https://www.rfc-editor.org/rfc/rfc7515.html#appendix-A.5 for more information.
        return_value = "eyJhbGciOiJub25lIn0."

        # Try to serialize the body by asking the body object to serialize itself.
        # This normalizes the attributes in the body object to conform to the serialized attributes used
        # for transmission to the service.
        try:
            body = body.serialize()
        except AttributeError:
            pass
        json_body = ''
        if body is not None:
            json_body = JSONEncoder().encode(body)

        return_value += Base64Url.encode(json_body.encode('utf-8'))
        # Unsecured tokens carry an empty signature section.
        return_value += '.'
        return return_value

    @staticmethod
    def _create_secured_jwt(body, signer):
        # type: (Any, AttestationSigningKey) -> str
        """ Return a secured JWT expressing the body, secured with the specified signing key.

        :param Any body: The body of the token to be serialized.
        :param AttestationSigningKey signer: the certificate and key to sign the token.
        """
        header = {
            # NOTE(review): "RSA256"/"ECDH256" are not registered JWS "alg"
            # values (RFC 7518 registers "RS256"/"ES256"); kept as-is because
            # changing them alters every emitted token - confirm against the
            # service contract before fixing.
            "alg":
            "RSA256" if isinstance(signer._signing_key, RSAPrivateKey) else
            "ECDH256",
            "jwk": {
                "x5c": [
                    base64.b64encode(
                        signer._certificate.public_bytes(
                            Encoding.DER)).decode('utf-8')
                ]
            }
        }
        json_header = JSONEncoder().encode(header)
        return_value = Base64Url.encode(json_header.encode('utf-8'))

        try:
            body = body.serialize()
        except AttributeError:
            pass
        json_body = ''
        if body is not None:
            json_body = JSONEncoder().encode(body)
        return_value += '.'
        return_value += Base64Url.encode(json_body.encode('utf-8'))

        # Now we want to sign the return_value.
        if isinstance(signer._signing_key, RSAPrivateKey):
            signature = signer._signing_key.sign(return_value.encode('utf-8'),
                                                 algorithm=SHA256(),
                                                 padding=padding.PKCS1v15())
        else:
            signature = signer._signing_key.sign(return_value.encode('utf-8'),
                                                 algorithm=SHA256())
        # And finally append the base64url encoded signature.
        return_value += '.'
        return_value += Base64Url.encode(signature)
        return return_value
outer = "faceName"
filePath = "./photod/cap0.jpg"
data = {"api_key": key, "api_secret": secret, "outer_id": outer}
# BUG FIX: open the image inside a context manager so the file descriptor is
# closed even when the request fails (the original leaked the handle).
with open(filePath, "rb") as image_file:
    response = requests.post(http_url, data=data,
                             files={"image_file": image_file})
req_con = response.content.decode('utf-8')
req_dict = JSONDecoder().decode(req_con)
print(req_dict)
# Abort when the service reports an error. Uses the idiomatic membership
# test instead of calling req_dict.__contains__ directly, and raises
# SystemExit rather than relying on the site-provided exit() helper.
if "error_message" in req_dict:
    print(req_dict.get("error_message"))
    raise SystemExit(1)
# Create the drawing helper for the captured photo.
Draw = photoDraw(filePath)
# Extract the face-rectangle data from the JSON response and draw a box
# around every detected face (iterate directly instead of range(len(...))).
for face in req_dict.get("faces"):
    rect = face.get("face_rectangle")
    Draw.Draw(rect.get("top"), rect.get("left"),
              rect.get("width"), rect.get("height"))
# NOTE(review): 'results' appears in face-search style responses; this line
# raises TypeError when the key is absent - confirm which endpoint http_url
# points at when this block runs.
face_token_result = req_dict.get('results')[0].get('face_token')
def parse(self, code):
    """Parse *code* as JSON and store its 'main' entry on the instance.

    Malformed JSON is tolerated: the instance's `main` attribute is then
    set to None, exactly as for a valid document without a 'main' key.
    """
    decoded = {}
    try:
        decoded = JSONDecoder().decode(code)
    except ValueError:
        # Invalid JSON - fall through with the empty mapping.
        pass
    self.main = decoded.get('main')
def post(self, cluster_name):
    """Analyse a finished Hadoop job on the named Sahara cluster.

    Pulls the job-history (.jhist) file from HDFS, converts it with Rumen
    on the cluster master, copies the resulting trace/topology JSON files
    locally, reads the YARN scheduler limits from yarn-site.xml, and feeds
    everything into self._analysis_jhist_json. Returns the analysis as JSON
    with HTTP 200, or aborts with 400 on bad input or any failure.
    """
    try:
        # Local working directory for this cluster's downloaded JSON files.
        work_path = "/home/optimizer/%s" % cluster_name
        if not os.path.exists(work_path):
            os.makedirs(work_path)
        project_name = request.args.get('project', "admin")
        # Round-trip the request body through dumps/decode to obtain a plain
        # dict regardless of the incoming JSON flavour.
        json_data = request.get_json(force=True)
        json_str = json.dumps(json_data)
        dict_data = JSONDecoder().decode(json_str)
        jobID = dict_data.get('jobID')
        # NOTE(review): workDir is validated below but never used afterwards,
        # and scriptName is read but unused in this method - confirm whether
        # they are dead parameters.
        workDir = dict_data.get('workDir')
        compute_node_max_cpu_core = dict_data.get('computeNodeMaxCpuCore')
        compute_node_max_memory_gb = dict_data.get(
            'computeNodeMaxMemoryGb')
        compute_node_num = dict_data.get('computeNodeNum')
        scriptName = dict_data.get('scriptName', self.scriptName)
        sshKeyPath = dict_data.get('sshKeyPath')
        # Resolve the cluster master's IP via the OpenStack/Sahara CLI.
        masterIP = shell.call(
            get_sahara_cluster_s_masterIP_cmd(project_name,
                                              cluster_name)).strip()
        if not workDir or not sshKeyPath or not masterIP or not jobID or not compute_node_max_cpu_core or not compute_node_max_memory_gb or not compute_node_num:
            abort(400, message="bad parameter in request body")
        # NOTE(review): request-supplied values (sshKeyPath, jobID, ...) are
        # interpolated into shell commands unquoted - command-injection risk
        # if this API is reachable by untrusted callers.
        jhist_json_path = "%s/%s-trace.json" % (work_path, jobID)
        topology_json_path = "%s/%s-topology.json" % (work_path, jobID)
        # Locate the job's .jhist file in HDFS on the master node.
        jhist_path = shell.call("ssh -i %s centos@%s \"sudo su - -c \'%s\' hadoop\"" \
            %(sshKeyPath, masterIP, find_jhist_file_in_hdfs_cmd(jobID))).strip()
        # Convert the history file to trace/topology JSON with Hadoop Rumen.
        shell.call("ssh -i %s centos@%s \"sudo su - -c \'%s\' hadoop\"" \
            %(sshKeyPath, masterIP, analysis_job_with_hadoop_rumen_cmd(cluster_name, jobID, jhist_path)))
        # Stream both generated files back into the local work directory.
        shell.call("ssh -i %s centos@%s \"sudo su - -c \'cat /home/hadoop/%s/%s-trace.json\' hadoop\" > %s" \
            % (sshKeyPath, masterIP, cluster_name, jobID, jhist_json_path))
        shell.call("ssh -i %s centos@%s \"sudo su - -c \'cat /home/hadoop/%s/%s-topology.json\' hadoop\" > %s" \
            % (sshKeyPath, masterIP, cluster_name, jobID, topology_json_path))
        # Read the YARN scheduler limits from yarn-site.xml; each grep takes
        # the line AFTER the property name (its <value> line).
        get_yarn_max_cpu = shell.call(
            "ssh -i %s centos@%s \"sudo grep -A 1 \'yarn.scheduler.maximum-allocation-vcores\' /opt/hadoop/etc/hadoop/yarn-site.xml | tail -1 \""
            % (sshKeyPath, masterIP)).strip()
        get_yarn_max_memory_mb = shell.call(
            "ssh -i %s centos@%s \"sudo grep -A 1 \'yarn.scheduler.maximum-allocation-mb\' /opt/hadoop/etc/hadoop/yarn-site.xml | tail -1 \""
            % (sshKeyPath, masterIP)).strip()
        get_yarn_container_cpu = shell.call(
            "ssh -i %s centos@%s \"sudo grep -A 1 \'yarn.scheduler.minimum-allocation-vcores\' /opt/hadoop/etc/hadoop/yarn-site.xml | tail -1 \""
            % (sshKeyPath, masterIP)).strip()
        get_yarn_container_memory_mb = shell.call(
            "ssh -i %s centos@%s \"sudo grep -A 1 \'yarn.scheduler.minimum-allocation-mb\' /opt/hadoop/etc/hadoop/yarn-site.xml | tail -1 \""
            % (sshKeyPath, masterIP)).strip()
        # Strip everything but digits from the <value> lines; fall back to
        # YARN's defaults when a property was not found.
        yarn_max_cpu = int(re.sub(
            r'\D', '', get_yarn_max_cpu)) if get_yarn_max_cpu else 8
        yarn_max_memory_mb = int(re.sub(
            r'\D', '',
            get_yarn_max_memory_mb)) if get_yarn_max_memory_mb else 8192
        yarn_container_cpu = int(re.sub(
            r'\D', '',
            get_yarn_container_cpu)) if get_yarn_container_cpu else 1
        yarn_container_memory_mb = int(
            re.sub(r'\D', '', get_yarn_container_memory_mb)
        ) if get_yarn_container_memory_mb else 1024
        yarn_cluster_workers_number = self._get_workers_number_from_topology_json(
            topology_json_path)
        # Count hosts matching the cluster name in /etc/hosts, minus one for
        # the master, to get the live worker count.
        actual_workers = int(
            shell.call(
                "ssh -i %s centos@%s \"grep %s /etc/hosts | wc -l\"" %
                (sshKeyPath, masterIP, cluster_name)).strip()) - 1
        output = self._analysis_jhist_json(
            jhist_json_path, yarn_cluster_workers_number, actual_workers,
            yarn_max_memory_mb, yarn_max_cpu, yarn_container_memory_mb,
            yarn_container_cpu, compute_node_max_memory_gb,
            compute_node_max_cpu_core, compute_node_num)
        return output_json(output, 200)
    except Exception:
        # Broad catch: any failure (SSH, parsing, missing files) becomes a
        # uniform 400 so cluster internals are not leaked to the caller.
        log.exception(traceback.format_exc())
        abort(400, message="Request failed")
def post(self):
    """Create a Sahara cluster template from the JSON request body.

    Creates a master node-group template and a worker node-group template
    via the Sahara CLI, then combines them into a cluster template. Returns
    the CLI output with HTTP 200, or aborts with 400 on missing parameters
    or any failure.
    """
    try:
        # Round-trip the request body through dumps/decode to obtain a plain
        # dict regardless of the incoming JSON flavour.
        json_data = request.get_json(force=True)
        json_str = json.dumps(json_data)
        dict_data = JSONDecoder().decode(json_str)
        project_name = request.args.get('project', "admin")
        # Master node-group template parameters.
        master_node_template_name = dict_data.get('masterNodeTemplateName')
        master_node_template_plugin = dict_data.get(
            'masterNodeTemplatePlugin')
        master_node_template_plugin_version = dict_data.get(
            'masterNodeTemplatePluginVersion')
        master_node_template_processes = dict_data.get(
            'masterNodeTemplateProcesses')
        master_node_template_flavor = dict_data.get(
            'masterNodeTemplateFlavor')
        master_node_template_floating_ip_pool = dict_data.get(
            'masterNodeTemplateFloatingIpPool')
        # Worker node-group template parameters.
        worker_node_template_name = dict_data.get('workerNodeTemplateName')
        worker_node_template_plugin = dict_data.get(
            'workerNodeTemplatePlugin')
        worker_node_template_plugin_version = dict_data.get(
            'workerNodeTemplatePluginVersion')
        worker_node_template_processes = dict_data.get(
            'workerNodeTemplateProcesses')
        worker_node_template_flavor = dict_data.get(
            'workerNodeTemplateFlavor')
        worker_node_template_floating_ip_pool = dict_data.get(
            'workerNodeTemplateFloatingIpPool')
        # Cluster template parameters.
        cluster_template_name = dict_data.get('clusterTemplateName')
        cluster_worker_count = dict_data.get('clusterWorkerCount')
        # Every field is mandatory; reject the request if any is missing.
        if not master_node_template_name or not master_node_template_plugin or not master_node_template_plugin_version \
            or not master_node_template_processes or not master_node_template_flavor or not master_node_template_floating_ip_pool \
            or not worker_node_template_name or not worker_node_template_plugin or not worker_node_template_plugin_version \
            or not worker_node_template_processes or not worker_node_template_flavor or not worker_node_template_floating_ip_pool \
            or not cluster_template_name or not cluster_worker_count:
            abort(400, message="bad parameter in request body")
        # NOTE(review): request-supplied values end up inside shell commands
        # built by the *_cmd helpers - command-injection risk if this API is
        # reachable by untrusted callers.
        shell.call(create_sahara_node_group_template_cmd(project_name, master_node_template_name, master_node_template_plugin, \
            master_node_template_plugin_version, master_node_template_processes, \
            master_node_template_flavor, master_node_template_floating_ip_pool))
        shell.call(create_sahara_node_group_template_cmd(project_name, worker_node_template_name, worker_node_template_plugin, \
            worker_node_template_plugin_version, worker_node_template_processes, \
            worker_node_template_flavor, worker_node_template_floating_ip_pool))
        # Combine both node-group templates into the cluster template; the
        # CLI output is returned to the caller.
        output = shell.call(create_sahara_cluster_template_cmd(project_name, cluster_template_name, master_node_template_name, \
            worker_node_template_name, cluster_worker_count)).strip()
        return output_json(output, 200)
    except Exception:
        # Broad catch: any failure becomes a uniform 400 so CLI internals
        # are not leaked to the caller.
        log.exception(traceback.format_exc())
        abort(400, message="Request failed")