def sign_up():
    """Handle a sign-up request.

    Reads ``username``, ``email`` and ``password`` from the JSON body and
    inserts them into ``user_credentials_collection``.  Always responds
    HTTP 200 with the echoed input plus a ``successful`` flag (kept for
    backward compatibility with existing clients).
    """
    body = request.get_json()
    new_record = {
        'username': body['username'],
        'email': body['email'],
        # NOTE(review): the password is persisted in plaintext — consider
        # hashing (e.g. bcrypt) before storing; confirm with the team.
        'password': body['password'],
    }
    try:
        user_credentials_collection.insert(new_record)
        return utils.parse_json({'input': new_record, 'successful': True}), 200
    except Exception:  # fixed: bare 'except:' also swallowed SystemExit/KeyboardInterrupt
        return utils.parse_json({
            'input': new_record,
            'successful': False
        }), 200
def update_spark_info(n, spark_info_text):
    """Refresh Spark session info from Livy and return it pretty-printed.

    :param n: unused trigger value (callback interface).
    :param spark_info_text: JSON-encoded session info.
    :return: prettified JSON string of the (possibly refreshed) info.
    """
    spark_info = parse_json(spark_info_text)
    # No session created yet: echo the stored info unchanged.
    # fixed: 'in spark_info' instead of the needless '.keys()' call;
    # LivyRequests is now only constructed when actually used.
    if "session-url" not in spark_info:
        print(spark_info)
        return prettify_json(spark_info)
    livy = LivyRequests()
    spark_info = livy.session_info(spark_info["session-url"])
    return prettify_json(spark_info)
def get_ssr_conf(ssr_conf_path: str) -> Dict:
    """Load, validate and normalise an SSR server configuration file.

    :param ssr_conf_path: path to the JSON configuration file.
    :return: the validated config dict with defaults filled in.

    On any missing/invalid field this calls ``exit.error`` (which is
    assumed to terminate the process — confirm against its definition).
    """
    _ssr_conf: Dict = utils.parse_json(ssr_conf_path)
    if not _ssr_conf:
        exit.error('Require ssr-config.')

    def _require(key: str):
        # A required field must be present and non-blank.
        value = _ssr_conf.get(key)
        if common.is_blank(value):
            exit.error('Require \'%s\'.' % key)
        return value

    # -- check params --
    port = _ssr_conf.get('server_port')
    if port is None:
        exit.error('Require \'server_port\'.')
    # 'type(...) is not int' keeps the original strict check: bool is
    # rejected, unlike isinstance(port, int).
    if type(port) is not int or port <= 0:
        exit.error('Illegal \'server_port\'.')

    _require('password')
    method = _require('method')
    if not encrypt.is_supported(method):
        exit.error(f'Not supported method [{method}]')
    _require('protocol')
    _require('obfs')

    # -- default params --
    _ssr_conf['server'] = '::'
    _ssr_conf['password'] = common.to_bytes(_ssr_conf['password'])
    _ssr_conf['protocol_param'] = _ssr_conf.get('protocol_param', '')
    _ssr_conf['obfs_param'] = _ssr_conf.get('obfs_param', '')

    # process default data — parse network/port fields, reporting a
    # readable error when a user-supplied value cannot be parsed.
    try:
        _ssr_conf['forbidden_ip'] = \
            common.IPNetwork(_ssr_conf.get('forbidden_ip', '127.0.0.0/8,::1/128'))
    except Exception:  # fixed: unused 'as e' binding removed
        exit.error('error configuration \'forbidden_ip\'.')
    try:
        _ssr_conf['forbidden_port'] = common.PortRange(
            _ssr_conf.get('forbidden_port', ''))
    except Exception:
        exit.error('error configuration \'forbidden_port\'.')
    try:
        _ssr_conf['ignore_bind'] = \
            common.IPNetwork(_ssr_conf.get(
                'ignore_bind', '127.0.0.0/8,::1/128,10.0.0.0/8,192.168.0.0/16'))
    except Exception:
        exit.error('error configuration \'ignore_bind\'.')
    return _ssr_conf
def run_job(n_clicks, session_info_text, modifier, transform_func):
    """Submit a Spark job through Livy when the session is idle.

    Returns the statement URL of the submitted job, or an empty string
    when the session is not in the IDLE state.
    """
    print("run_job")
    info = parse_json(session_info_text)
    # Only submit when the Livy session is ready to accept work.
    if info["state"] != spark_states.IDLE:
        return ""
    payload = get_job_data(modifier, transform_func)
    submitted = LivyRequests().run_job(info["session-url"], payload)
    return submitted["statement-url"]
def update_graph(input_value, property, year): n_clicks = input_value #Substitute with input values if n_clicks > 0: templated_string = template.substitute(modifier=property) job = {"code": templated_string} job_info = LivyRequests().run_job(idle_session_url, job) statement_url = job_info["statement-url"] #after running the job need to wait for output. status will change to available and output to ok. while (True): statement_response = LivyRequests().job_info(statement_url) if statement_response["state"] == "available": while (True): # print("checking the output object") if statement_response["output"] is not None: print("in none") break else: print("still in while") data = statement_response["output"]["data"] payload = parse_json(data["text/plain"]) dfe = pd.DataFrame(list(zip(*payload["x"])), columns=payload["y"]) x = (dfe['forecast_period_end_date']) y = (dfe['avg_net_income_pq']) break else: time.sleep(1) else: print("default values to start with") x = [1, 2, 3, 4] y = [3, 4, 5, 6] return { "data": [ go.Scatter( x=x, y=y, mode="markers", marker={ "size": 15, "line": { "width": 0.5, "color": "white" }, }, ) ], "layout": go.Layout(margin={ "b": 40, "t": 10, "r": 0 }, hovermode="closest") }
async def get(self, request):
    """Return up to 10 movies, optionally filtered by a text search."""
    try:
        term = request.args["search"][0]
    except KeyError:
        # No 'search' query parameter: match everything.
        query = {}
    else:
        query = {"$text": {"$search": term}}
    cursor = Movie.get_collection().find(query, )
    movies = await cursor.to_list(10)
    return json(parse_json(movies))
def read_total_fund(self):
    """Read the full fund-type list from disk (used at initialisation).

    Refreshes the fund-type list, then loads the full fund file into
    ``self.total_fund``.  Logs a warning when the file is missing.
    :return: None
    """
    try:
        if not os.path.exists(self.total_fund_file):
            raise OSError("全量基金文件不存在")
        self.get_fund_type_list()
        self.total_fund = parse_json(self.total_fund_file)
    except OSError as e:
        # fixed: 'waring' is not a logging.Logger method — the typo
        # raised AttributeError instead of logging the failure.
        self.logger.warning("读取全量基金失败,文件不存在:{}".format(e))
def test_packages_with_contents(client):
    """Start with a test database."""
    populate_test_data(db.session)
    db.session.commit()

    response = client.get("/api/packages/")
    packages = parse_json(response.data)

    # Every approved package — and only those — must be listed.
    approved_count = Package.query.filter_by(approved=True).count()
    assert len(packages) > 0
    assert len(packages) == approved_count
    validate_package_list(packages)
def getCompileJsListByModule(self, module):
    """Collect the .js files of *module* in compile order.

    Order: files listed under "preposition" in the module's wxpack.json
    first, then every other .js found under the module directory
    (excluding pre/post files and anything containing 'manifest'),
    then the "postposition" files last.

    :param module: module directory name under <proj>/games.
    :return: list of project-relative, forward-slash .js paths.
    """
    jsList = []
    preList = []   # files forced to the front of the list
    postList = []  # files forced to the back of the list
    gameConfigPath = os.path.join(self.proj_path, "games", module, "wxpack.json")
    if os.path.exists(gameConfigPath):
        gameConfig = utils.parse_json(gameConfigPath)
        preposition = gameConfig.get("preposition", [])
        for preFile in preposition:
            jsList.append(preFile)
            preList.append(preFile)
        postposition = gameConfig.get("postposition", [])
        for postFile in postposition:
            postList.append(postFile)
    else:
        Logging.log_msg("wxpack.json 文件不存在 %s" % gameConfigPath)
    for parent, dirnames, filenames in os.walk(self.games_path):
        # relpath == last path component of 'parent'.
        relpath = os.path.relpath(parent, os.path.join(parent, ".."))
        # True only when 'parent' is a direct child of games_path.
        isSamePath = os.path.normpath(
            os.path.abspath(os.path.join(
                parent, ".."))) == os.path.normpath(
                    os.path.abspath(self.games_path))
        if module == relpath and isSamePath == True:
            for p, dirname, filenames in os.walk(parent):
                for filename in filenames:
                    # Only plain '<name>.js' files (no extra dots).
                    token = filename.split(".")
                    if len(token) != 2 or filename.split(".")[1] != "js":
                        continue
                    # Normalise to a project-relative, forward-slash path.
                    filename = os.path.join(os.path.normpath(p), filename)
                    filename = os.path.relpath(filename, self.proj_path)
                    filename = filename.replace("\\", "/")
                    # Skip pre/post files (added separately) and manifests.
                    isPass = False
                    for postFile in postList:
                        if postFile == filename:
                            isPass = True
                    for preFile in preList:
                        if preFile == filename:
                            isPass = True
                    if 'manifest' in filename:
                        isPass = True
                    if isPass == False:
                        jsList.append(filename)
    for postFile in postList:
        jsList.append(postFile)
    return jsList
def _modify_proj_gradle_config(self, libs):
    """Rewrite the project's package config so 'libraryList' equals *libs*.

    :param libs: list of library names to record (copied defensively).
    """
    print("start modify gradle config")
    config = utils.parse_json(self.packageConfig_path)
    config['libraryList'] = libs[:]  # copy: later caller mutation stays isolated
    # save package.json
    # fixed: the local was named 'str', shadowing the builtin; 'with'
    # replaces the manual try/finally close.
    json_str = json.dumps(config, ensure_ascii=False, indent=4,
                          separators=(',', ':'))
    with open(self.packageConfig_path, 'w+') as project_file:
        # NOTE(review): writing encoded bytes to a text-mode file only
        # works on Python 2; under Python 3 drop .encode() or open in
        # 'wb'. Kept as-is pending confirmation of the target runtime.
        project_file.write(json_str.encode('utf-8'))
def get_multi_item_stock_new(self, sku_ids, area):
    """Get the stock state of multiple items (new API).

    Returns True when every item is in stock, otherwise False.

    :param sku_ids: item ids; accepts a comma-separated string, e.g. "123,456"
    :param area: area id
    :return: True/False — whether all items are in stock simultaneously
    """
    items_dict = parse_sku_id(sku_ids=sku_ids)
    area_id = parse_area_id(area=area)

    url = 'https://c0.3.cn/stocks'
    payload = {
        # jsonp callback name with a random suffix.
        'callback': 'jQuery{}'.format(random.randint(1000000, 9999999)),
        'type': 'getstocks',
        'skuIds': ','.join(items_dict.keys()),
        'area': area_id,
        '_': str(int(time.time() * 1000))  # cache-busting timestamp (ms)
    }
    headers = {'User-Agent': self.user_agent}
    resp_text = ''
    try:
        resp_text = requests.get(url=url, params=payload, headers=headers,
                                 timeout=self.timeout).text
        stock = True
        for sku_id, info in parse_json(resp_text).items():
            sku_state = info.get('skuState')  # whether the item is on sale
            stock_state = info.get('StockState')  # item stock state
            # 33/40 presumably mean in-stock / allocatable (cf. the
            # single-item variant's code table) — confirm.
            if sku_state == 1 and stock_state in (33, 40):
                continue
            else:
                stock = False
                break
        return stock
    except requests.exceptions.Timeout:
        logger.error('查询 %s 库存信息超时(%ss)', list(items_dict.keys()), self.timeout)
        return False
    except requests.exceptions.RequestException as request_exception:
        logger.error('查询 %s 库存信息发生网络请求异常:%s', list(items_dict.keys()), request_exception)
        return False
    except Exception as e:
        logger.error('查询 %s 库存信息发生异常, resp: %s, exception: %s', list(items_dict.keys()), resp_text, e)
        return False
def __init__(self):
    """Initialise monitor state from the global configuration file."""
    # Browser-like request headers for outgoing HTTP calls.
    self.headers = {
        'content-type': 'application/json',
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:22.0) Gecko/20100101 Firefox/22.0'
    }
    # Regex extracting the JSON payload out of a jsonpgz(...) wrapper.
    self.pattern = r'^jsonpgz\((.*)\)'
    self.total_fund = None
    self.last_update_time = None
    cfg = parse_json(default_config_path)
    self.global_config = cfg
    self.total_fund_file = cfg["total_fund_path"]
    self.target_fund = cfg["target_fund"]
    self.logger = MyLogger("monitor.py - Fund Monitor").get_logger()
def import_on(pagename, infos):
    """Import attribute/value pairs onto the page named *pagename*.

    For each (name, value) in *infos*: ensure the attribute definition
    exists (creating it with a placeholder description if not), then
    attach the value to the page.  Pages that do not exist are skipped.

    NOTE(review): Python 2 code (dict.iteritems); parse_json here
    appears to wrap an API callable rather than decode a string —
    confirm against the utils module.
    """
    for name, unencoded_value in infos.iteritems():
        # Check to see that attribute exists.
        attribute = format_attribute(name)
        datatype = guess_datatype(name, unencoded_value)
        value = format_value(unencoded_value, name, datatype)
        get_attribute_json = parse_json(api.page_info_attribute.get)
        if not get_attribute_json(attribute=attribute)['objects']:
            api.page_info_attribute.post({
                'description': 'Add something here',
                'datatype': datatype,
                'name': name,
                'attribute': attribute,
                'required': False
            })
        # Check to make sure the datatype of the attribute is the same
        # as what we're adding.
        pass
        # Lookup page URI
        try:
            get_page_json = parse_json(api.page.get)
            page_uri = get_page_json(name=pagename)['objects'][0]['resource_uri']
        except IndexError:
            # No page with that name, let's continue
            continue
        api.page_info.post({
            'page': page_uri,
            'attribute': attribute,
            'value': value
        })
def test_packages_with_protocol_low(client):
    """Start with a test database."""
    populate_test_data(db.session)
    db.session.commit()

    response = client.get("/api/packages/?protocol_version=20")
    packages = parse_json(response.data)

    assert len(packages) == 4
    # 'awards' must be filtered out at this protocol version.
    names = [pkg["name"] for pkg in packages]
    assert "awards" not in names
    validate_package_list(packages, True)
def test_packages_with_query(client):
    """Start with a test database."""
    populate_test_data(db.session)
    db.session.commit()

    response = client.get("/api/packages/?q=food")
    packages = parse_json(response.data)

    assert len(packages) == 2
    validate_package_list(packages)
    # Both food packages must be returned, in either order.
    assert {packages[0]["name"], packages[1]["name"]} == {"food", "food_sweet"}
def setGradleConfig(self, rollback_obj, cfg_dir, apk_cfg_info, apk_name):
    """Point the Android launcher's gradle config at *cfg_dir*'s
    package.json and patch signing/output settings into that file.

    :param rollback_obj: records config.gradle so it can be restored.
    :param cfg_dir: directory containing package.json and the keystore.
    :param apk_cfg_info: NOTE(review) — immediately overwritten by
        re-reading package.json; kept only for interface compatibility.
    :param apk_name: output apk file name written into the config.
    """
    print("set gradle config")
    gradle_config_path = utils.flat_path(
        os.path.join(self.proj_android_path, 'launcher/config.gradle'))
    cfg_file_path = utils.flat_path(os.path.join(cfg_dir, "package.json"))
    if sys.platform == "win32":
        cfg_file_path = cfg_file_path.replace("\\", "/")

    rollback_obj.record_file(gradle_config_path)

    # Rewrite the 'def packagePath = ...' line in config.gradle.
    # fixed: files are now closed via 'with' instead of manual open/close.
    with open(gradle_config_path) as f:
        lines = f.readlines()
    data = []
    for line in lines:
        if (line.find('def packagePath =') == 0):
            line = 'def packagePath = \"%s\"' % cfg_file_path + '\n'
        data.append(line)
    with open(gradle_config_path, "w") as f:
        f.writelines(data)

    apk_cfg_info = utils.parse_json(cfg_file_path)
    keystore_path = utils.flat_path(
        os.path.join(
            cfg_dir, apk_cfg_info[PackAPK.CFG_SIGN][PackAPK.CFG_SIGN_FILE]))
    if sys.platform == "win32":
        keystore_path = keystore_path.replace("\\", "/")
    apk_cfg_info['sign']['storefile'] = keystore_path
    apk_cfg_info['sign']['output_apk_dir'] = self.output_path
    apk_cfg_info['sign']['output_apk_name'] = apk_name

    # save package.json
    # fixed: local renamed from 'str' (shadowed the builtin);
    # bare 'except:' narrowed to Exception.
    try:
        cfg_str = json.dumps(apk_cfg_info, ensure_ascii=False, indent=4,
                             separators=(',', ':'))
        with open(cfg_file_path, 'w+') as package_file:
            package_file.write(cfg_str)
    except Exception:
        raise_known_error("write package.json error!")
def createManifestFile(rootPath, module, moduleType=None, specVer=None):
    """Generate manifest_<module>.js listing the module's files.

    The manifest is a self-invoking JS function returning a JSON object
    with 'version' and 'jslist'.  Also refreshes the plist asset map.

    :param rootPath: directory containing the module directories.
    :param module: module directory name.
    :param moduleType: passed through to seekFiles.
    :param specVer: explicit version; when None the version must come
        from the module's wxpack.json.
    """
    if not os.path.exists(rootPath):
        raise_known_error("root path is not found ===" + rootPath,
                          KnownError.ERROR_PATH_NOT_FOUND)
    configPath = os.path.join(rootPath, module, "wxpack.json")
    if not os.path.exists(configPath):
        raise_known_error("%s not found" % configPath,
                          KnownError.ERROR_PATH_NOT_FOUND)
    commonConfig = utils.parse_json(configPath)
    if specVer is None:
        # fixed: dict.has_key() is Python-2-only; 'in' works on 2 and 3.
        if 'version' not in commonConfig:
            raise_known_error("%s/wxpack.json 必须包含version参数" % module)
        version = commonConfig.get("version", 1)
    else:
        version = specVer
    dirname = os.path.join(rootPath, module)
    basename = os.path.basename(dirname)
    filename = os.path.join(dirname, "manifest_" + basename + ".js")
    if not os.path.exists(dirname):
        raise_known_error("module path is not exists ===" + module,
                          KnownError.ERROR_PATH_NOT_FOUND)
    data = {}
    data["version"] = version
    fileList = seekFiles(rootPath, commonConfig, dirname, moduleType)
    data["jslist"] = fileList
    # Wrap the manifest JSON in a self-invoking JS function.
    # fixed: local renamed from 'str' (shadowed the builtin).
    manifest_json = json.dumps(data, indent=4, separators=(',', ': '))
    codeStr = "function manifest_"
    codeStr += basename
    codeStr += "() {\nreturn "
    codeStr += manifest_json
    codeStr += "}\n"
    codeStr += 'manifest_'
    codeStr += basename
    codeStr += "()"
    # fixed: 'with' replaces the manual try/finally close.
    with open(filename, 'w+') as file_object:
        file_object.write(codeStr)
    # Generate the plist asset-mapping table.
    refreshAssetManager(rootPath, module)
def start(event):
    """UI handler: hide the window, load price data for the selected
    league from poe.ninja and start the background checking thread.

    :param event: widget event (unused beyond triggering).
    """
    # Hide window
    window.withdraw()
    global items_dic, rbutton_choose
    # fixed: 'is 1' compared object identity, not value — it only worked
    # via CPython's small-int caching; '== 1' is the correct comparison.
    if rbutton_choose.get() == 1:
        league = utils.League.STANDARD
    else:
        league = utils.League.CHALLENGE
    # Update data from poe.ninja
    json_data = utils.update_html_json(league)
    # Parse JSON-data
    items_dic = utils.parse_json(json_data)
    # Create daemon thread for the main checking process.
    t = threading.Thread(target=check_conditions, args=(1, ))
    t.daemon = True
    t.start()
def get_username(self):
    """Fetch the logged-in user's nickname from the JD mini-profile API.

    Retries the jsonp request up to 5 times until the body starts with
    'jQuery', then extracts 'nickName' from the decoded payload.
    """
    url = 'https://passport.jd.com/user/petName/getUserInfoForMiniJd.action'
    payload = {
        # fixed: the '{}' placeholder was missing, so .format() silently
        # dropped the random jsonp suffix (cf. get_user_info).
        'callback': 'jQuery{}'.format(random.randint(1000000, 9999999)),
        '_': str(int(time.time() * 1000)),
    }
    headers = {'Referer': 'https://order.jd.com/center/list.action'}
    rsp = self.session.get(url=url, params=payload, headers=headers)

    try_count = 5
    while not rsp.text.startswith('jQuery'):
        try_count = try_count - 1
        if try_count > 0:
            rsp = self.session.get(url=url, params=payload, headers=headers)
        else:
            # NOTE(review): after exhausting retries the body may still
            # not be jsonp, and parse_json below could fail — confirm
            # how parse_json handles malformed input.
            break
    return parse_json(rsp.text).get('nickName')
def get_user_info(self):
    """Fetch user info and return the nickname ('jd' on any failure).

    :return: nickname string
    """
    url = 'https://passport.jd.com/user/petName/getUserInfoForMiniJd.action'
    payload = {
        'callback': 'jQuery{}'.format(random.randint(1000000, 9999999)),
        '_': str(int(time.time() * 1000)),
    }
    headers = {
        'User-Agent': self.user_agent,
        'Referer': 'https://order.jd.com/center/list.action',
    }
    try:
        resp = self.sess.get(url=url, params=payload, headers=headers)
        # The jsonp body carries many fields (imgUrl, lastLoginTime,
        # nickName, plusStatus, userLevel, ...); only nickName is used.
        # e.g. jQuery2381773({"imgUrl":"//storage.360buyimg.com/i.imageUpload/xxx.jpg","nickName":"xxx",...})
        nickname = parse_json(resp.text).get('nickName')
        return nickname or 'jd'
    except Exception:
        return 'jd'
def _pod_version(client, pod_id, version_id):
    """Fetch and decode one specific version of a pod definition."""
    endpoint = _pod_versions_url(pod_id, version_id)
    response = http.get(urljoin(DCOS_SERVICE_URL, endpoint))
    return parse_json(response)
# -*- coding: utf-8 -*- """ Created on 3/3/21 2:07 PM @Author : Justin Jiang @Email : [email protected] """ import logging import os from utils import parse_json BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) default_config_path = BASE_DIR + '/config/global_config.json' config_dict = parse_json(default_config_path) class MyLogger(object): def __init__(self, name, project="monitor", level=logging.INFO): self.logger = logging.getLogger(name) # 得到一个logger对象 self.logger.handlers.clear() # 每次被调用后,清空已经存在handler self.project = project file_handler = logging.FileHandler( config_dict["logging_path"] + project + '.log', encoding='utf-8') # 指定输出的文件路径,将日志消息发送到磁盘文件,默认情况下文件大小会无限增长 log_format = logging.Formatter( "%(asctime)s - %(name)s - %(levelname)s - %(message)s") file_handler.setFormatter(log_format) self.logger.addHandler(file_handler) # 为logger添加的日志处理器 self.logger.setLevel(level) # 指定日志的最低输出级别,默认为Info级别
def update_graph(input_value, property, year): n_clicks = input_value #Substitute with input values if n_clicks > 0: myString = "'" "," "'".join(property) properties = "('" + myString + "')" templated_string = template.substitute(modifier=properties) print(templated_string) job = {"code": templated_string} job_info = LivyRequests().run_job(idle_session_url, job) statement_url = job_info["statement-url"] #after running the job need to wait for output. status will change to available and output to ok. while (True): statement_response = LivyRequests().job_info(statement_url) if statement_response["state"] == "available": while (True): print("checking the output object") if statement_response["output"] is not None: print("in none") break else: print("still in while") data = statement_response["output"]["data"] payload = parse_json(data["text/plain"]) dfe = pd.DataFrame(list(zip(*payload["x"])), columns=payload["y"]) #use the dataframe to create traces and plotly yr = int(year) depths = [[] for i in range(len(property))] depth_i = 0 traces = [] x = dfe.forecast_period_end_date[ (dfe['property_code'].isin(property)) & (pd.DatetimeIndex( dfe['forecast_period_end_date']).year > yr)] for i in property: depths[depth_i] = dfe.avg_net_income_pq[ (dfe['property_code'] == i) & (pd.DatetimeIndex( dfe['forecast_period_end_date']).year > yr)] mydict = { 'x': x, 'y': depths[depth_i], 'type': 'lines', 'name': property[depth_i] } traces.append(mydict) depth_i = depth_i + 1 break else: time.sleep(1) else: traces = [] x = [] y = [] return { "data": traces, "layout": go.Layout(margin={ "b": 40, "t": 10, "r": 0 }, hovermode="closest") }
def _pod_status(client, pod_id):
    """Fetch and decode the current status of a pod."""
    status_url = urljoin(DCOS_SERVICE_URL, _pod_status_url(pod_id))
    return parse_json(http.get(status_url))
def test_init_container_from_json(self):
    """Container built from all cards matches the JSON card count."""
    self.init_all_cards()
    expected_cards = parse_json('all_cards.json')
    self.assertLength(self.container, len(expected_cards))
def get_ssr_conf(is_local: bool, ssr_conf_path: str) -> Dict:
    """Load an SSR configuration file and fill in every default value.

    :param is_local: True for the local/client side — a 'server' address
        is then mandatory; False for the server side (defaults 0.0.0.0).
    :param ssr_conf_path: path to the JSON configuration file.
    :return: the normalised config dict, after check_config validation.

    Calls ``exit.error`` on missing/unparseable fields (assumed to
    terminate — confirm against its definition).
    """
    _ssr_conf: Dict = utils.parse_json(ssr_conf_path)
    if not _ssr_conf:
        exit.error('ssr-config not defined.')
    # process default data
    if is_local:
        # Client mode must know which server to connect to.
        if _ssr_conf.get('server') is None:
            exit.error('server addr not defined.')
        else:
            _ssr_conf['server'] = common.to_str(_ssr_conf['server'])
    else:
        _ssr_conf['server'] = common.to_str(_ssr_conf.get('server', '0.0.0.0'))
    try:
        _ssr_conf['forbidden_ip'] = \
            common.IPNetwork(_ssr_conf.get('forbidden_ip', '127.0.0.0/8,::1/128'))
    except Exception as e:
        exit.error('error configuration \'forbidden_ip\'.')
    try:
        _ssr_conf['forbidden_port'] = common.PortRange(
            _ssr_conf.get('forbidden_port', ''))
    except Exception as e:
        exit.error('error configuration \'forbidden_port\'.')
    try:
        _ssr_conf['ignore_bind'] = \
            common.IPNetwork(_ssr_conf.get('ignore_bind', '127.0.0.0/8,::1/128,10.0.0.0/8,192.168.0.0/16'))
    except Exception as e:
        exit.error('error configuration \'ignore_bind\'.')
    # Numeric / string fields with documented defaults.
    _ssr_conf['server_port'] = int(_ssr_conf.get('server_port', 8388))
    _ssr_conf['local_address'] = common.to_str(
        _ssr_conf.get('local_address', '127.0.0.1'))
    _ssr_conf['local_port'] = int(_ssr_conf.get('local_port', 1080))
    _ssr_conf['password'] = common.to_bytes(_ssr_conf.get('password', b''))
    _ssr_conf['method'] = common.to_str(_ssr_conf.get('method', 'aes-128-ctr'))
    _ssr_conf['protocol'] = common.to_str(
        _ssr_conf.get('protocol', 'auth_aes128_md5'))
    _ssr_conf['protocol_param'] = common.to_str(
        _ssr_conf.get('protocol_param', ''))
    _ssr_conf['obfs'] = common.to_str(
        _ssr_conf.get('obfs', 'tls1.2_ticket_auth'))
    _ssr_conf['obfs_param'] = common.to_str(_ssr_conf.get('obfs_param', ''))
    _ssr_conf['port_password'] = None
    _ssr_conf['additional_ports'] = _ssr_conf.get('additional_ports', {})
    # Boolean flags are stored as the *string* 'true' in the JSON file.
    _ssr_conf['additional_ports_only'] = \
        _ssr_conf.get('additional_ports_only') is not None and 'true' == _ssr_conf.get('additional_ports_only')
    _ssr_conf['timeout'] = int(_ssr_conf.get('timeout', 120))
    _ssr_conf['udp_timeout'] = int(_ssr_conf.get('udp_timeout', 60))
    _ssr_conf['udp_cache'] = int(_ssr_conf.get('udp_cache', 64))
    _ssr_conf['fast_open'] = _ssr_conf.get(
        'fast_open') is not None and 'true' == _ssr_conf.get('fast_open')
    _ssr_conf['workers'] = _ssr_conf.get('workers', 1)
    _ssr_conf['pid-file'] = _ssr_conf.get('pid-file',
                                          '/var/run/shadowsocksr.pid')
    _ssr_conf['log-file'] = _ssr_conf.get('log-file',
                                          '/var/log/shadowsocksr.log')
    _ssr_conf['verbose'] = _ssr_conf.get(
        'verbose') is not None and 'true' == _ssr_conf.get('verbose')
    _ssr_conf['connect_verbose_info'] = _ssr_conf.get('connect_verbose_info',
                                                      0)
    check_config(_ssr_conf, is_local)
    return _ssr_conf
# -*- coding: utf-8 -*- """ Created on 3/3/21 4:00 PM @Author : Justin Jiang @Email : [email protected] """ from monitor import FundMonitor, SystemMonitor, StockMonitor from utils import parse_json, get_time from logger import MyLogger from threading import Timer import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) global_config_path = BASE_DIR + '/config/global_config.json' global_config = parse_json(global_config_path) target_fund = global_config.get("target_fund") logger = MyLogger("looper.py").get_logger() fund_monitor = FundMonitor() sys_monitor = SystemMonitor() stock_monitor = StockMonitor() system_period = global_config["record_system_period"] fund_period = global_config["record_fund_period"] time_period = global_config["record_time_period"] stock_period = global_config["record_stock_period"] index_period = global_config["record_index_period"]
def test_packages_empty(client):
    """Start with a blank database."""
    response = client.get("/api/packages/")
    # No data has been populated, so the listing must be empty.
    packages = parse_json(response.data)
    assert packages == []
place=place, feed_list=feed_var_names, program=program) reader = data_reader.get_reader(conf_dict, True, samples_file) # Get batch data iterator batch_data = paddle.batch(reader, conf_dict["batch_size"]) logging.info("start test process ...") for iter, data in enumerate(batch_data()): output = executor.run(program, feed=feeder.feed( data), fetch_list=fetch_targets) if conf_dict["task_mode"] == "pairwise": predictions_file.write( "\n".join(map(lambda item: str(item[0]), output[1])) + "\n") else: predictions_file.write( "\n".join(map(lambda item: str(np.argmax(item)), output[1])) + "\n") utils.get_result_file(conf_dict, "samples.txt", "predictions.txt") if __name__ == "__main__": log.init_log("./log/paddle") parser = argparse.ArgumentParser() parser.add_argument("--task_type", default="train", help="task type include train/predict, the default value is train") parser.add_argument( "--conf_file_path", default="examples/cnn_pointwise.json", help="config file path") args = parser.parse_args() conf_dict = utils.parse_json(args.conf_file_path) if args.task_type == "train": train(conf_dict) else: predict(conf_dict)
def get_pod_versions(pod_id):
    """Fetch and decode the list of available versions for a pod."""
    versions_url = urljoin(DCOS_SERVICE_URL, get_pod_versions_url(pod_id))
    return parse_json(http.get(versions_url))
def get_single_item_stock(self, sku_id, num, area):
    """Get the stock state of a single item.

    :param sku_id: item id
    :param num: item quantity
    :param area: area id
    :return: True/False — whether the item is in stock
    """
    area_id = parse_area_id(area)

    # The stock API needs the item's category and vendor id; both are
    # scraped from the item detail page once and cached on self.
    cat = self.item_cat.get(sku_id)
    vender_id = self.item_vender_ids.get(sku_id)
    if not cat:
        page = self._get_item_detail_page(sku_id)

        match = re.search(r'cat: \[(.*?)\]', page.text)
        cat = match.group(1)
        self.item_cat[sku_id] = cat

        match = re.search(r'venderId:(\d*?),', page.text)
        vender_id = match.group(1)
        self.item_vender_ids[sku_id] = vender_id

    url = 'https://c0.3.cn/stock'
    payload = {
        'skuId': sku_id,
        'buyNum': num,
        'area': area_id,
        'ch': 1,
        '_': str(int(time.time() * 1000)),  # cache-busting timestamp (ms)
        'callback': 'jQuery{}'.format(random.randint(1000000, 9999999)),
        'extraParam': '{"originid":"1"}',  # get error stock state without this param
        'cat': cat,  # get 403 Forbidden without this param (obtained from the detail page)
        'venderId': vender_id  # return seller information with this param (can't be ignored)
    }
    headers = {
        'User-Agent': self.user_agent,
        'Referer': 'https://item.jd.com/{}.html'.format(sku_id),
    }

    resp_text = ''
    try:
        resp_text = requests.get(url=url, params=payload, headers=headers,
                                 timeout=self.timeout).text
        resp_json = parse_json(resp_text)
        stock_info = resp_json.get('stock')
        sku_state = stock_info.get('skuState')  # whether the item is on sale
        stock_state = stock_info.get(
            'StockState'
        )  # stock state: 33 -- in stock, 34 -- out of stock, 36 -- being purchased, 40 -- can be allocated
        return sku_state == 1 and stock_state in (33, 40)
    except requests.exceptions.Timeout:
        logger.error('查询 %s 库存信息超时(%ss)', sku_id, self.timeout)
        return False
    except requests.exceptions.RequestException as request_exception:
        logger.error('查询 %s 库存信息发生网络请求异常:%s', sku_id, request_exception)
        return False
    except Exception as e:
        logger.error('查询 %s 库存信息发生异常, resp: %s, exception: %s', sku_id, resp_text, e)
        return False