def read_jsonld():
    """
    Find a jsonld file in the cwd (or up to 2 levels below cwd), and load it in.

    :return dict: Jsonld data
    """
    _d = {}
    try:
        # Find a jsonld file in cwd. If none, fall back to a json file. If neither is found, return empty.
        # next(..., None) avoids the IndexError that indexing [0] on an empty list would raise.
        _filename = next((file for file in os.listdir() if file.endswith(".jsonld")), None)
        if not _filename:
            _filename = next((file for file in os.listdir() if file.endswith(".json")), None)
        if _filename:
            try:
                # Load and decode
                _d = demjson.decode_file(_filename, decode_float=float)
                logger_jsons.info("Read JSONLD successful: {}".format(_filename))
            except FileNotFoundError as fnf:
                print("Error: metadata file not found: {}".format(_filename))
                logger_jsons.error("read_jsonld: FileNotFound: {}, {}".format(_filename, fnf))
            except Exception:
                # Retry with an explicit latin-1 encoding before giving up.
                try:
                    _d = demjson.decode_file(_filename, decode_float=float, encoding="latin-1")
                    logger_jsons.info("Read JSONLD successful: {}".format(_filename))
                except Exception as e:
                    print("Error: unable to read metadata file: {}".format(e))
                    logger_jsons.error("read_jsonld: Exception: {}, {}".format(_filename, e))
        else:
            print("Error: metadata file (.jsonld) not found in LiPD archive")
    except Exception:
        print("Error: Unable to find jsonld file in LiPD archive. This may be a corrupt file.")
        logger_jsons.error("Error: Unable to find jsonld file in LiPD archive. This may be a corrupt file.")
    logger_jsons.info("exit read_jsonld")
    return _d
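# A minimal, hedged sketch of the encoding-fallback pattern above: try the
# default decode first, then retry with latin-1 (which maps every byte, so the
# retry cannot fail on encoding alone). `decode_with_fallback` is an
# illustrative helper name, not part of demjson or LiPD.
import demjson

def decode_with_fallback(filename):
    try:
        return demjson.decode_file(filename, decode_float=float)
    except Exception:
        return demjson.decode_file(filename, decode_float=float, encoding="latin-1")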
def read_json_from_file(filename):
    """
    Import the JSON data from target file.

    :param str filename: Target file
    :return dict: JSON data
    """
    logger_jsons.info("enter read_json_from_file")
    d = {}
    try:
        # Load and decode
        d = demjson.decode_file(filename)
        logger_jsons.info("successful read from json file")
    except FileNotFoundError:
        # Didn't find a jsonld file. Maybe it's a json file instead?
        try:
            d = demjson.decode_file(os.path.splitext(filename)[0] + ".json")
        except FileNotFoundError as e:
            # No json or jsonld file. Exit
            print("Error: jsonld file not found: {}".format(filename))
            logger_jsons.debug("read_json_from_file: FileNotFound: {}, {}".format(filename, e))
    except Exception:
        print("Error: unable to read jsonld file")
    if d:
        d = remove_empty_fields(d)
    logger_jsons.info("exit read_json_from_file")
    return d
def read_json_from_file(filename):
    """
    Import the JSON data from target file.

    :param str filename: Target file
    :return dict: JSON data
    """
    logger_jsons.info("enter read_json_from_file")
    d = OrderedDict()
    try:
        # Load and decode
        d = demjson.decode_file(filename, decode_float=float)
        logger_jsons.info("successful read from json file")
    except FileNotFoundError:
        # Didn't find a jsonld file. Maybe it's a json file instead?
        try:
            d = demjson.decode_file(os.path.splitext(filename)[0] + '.json', decode_float=float)
        except FileNotFoundError as e:
            # No json or jsonld file. Exit
            print("Error: jsonld file not found: {}".format(filename))
            logger_jsons.debug("read_json_from_file: FileNotFound: {}, {}".format(filename, e))
    except Exception:
        print("Error: unable to read jsonld file")
    if d:
        d = rm_empty_fields(d)
    logger_jsons.info("exit read_json_from_file")
    return d
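# Hedged note on the two variants above: the second initializes its result as
# an OrderedDict and passes decode_float=float, which (as used throughout this
# project) makes demjson return plain Python floats for JSON reals. Minimal
# usage sketch; the file name is illustrative:
import demjson

metadata = demjson.decode_file("metadata.jsonld", decode_float=float)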
def load_pipeline_json(self, url):
    """
    Loads the Pipeline from the url location and parses all data
    to create the corresponding executed_cats

    Args:
        | *url*: location identifier for the pipeline.json
    """
    try:
        json = demjson.decode_file(url, "UTF-8")
    except demjson.JSONDecodeError as e:
        # Report the decode error itself and stop; continuing would leave json unbound.
        print("Unable to parse " + url + " trace: " + str(e))
        return

    position = 0
    for alg in json:
        alg_name = alg[0]
        alg_attributes = alg[1]
        cat_name = alg_attributes["type"]
        self.new_category(position, cat_name, alg_name)
        active_alg = self.executed_cats[position].active_algorithm
        active_alg.store_image = alg_attributes["store_image"]
        for name in alg_attributes.keys():
            if name == "type" or name == "store_image":
                continue
            value = alg_attributes[name]
            active_alg.find_ui_element(name).set_value(value)
        position += 1

    self.pipeline_path = url
def run_analisi(filename):
    print("workin' on {}".format(filename))
    data = demjson.decode_file(filename)
    for comm in data:
        if comm['author'] in selected_users:
            text = comm['body']
            subreddit = comm['subreddit']
            post_id = comm['id']
            results = analisi_linguistica(post_id, subreddit, text)
            has_entities = results[2] != []
            id_stemmTxt = str(results[0]) + "," + str(results[1]) + "\n"
            id_entities = str(results[0]) + "," + str(results[2]) + "\n"
            # Open the output files with context managers so handles are not
            # leaked on every matching comment.
            if has_entities:
                with open('results_tp/entities_sentiment_' + sys.argv[1], 'a') as ent_file:
                    print(id_entities, file=ent_file)
            with open('results_tp/stemmed_text_' + sys.argv[1], 'a') as stem_file:
                print(id_stemmTxt, file=stem_file)
    print("done!")
async def main():
    config = demjson.decode_file('./data/config.json')
    pokedata = await getResource(config['pokemonStats'])
    rawStats, maxStam, maxAtk, maxDef = calcStats(config, pokedata)

    res = config['tiers'].copy()
    for tierName in res:
        iterable = filter(lambda p: p.get('tier', '') == tierName, rawStats)
        for sortInfo in reversed(config['tiers'][tierName]['sort']):
            keyfunc = itemgetter(sortInfo['field'])
            rev = sortInfo['dir'] == 'desc'
            iterable = sorted(iterable, key=keyfunc, reverse=rev)
        res[tierName] = list(iterable)

    tiersOrdered = [(tierData['order'], tierName)
                    for (tierName, tierData) in config['tiers'].items()]
    tiersOrdered.sort()

    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(os.path.join(os.path.curdir, 'data')),
        autoescape=True)
    statLimits = {'atk': maxAtk, 'def': maxDef, 'stam': maxStam}
    template = env.get_template('template.html')
    with open('index.html', 'w', encoding='utf-8') as outputFile:
        outputFile.write(
            template.render(tiersOrdered=tiersOrdered, tierData=res,
                            config=config, statLimits=statLimits))
def getEmbedData(self, embedData, jsPost):
    if not os.path.exists("content_provider.json"):
        raise PixivException("Missing content_provider.json, please redownload application!",
                             errorCode=PixivException.MISSING_CONFIG,
                             htmlPage=None)

    cfg = demjson.decode_file("content_provider.json")
    embed_cfg = cfg["embedConfig"]
    current_provider = embedData["serviceProvider"]

    if current_provider in embed_cfg:
        if embed_cfg[current_provider]["ignore"]:
            return ""

        content_id = None
        for key in embed_cfg[current_provider]["keys"]:
            if key in embedData:
                content_id = embedData[key]
                break
        if content_id is not None and len(content_id) > 0:
            content_format = embed_cfg[current_provider]["format"]
            return content_format.format(content_id)
        else:
            raise PixivException("Empty content_id for embed provider = {0} for post = {1}, please update content_provider.json.".format(embedData["serviceProvider"], self.imageId),
                                 errorCode=9999,
                                 htmlPage=jsPost)
    else:
        raise PixivException("Unsupported embed provider = {0} for post = {1}, please update content_provider.json.".format(embedData["serviceProvider"], self.imageId),
                             errorCode=9999,
                             htmlPage=jsPost)
def __init__(self):
    self.coding = 'utf-8'
    self.jxbh2name = {}
    self.canjxbh = []
    self.qiang = []
    self.huan = {}
    if platform.system() == 'Windows':
        self.coding = 'gbk'
    self._user = "******"
    self._pwd = "sjgqnbzplyzibbbf"
    self._to = "*****@*****.**"
    print u'====================================================================\n'
    print u' Copyright (C) 2016 huangjw'
    print u' All rights reserved'
    print u' Contact: [email protected]\n'
    print u'====================================================================\n'
    # "Press Ctrl C or close the window to quit while running"
    print u'----------------运行过程中按Ctrl C退出或直接关闭窗口----------------'
    # "Click the white button in the top-left of the window -> Edit to paste"
    print u'--------------点击窗口左上角的白色按钮-->编辑可进行粘贴-------------\n'
    # Prompt: "Send an email reminder once a course is grabbed? (y or n)"
    flag = raw_input(u'抢到课后是否进行邮件提醒(y or n): '.encode(self.coding)).strip()
    if flag == 'y' or flag == 'Y':
        # Prompt: "Please enter your email address"
        self._to = raw_input(u'请输入邮箱: '.encode(self.coding)).strip()
    try:
        # Read the saved {student id: password} mapping from the desktop.
        user = demjson.decode_file(get_desktop() + '\student.txt')
        for key in user:
            self.stuNum = key
            self.password = user[key]
    except Exception:
        # Prompts: "Please enter your student id" / "Please enter your password"
        self.stuNum = raw_input(u'请输入学号: '.encode(self.coding)).strip()
        print u'请输入密码: ',
        self.password = pwd_input()
        self.password = hashlib.md5(self.password.encode('utf-8')).hexdigest().upper()
def find_boson_entity(doc_id, phrase_id):
    if doc_id.strip() == "":
        return ""
    print "#######################"
    print "find_boson_entity: ", doc_id, phrase_id
    print "#######################"
    curdir = dirname(__file__)
    parentdir = dirname(curdir)
    doc_path = os.path.join(parentdir, "boson_cache", doc_id)
    try:
        boson_json = demjson.decode_file(doc_path, encoding="utf8")
        words = boson_json[0]["word"]
        # format: [12, 16, "company_name"]
        # each entity is assembled by merging the words between start and end
        entity_infos = boson_json[0]["entity"]
        pos_tags = boson_json[0]["tag"]
        entities = []
        startpos = []
        endpos = []
        entity_unicode_lens = []
        ner_tags = []
    except Exception, e:
        print doc_id
        print e.message  # message is an attribute, not a callable
def _output2():
    json_ver = '/Users/joli/Downloads/fy/assets/game/resource/1_00.58_version.json'
    conf_src = '/Users/joli/Downloads/fy/assets/game/resource/config'
    conf_dst = '/Users/joli/Downloads/fy/assets/game/resource/config'
    ver_dict = {}
    for k, v in json.loads(FS.read_text(json_ver)).items():
        ver_dict[v] = k
    for f in FS.walk_files(conf_src, ewhites=['.json']):
        # f = '/Users/joli/Downloads/fy/assets/game/resource/config/4643a093.json'
        config = demjson.decode_file(f)
        if not config:
            print('empty config:' + f)
            continue
        vf = ver_dict[FS.filename(f)]
        df = os.path.join(conf_dst, vf[0:vf.rfind('.')] + '.xlsx')
        print(f)
        print(vf)
        print(df)
        wb = openpyxl.Workbook()
        if vf.startswith('global/') or 'error_code_data' in vf:
            print(config)
            _build_sheet1(wb.active, config, FS.filename(df), flat=True)
        else:
            _build_book1(wb, config, FS.filename(df))
        FS.make_parent(df)
        wb.save(df)
def load_pipeline_json(self, url):
    """
    Loads the Pipeline from the url location and parses all data
    to create the corresponding executed_cats

    Args:
        | *url*: location identifier for the pipeline.json
    """
    try:
        json = demjson.decode_file(url, "UTF-8")
    except demjson.JSONDecodeError as e:
        # Report the decode error itself and stop; continuing would leave json unbound.
        print("Unable to parse " + url + " trace: " + str(e))
        return

    for position, alg in enumerate(json):
        alg_name = alg[0]
        alg_attributes = alg[1]
        cat_name = alg_attributes["type"]
        self.new_category(position, cat_name, alg_name)
        active_alg = self.executed_cats[position].active_algorithm
        active_alg.store_image = alg_attributes["store_image"]
        for name in alg_attributes.keys():
            if name == "type" or name == "store_image":
                continue
            value = alg_attributes[name]
            alg_ui_elem = active_alg.find_ui_element(name)
            if alg_ui_elem:
                alg_ui_elem.set_value(value)

    self.pipeline_path = url
    # reset current cache
    self.set_cache()
def load_tag(dep_config):
    get_tag(dep_config["giturl"], dep_config["tag"], location, key_file_path)
    return demjson.decode_file(
        os.path.join(location,
                     get_project_name_from_url(dep_config["giturl"]),
                     "deploy",
                     dep_config["config_name"] + ".cfg"))
def load_file(self, filename):
    """
    Initialize the internal JSON object from the given file name.

    :param filename:
    :return:
    """
    self.__json_obj = demjson.decode_file(filename)
def parse_file_intrinsically(path):
    try:
        o = json.decode_file(path, strict=True)
    except Exception:
        # Any parse failure is fatal here; exit with a non-zero status.
        sys.exit(1)
    return o
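# Hedged sketch of demjson's strict flag as used above (assumption: strict=True
# makes the parser reject non-standard JSON extensions such as comments or
# unquoted keys; the file name below is illustrative):
import sys
import demjson

try:
    cfg = demjson.decode_file("settings.json", strict=True)
except (IOError, demjson.JSONDecodeError) as e:
    print("invalid JSON: {}".format(e))
    sys.exit(1)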
def load_db(self, db_file):
    db = demjson.decode_file(db_file)
    for ver_code in db:
        self.db_list.append(StrictVersion(ver_code['version']))
        self.db_dict[ver_code['version']] = ver_code['code']
    self.db_list.sort()
    self.db_loaded = True
def get_data(filepath):
    assert_that(filepath).exists()
    res = demjson.decode_file(filepath)
    assert_that(res).is_instance_of(list).is_not_empty()
    assert_that(res[0]).contains_key('collection', 'items', 'keys')
    return res
def parse_content(self):
    self.data = {}
    if mz_json == "demjson":
        self.data = demjson.decode_file(self.conf_name)
        mzpp(self.data)
    else:
        # stdlib json
        with open(self.conf_name, 'r', encoding='utf-8') as _f:
            self.data = json.loads(_f.read())
        mzpp(self.data)
    return self.data
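# Hedged sketch of the backend-selection flag used above: mz_json presumably
# records which JSON module imported successfully. An illustrative pattern,
# not the project's actual import code:
try:
    import demjson
    mz_json = "demjson"
except ImportError:
    import json
    mz_json = "json"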
def test_build():
    assert_that(
        os.path.join(test_root(), "project1", "deploy", "roles", "main",
                     "defaults", "main.yml")).exists()
    build_deploy_script("project1", "dev")
    assert_that(
        demjson.decode_file(
            os.path.join(test_root(), "project1", "deploy",
                         "dev.cfg"))["predefined_variables"]).contains_entry(
                             {"name1": "a"}, {"name2": "b"})
def test_build_gate():
    try:
        init_root(test_root())
        build_gate(project_name, config_name, True)
    finally:
        init_root(os.getcwd())
    assert_that(os.path.join(test_root(), project_name, "deploy", "roles", "auth_db")).exists()
    assert_that(os.path.join(test_root(), project_name, "deploy", "roles", "microservice_gate")).exists()
    config = demjson.decode_file(os.path.join(test_root(), project_name, "deploy", "dev.cfg"))
    assert_that(config["roles_seq"]).contains("auth_db").contains("microservice_gate")
def build_deploy_script_internal(project_name, config_name, only_structure=False, remote_dict=None):
    deploy_path = lambda: get_deploy_path(project_name, config_name)
    deployInfo = DeployInfo(deploy_path())

    def yml_file_folder():
        return put_folder(os.path.abspath(deploy_path()))

    def get_roles_data():
        return roles_load(logger.title("config_path").debug(get_config_path(project_name, config_name)))

    def write_playbook(content):
        open(put_file(deployInfo.playbook_path()), 'w').write(content)

    # write data to file
    write_playbook(
        roles_build(
            get_roles_data(),
            remote_host=None if remote_dict is None else 'remote',
            remote_name=remote_dict["remote_user"] if remote_dict else None))

    # build link for role folders
    roles_link(get_roles_data(),
               logger.title("link_root").debug(yml_file_folder()))

    # link src folder to deploy/roles/main/files/src for deployment
    link_src_to_deploy(get_roles_data())

    def default_vals():
        return all_defaults(yml_file_folder(), [role["name"] for role in get_roles_data()])

    # build inventory files on roles
    write_defaults(deployInfo.host_file_path(), default_vals(),
                   remote_addr=None if remote_dict is None else remote_dict["remote_addr"])

    # write the defaults to cfg file
    (F(lambda config_path: demjson.decode_file(config_path)) >>
     F(lambda new_vals, old_json: update_dict(old_json, new_vals),
       {} if only_structure else {"predefined_variables": default_vals()}) >>
     F(lambda json_data: json.dumps(json_data, indent=4, ensure_ascii=False)) >>
     F(lambda content: open(get_config_path(project_name, config_name), 'w').write(content))
     )(get_config_path(project_name, config_name))

    def build_ansible_cfg():
        if remote_dict is None:
            return
        open(get_deploy_ansible_cfg_path(project_name, config_name), 'w').write('''[defaults]
remote_user={remote_user}
private_key_file={key_file}
host_key_checking=False
'''.format(remote_user=remote_dict["remote_user"], key_file=remote_dict["key"]))

    build_ansible_cfg()
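# The "write the defaults to cfg file" chain above uses fn.py's F wrapper,
# where F(f) >> F(g) composes left-to-right ((F(f) >> F(g))(x) == g(f(x))) and
# F(f, a) partially applies a as the first argument. A hedged minimal sketch of
# the same read -> update -> serialize shape; the path and update step are
# illustrative:
import json
import demjson
from fn import F

read_update_dump = (F(demjson.decode_file) >>
                    F(lambda d: dict(d, updated=True)) >>
                    F(lambda d: json.dumps(d, indent=4)))
# content = read_update_dump("deploy/dev.cfg")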
def _read_json_from_file(filename):
    """
    Import the JSON data from target file.

    :param filename: (str) Target file
    :return: (dict) JSON data
    """
    d = {}
    try:
        # Load json into dictionary
        d = demjson.decode_file(filename)
    except FileNotFoundError:
        print("LiPD object: Load(). file not found")
    return d
def load_all_config(config_path):
    '''
    config_path: the path of the banyan configuration file
    '''
    def get_cfg(dep_json):
        def load_cfg(cfg_path):
            return demjson.decode_file(cfg_path)
        return (F(get_config_path) >> F(load_cfg))(
            dep_json["project_name"],
            dep_json["config_name"] if "config_name" in dep_json else None)

    def get_roles(cfg_json, project_path):
        return [{"project_name": os.path.split(project_path)[1],
                 "project_path": project_path,
                 "role_name": role_name}
                for role_name in cfg_json["roles_seq"]] \
            if "roles_seq" in __logger__.title("get_roles cf_json").debug(cfg_json) else []

    def handle(cfg_json, depended_roles, project_path):
        try:
            return [y for x in [handle(get_cfg(dep), depended_roles, get_project_path(dep["project_name"]))
                                for dep in cfg_json["dependencies"]] for y in x] \
                + get_roles(cfg_json, __logger__.title("get_roles_path_recursive").debug(project_path)) \
                if "dependencies" in cfg_json \
                else depended_roles + get_roles(
                    __logger__.title("no dep json").debug(cfg_json),
                    __logger__.title("no dep path").debug(project_path))
        except Exception:
            __logger__.error("error happen in handle with project_path:%s\ncfg_json:%s"
                             % (project_path, cfg_json))
            raise

    def put_all_db_first(roles):
        def get_db_roles():
            return list(filter(lambda n: is_db_role(n), roles))

        def get_non_db_roles():
            return list(filter(lambda n: not is_db_role(n), roles))

        return get_db_roles() + get_non_db_roles()

    return __logger__.title('load_all_config.result').debug(
        (F(handle) >> F(put_all_db_first))(
            demjson.decode_file(config_path),
            [],
            extract_project_path(config_path)))
def _output1():
    src = '/Users/joli/Downloads/configIOS_json190731'
    dst = '/Users/joli/Downloads/configIOS_excel'
    for filename in FS.walk_files(src, ewhites=['.json'], cut=len(src) + 1):
        print('------------------------------------', filename)
        name = FS.filename(filename)
        conf = demjson.decode_file(os.path.join(src, filename))
        if not conf:
            print('empty config')
            continue
        wb = openpyxl.Workbook()
        # print(wb.get_sheet_names())
        _build_book2(wb, conf, name)
        wb.save(os.path.join(dst, name + '.xlsx'))
def load_all_dependencies(config_path, handler):
    dependencies = lambda: "dependencies"

    def handle(dep_data):
        def handle_config(config_data):
            if dependencies() in config_data:
                [handle(dep) for dep in config_data[dependencies()]]
        handle_config(handler(dep_data))

    def handle_from_root(config_data):
        if dependencies() in config_data:
            [handle(dep) for dep in config_data["dependencies"]]

    handle_from_root(demjson.decode_file(config_path))
def test_init_config():
    project_name = "abc"
    config_name = "dev"
    init_config(project_name, config_name)
    json = demjson.decode_file(get_config_path(project_name, config_name))
    assert_that(json).contains_entry({"project_name": "abc"}) \
        .contains_entry({"predefined_variables": {}})
    assert_that(json["roles_seq"]).contains("role1", "role2")
    assert_that(json["dependencies"][0]).contains_entry({"project_name": "<project_name>"}) \
        .contains_entry({"git": "<git_url>"}) \
        .contains_entry({"config_name": "<configuration_name>"})
def gamecenter(self, gamecenter=None, fname=None):
    '''
    Parses gamecenter (json document) given a game_id and either a string or filename

    :param gamecenter(str):
    :param fname(str):
    :return: combined(dict): stats for home and away team
    '''
    parsed = None

    # can pass string or filename
    if gamecenter:
        try:
            parsed = demjson.decode(gamecenter)
        except:
            logging.exception('json parse from content failed')
    elif fname:
        try:
            parsed = demjson.decode_file(fname)
        except:
            logging.exception('json parse from filename failed')
    else:
        raise ValueError('must pass content or filename')

    if parsed:
        game_id = list(parsed.keys())[0]
        home_team_stats = self._gamecenter_team(parsed[game_id]['home']['stats'])
        away_team_stats = self._gamecenter_team(parsed[game_id]['away']['stats'])
        # use player_id as a key, value is entire player dictionary (with player_id as duplicate)
        combined = self._merge_dicts(home_team_stats, away_team_stats)
    else:
        raise ValueError('parsed should not be null')

    '''
    puntret: avg, lng, lngtd, name, ret, tds
    fumbles: lost, name, rcv, tot, trcv, yds
    defense: ast, ffum, int, name, sk, tkl
    rushing: att, lng, lngtd, name, tds, twopta, twoptm, yds
    receiving: lng, lngtd, name, rec, tds, twopta, twoptm, yds
    passing: att, cmp, ints, name, tds, twopta, twoptm, yds
    '''
    return combined
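# The method above accepts either raw JSON text or a path; demjson exposes both
# entry points. A hedged minimal sketch (the game id and file name here are
# illustrative, not real data):
import demjson

from_text = demjson.decode('{"2013090800": {"home": {"stats": {}}, "away": {"stats": {}}}}')
# from_file = demjson.decode_file("gamecenter.json")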
def test1(self):
    json_ltp = demjson.decode_file("./" + "ltp_0_test.json")
    data_ltp = json_ltp["data"]
    number_of_phrase = len(data_ltp)
    for j in range(3):
        ltp_entities_info = nerstat.find_entity_info(data_ltp[j], "ltp")
        entities = ltp_entities_info["entity"]
        ner_tags = ltp_entities_info["entity_type"]
        entity_unicode_lens = ltp_entities_info["entity_unicode_len"]
        startposes = ltp_entities_info["startpos"]
        endposes = ltp_entities_info["endpos"]
        num = len(entities)
        for i in range(num):
            print entities[i].decode("utf-8")
            print ner_tags[i]
            print startposes[i], endposes[i], entity_unicode_lens[i]
def test1(self):
    json_fool = demjson.decode_file("./" + "foolnltk_0_test.json")
    data_fool = json_fool["data"]
    number_of_phrase = len(data_fool)
    for j in range(3):
        myentity = nerstat.find_foolnltk_entity(data_fool[j])
        entities = myentity["entity"]
        ner_tags = myentity["entity_type"]
        entity_unicode_lens = myentity["entity_unicode_len"]
        startposes = myentity["startpos"]
        endposes = myentity["endpos"]
        num = len(entities)
        for i in range(num):
            print entities[i].decode("utf-8")
            print ner_tags[i]
            print startposes[i], endposes[i], entity_unicode_lens[i]
def restore(cursor, i):
    # Decode
    logging.log(logging.INFO, "Child Process %d begin decode file[bak%d.txt]...", i, i)
    data = json.decode_file("bak" + str(i) + ".txt")
    logging.log(logging.INFO, "Child Process %d decode file end, begin insert data to database...", i)
    # Insert
    for v in data:
        sql = insertSql % (conf["table"], v[0], v[1], v[2])
        try:
            # logging.log(logging.DEBUG, i[0], i[1], i[2])
            cursor.execute(sql)
        except:
            logging.log(logging.ERROR, "InsertError: %s", sql)
            raise
    logging.log(logging.INFO, "Child Process %d exit...", i)
def getEmbedData(self, embedData, jsPost) -> str:
    # Issue #881
    content_provider_path = os.path.abspath(
        os.path.dirname(sys.executable) + os.sep + "content_provider.json")
    if not os.path.exists(content_provider_path):
        content_provider_path = os.path.abspath("./content_provider.json")
    if not os.path.exists(content_provider_path):
        raise PixivException(
            f"Missing content_provider.json, please get it from https://github.com/Nandaka/PixivUtil2/blob/master/content_provider.json! Expected location => {content_provider_path}",
            errorCode=PixivException.MISSING_CONFIG,
            htmlPage=None)

    cfg = demjson.decode_file(content_provider_path)
    embed_cfg = cfg["embedConfig"]
    current_provider = embedData["serviceProvider"]

    if current_provider in embed_cfg:
        if embed_cfg[current_provider]["ignore"]:
            return ""

        content_id = None
        for key in embed_cfg[current_provider]["keys"]:
            if key in embedData:
                content_id = embedData[key]
                break
        if content_id is not None and len(content_id) > 0:
            content_format = embed_cfg[current_provider]["format"]
            return content_format.format(content_id)
        else:
            msg = "Empty content_id for embed provider = {0} for post = {1}, please update content_provider.json."
            raise PixivException(msg.format(embedData["serviceProvider"], self.imageId),
                                 errorCode=9999,
                                 htmlPage=jsPost)
    else:
        msg = "Unsupported embed provider = {0} for post = {1}, please update content_provider.json."
        raise PixivException(msg.format(embedData["serviceProvider"], self.imageId),
                             errorCode=9999,
                             htmlPage=jsPost)
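# Hedged sketch of the shape content_provider.json must have for the lookups
# above ("ignore", "keys", "format" per provider). The provider name and URL
# pattern here are illustrative, not the real file contents:
#
# {
#   "embedConfig": {
#     "example-provider": {
#       "ignore": false,
#       "keys": ["contentId", "id"],
#       "format": "https://example.com/embed/{0}"
#     }
#   }
# }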
async def f(ctx):
    myqq = '[CQ:at,qq=1792174971] '
    msg = str(ctx['message'])
    if myqq in msg:
        db = pymysql.connect("localhost", "baiuu", "clml159", "teach")
        cursor = db.cursor()
        msg = msg.replace(myqq, '')
        QQ = str(ctx['user_id'])
        if 'group_id' in ctx:
            QG = str(ctx['group_id'])
        else:
            QG = 'NULL'
        sql = "SELECT * FROM teach WHERE teach='" + msg + "' AND QG='" + str(QG) + "'"
        try:
            cursor.execute(sql)
            j_o = cursor.fetchone()
        except:
            # Roll back on error, close the database connection, and stop here
            # (continuing would leave j_o unbound).
            db.rollback()
            db.close()
            return
        if str(j_o) == "None":
            # '该词条不存在' = "this entry does not exist"
            if QG == 'NULL':
                await bot.send_private_msg(user_id=QQ, message=msg + '该词条不存在')
            else:
                await bot.send_group_msg(group_id=QG, message=msg + '该词条不存在')
        else:
            d = path.dirname(__file__)
            jsonnew = d + "\\json"
            json_file = jsonnew + "\\" + j_o[1]
            o = demjson.decode_file(json_file.replace('\\', '/'))
            if QG == 'NULL':
                await bot.send_private_msg(user_id=QQ, message=o)
            else:
                await bot.send_group_msg(group_id=QG, message=o)
def generate_post_data(source_data):
    whether_signed = False
    model_data = json.decode_file("./json/model.json")
    current_date = get_current_date(TIME_ZONE)
    # current_date_time = current_date + ' 00:00:00'
    yesterday_date = get_yesterday_date(TIME_ZONE)
    yesterday_date_time = yesterday_date + ' 09:00:00'
    # current_timestamp = get_current_stamp()

    store_id = "535b1ef6-bf51-4d4c-9ae4-5a90cdc4"

    # if you didn't click the "暂存" ("save draft") button
    if store_id + "_record" in source_data["body"]["dataStores"]:
        source_record = source_data["body"]["dataStores"][store_id + "_record"]["rowSet"]["primary"][0]
        model_data["body"]["dataStores"][store_id + "_record"] = \
            source_data["body"]["dataStores"][store_id + "_record"]
        print("today is " + source_record['SBSJ_STR'][0:10])
        if source_record['SBSJ_STR'][0:10] == current_date:
            whether_signed = True
        # model_data["body"]["dataStores"]["535b1ef6-bf51-4d4c-9ae4-5a90cdc4_record"]["rowSet"]["primary"][0]["CLSJ"] = current_timestamp
        # model_data["body"]["dataStores"]["535b1ef6-bf51-4d4c-9ae4-5a90cdc4_record"]["rowSet"]["primary"][0]["SBSJ"] = current_timestamp
    else:
        source_record = source_data["body"]["dataStores"][store_id]["rowSet"]["primary"][0]
        del model_data["body"]["dataStores"][store_id + "_record"]
        del model_data["body"]["dataStores"][store_id]["rowSet"]["primary"][0]["_o"]

    zh = source_record["ZH"]                    # student id
    xm = source_record["XM"]                    # student name
    xsxb = source_record["XSXB"]                # student sex
    nl = source_record["NL"]                    # student age
    szdw = source_record["SZDW"]                # student school
    zymc = source_record["ZYMC"]                # student major
    xslx = source_record["XSLX"]                # student type
    zxsj = source_record["ZXSJ"]                # student phone number
    sbsj = current_date                         # date
    fdyxmx = source_record["FDYXMX"]            # teacher name
    jjlxrxm = source_record["JJLXRXM"]          # parent name
    jjlxrdh = source_record["JJLXRDH"]          # parent phone number
    jjlxrybrgx = source_record["JJLXRYBRGX"]    # parent rel.
    lxzt = source_record["LXZT"]                # current city
    dqsfjjia = source_record["DQSFJJIA"]        # at home or not?
    sheng_text = source_record["sheng_TEXT"]    # province text
    sheng = source_record["sheng"]              # province
    shi_text = source_record["shi_TEXT"]        # city text
    shi = source_record["shi"]                  # city
    quxian_text = source_record["quxian_TEXT"]  # county text
    quxian = source_record["quxian"]            # county
    dqjzdz = source_record["DQJZDZ"]            # location
    clsj = yesterday_date_time                  # temp. time

    # SYS_USER = source_vars[0]["value"]   # student name
    # SYS_UNIT = source_vars[1]["value"]   # student unit
    # SYS_DATE = current_timestamp         # current timestamp
    # ID_NUMBER = source_vars[3]["value"]  # student id
    # USER_NAME = source_vars[4]["value"]  # student name
    # XB = source_vars[5]["value"]         # student sex
    # SZYX = source_vars[6]["value"]       # student school
    # ZYMC = source_vars[7]["value"]       # student major
    # MOBILE = source_vars[8]["value"]     # mobile

    # Fill the model record in one pass instead of repeating the full
    # dataStores path for every field.
    primary = model_data["body"]["dataStores"][store_id]["rowSet"]["primary"][0]
    primary["ZH"] = zh
    primary["XM"] = xm
    primary["XSXB"] = xsxb
    primary["NL"] = nl
    primary["SZDW"] = szdw
    primary["ZYMC"] = zymc
    primary["XSLX"] = xslx
    primary["ZXSJ"] = zxsj
    primary["SBSJ"] = sbsj
    primary["FDYXMX"] = fdyxmx
    primary["JJLXRXM"] = jjlxrxm
    primary["JJLXRDH"] = jjlxrdh
    primary["JJLXRYBRGX"] = jjlxrybrgx
    primary["LXZT"] = lxzt
    primary["DQSFJJIA"] = dqsfjjia
    primary["sheng_TEXT"] = sheng_text
    primary["sheng"] = sheng
    primary["shi_TEXT"] = shi_text
    primary["shi"] = shi
    primary["quxian_TEXT"] = quxian_text
    primary["quxian"] = quxian
    primary["DQJZDZ"] = dqjzdz
    primary["CLSJ"] = clsj

    model_data["body"]["dataStores"]["variable"] = source_data["body"]["dataStores"]["variable"]
    model_data["body"]["parameters"] = source_data["body"]["parameters"]
    json.encode_to_file("./json/example.json", model_data, overwrite=True)
    return model_data, whether_signed
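# generate_post_data reads and writes with demjson (imported as `json` in that
# project): decode_file parses a file, encode_to_file serializes back, and
# overwrite=True replaces any existing file. A minimal round-trip sketch using
# the same paths as above:
import demjson

model = demjson.decode_file("./json/model.json")
demjson.encode_to_file("./json/example.json", model, overwrite=True)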
import sys

import demjson

from graph import Graph
from reverse_complementary import reverse_complementary
from process import process

dir = './data/' + sys.argv[1] + '/'
s = open(dir + 'seq.txt')
seq = s.readline().replace('\n', '')
len_of_seq = len(seq)

para = demjson.decode_file(dir + './param.json', None)
para['len_of_seq'] = len_of_seq
para['dir'] = dir

with open(dir + '1.txt') as f1:
    with open(dir + '2.txt') as f2:
        process(f1, f2, seq, demjson.encode(para))

# connections = [(1, 'B'), (3, 'C'), (f1, 'D'),
#                ('2', 'D'), ('E', 'F'), ('F', 'C')]
# g = Graph(connections, False)
# print(g._graph)
#
# T = ahocorasick.Automaton()
def open_links():
    # load the saved links; links.json is expected to exist in the cwd
    return demjson.decode_file("links.json")
def _decode_file(self, file):
    # returns None implicitly when the path is not a regular file
    if os.path.isfile(file):
        return demjson.decode_file(file)