def recursive_find_comment(comments):
    for comment in comments:
        comment = objid_to_str(
            comment, ["_id", "user_id", "audit_user_id"])
        comment["date"] = time_to_utcdate(
            time_stamp=comment["issue_time"], tformat="%Y-%m-%d %H:%M")
        if current_user.is_authenticated:
            r = mdbs["user"].db.user_like.find_one(
                {"user_id": current_user.str_id,
                 "type": "comment",
                 "values": comment["_id"]})
            if r:
                comment["like_it_already"] = True

        # All replies under this comment
        query_conditions = {}
        query_conditions['issued'] = 1
        query_conditions['is_delete'] = 0
        query_conditions['audit_score'] = {
            "$lt": get_config("content_inspection", "ALLEGED_ILLEGAL_SCORE")}
        query_conditions["reply_id"] = comment["_id"]
        reply_comments = mdbs["web"].db.comment.find(
            query_conditions).sort([("issue_time", -1)])
        if reply_comments.count(True):
            comment["reply"] = objid_to_str(
                list(reply_comments), ["_id", "user_id", "audit_user_id"])
            comment["reply"] = recursive_find_comment(comment["reply"])
    return comments
def get_email_html(data):
    '''
    Get the HTML used for sending email.
    :param data: data rendered into the template with Jinja2.
        Format: {"title": "title",
                 "body": "body text, HTML tags allowed",
                 "other_info": "other info, HTML tags allowed"}
    :return: rendered HTML string
    '''
    # Look up the current theme's email HTML template
    data["app_name"] = get_config("email", "APP_NAME")
    data["app_logo_url"] = get_config("email", "APP_LOG_URL")
    conf_site_url = get_config("site_config", "SITE_URL")
    if conf_site_url:
        # Prefer the configured site URL; otherwise fall back to the index route
        data["site_url"] = conf_site_url
    else:
        data["site_url"] = url_for("theme_view.index")
    data["utc_time"] = time_to_utcdate(time_stamp=time.time(),
                                       tformat="%Y-%m-%d %H:%M:%S")
    path = "{}/pages/module/email/send-temp.html".format(
        get_config("theme", "CURRENT_THEME_NAME"))
    absolute_path = os.path.abspath(
        "{}/{}".format(theme_view.template_folder, path))
    if os.path.isfile(absolute_path):
        html = render_template(path, data=data)
    else:
        # The theme does not provide this page; use the built-in system page
        path = "{}/module/email/send-temp.html".format(
            admin_view.template_folder)
        html = render_absolute_path_template(path, data=data)
    return html
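# --- Illustration (not part of the project) ---------------------------------
# A minimal sketch of the template/data pattern that get_email_html() relies
# on, using a made-up inline Jinja2 template instead of the theme's
# send-temp.html rendered through Flask. Only the dict keys shown in the
# docstring above are assumed.
from jinja2 import Template

_email_template = Template(
    "<h1>{{ data.title }}</h1>"
    "<div>{{ data.body | safe }}</div>"
    "<footer>{{ data.other_info | safe }} - {{ data.app_name }}</footer>"
)

_sample_data = {
    "title": "Abnormal login",
    "body": "Your account was signed in from a new location.",
    "other_info": "End",
    "app_name": "Example App",   # normally filled from get_config("email", "APP_NAME")
}
print(_email_template.render(data=_sample_data))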
def sitemap():
    """
    sitemap.xml
    :return:
    """
    ut = time.time()
    content = ""
    host_url = request.host_url
    for n in get_post_page_nums():
        content = """{content}
    <url>
        <loc>{domain}st-html/posts/{page}</loc>
        <lastmod>{date}</lastmod>
        <changefreq>{freq}</changefreq>
        <priority>{priority}</priority>
    </url>""".format(content=content,
                     page=n,
                     domain=host_url,
                     date=time_to_utcdate(ut, "%Y-%m-%d"),
                     freq="daily",
                     priority="0.6")
    content = """<?xml version="1.0" encoding="utf-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">{content}
</urlset>""".format(content=content)
    absolute_path = os.path.abspath(
        "{}/sitemap.xml".format(static_html_view.template_folder))
    with open(absolute_path, "w") as wf:
        wf.write(content)
    return send_file(absolute_path)
def upload_file(file=None, prefix="", file_name=None, file_format=None,
                fetch_url=False, tailoring=None):
    '''
    Save an uploaded file.
    :param file: the uploaded file object
    :param prefix: prefix added to the file name; if it contains "/",
        directories are created accordingly
    :param file_name: if given as e.g. post-img/xxxx-xxx-xxx.jpg,
        directories are created from the "/" parts as well
    :param file_format: file format, e.g. png
    :param fetch_url: if given, download the file from this URL instead
    :param tailoring: image cropping parameters; not recommended, it is
        better to process the image on the client before uploading
    :return:
    '''
    # Custom file name
    if file_name:
        filename = '{}.{}'.format(file_name, file_format)
    else:
        filename = '{}-{}.{}'.format(
            time_to_utcdate(time_stamp=time.time()), uuid1(), file_format)
    # Local server
    if prefix:
        filename = "{}{}".format(prefix, filename)

    # Absolute path for saving the file
    save_file_path = "{}/{}/{}".format(
        STATIC_PATH, get_config("upload", "SAVE_DIR"),
        filename).replace("//", "/")
    # Save the file on the local server
    save_dir = os.path.split(save_file_path)[0]
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    if fetch_url:
        urllib.request.urlretrieve(fetch_url, save_file_path)
    elif file:
        file.save(save_file_path)
    if tailoring:
        # Crop the image
        try:
            im = Image.open(save_file_path)
            im = im.rotate(tailoring['rotate'])
            region = im.crop((tailoring["x"], tailoring["y"],
                              tailoring["x"] + tailoring["width"],
                              tailoring["y"] + tailoring["height"]))
            region.save(save_file_path)
        except Exception:
            pass

    # Check for a file storage plugin
    data = plugin_manager.call_plug(hook_name="file_storage",
                                    action="upload",
                                    localfile_path=save_file_path,
                                    filename=filename)
    if data != "__no_plugin__":
        # Delete the local file after uploading
        local_file_del(save_file_path)
        result = data
    else:
        result = {"key": filename, "bucket_name": None,
                  "d": None, "type": "local"}
    return result
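# --- Illustration (not part of the project) ---------------------------------
# A self-contained sketch of the default naming scheme used by upload_file():
# "<prefix><utc-timestamp>-<uuid>.<format>", with directories created from any
# "/" in the name. build_upload_name, the timestamp format and the
# /tmp/uploads directory are made up for the example.
import os
import time
from uuid import uuid1
from datetime import datetime, timezone


def build_upload_name(prefix="", file_format="png"):
    # Assumed to mirror time_to_utcdate()'s compact UTC timestamp output
    stamp = datetime.fromtimestamp(time.time(), tz=timezone.utc).strftime(
        "%Y%m%d%H%M%S")
    return "{}{}-{}.{}".format(prefix, stamp, uuid1(), file_format)


_name = build_upload_name(prefix="post-img/", file_format="jpg")
_save_path = os.path.join("/tmp/uploads", _name)
os.makedirs(os.path.dirname(_save_path), exist_ok=True)
print(_save_path)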
def fileup_base_64(uploaded_files, file_name=None, prefix=""):
    '''
    Upload files encoded as base64.
    :param uploaded_files: list of base64 data URIs
    :param file_name: custom file name (optional)
    :param prefix: prefix added to the file name
    :return:
    '''
    if not uploaded_files:
        return None
    keys = []
    for file_base in uploaded_files:
        if file_base:
            # e.g. data:image/jpeg;base64,...
            file_format = file_base.split(";")[0].split("/")[-1]
            imgdata = base64.b64decode(file_base.split(",")[-1])
            if file_name:
                filename = '{}.{}'.format(file_name, file_format)
            else:
                filename = '{}_{}.{}'.format(
                    time_to_utcdate(time_stamp=time.time(),
                                    tformat="%Y%m%d%H%M%S"),
                    uuid1(), file_format)
            # Local server
            if prefix:
                filename = "{}{}".format(prefix, filename)
            # Absolute path for saving the file
            save_file_path = "{}/{}/{}".format(
                STATIC_PATH, get_config("upload", "SAVE_DIR"),
                filename).replace("//", "/")
            # Save the file on the local server
            save_dir = os.path.split(save_file_path)[0]
            if not os.path.exists(save_dir):
                os.makedirs(save_dir)
            with open(save_file_path, 'wb') as file_w:
                file_w.write(imgdata)

            # Check for a file storage plugin
            data = plugin_manager.call_plug(hook_name="file_storage",
                                            action="upload",
                                            localfile_path=save_file_path,
                                            filename=filename)
            if data != "__no_plugin__":
                # Delete the local file after uploading
                local_file_del(save_file_path)
                key = data
            else:
                key = {"key": filename, "bucket_name": None,
                       "d": None, "type": "local"}
        else:
            key = None
        keys.append(key)
    call_file_detection(keys)
    return keys
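# --- Illustration (not part of the project) ---------------------------------
# How fileup_base_64() picks apart a base64 data URI with plain string
# splitting: the format comes from the "data:image/<fmt>" header and the
# payload follows the first comma. The sample URI below is made up.
import base64

_data_uri = "data:image/png;base64," + base64.b64encode(b"\x89PNG fake bytes").decode()

_file_format = _data_uri.split(";")[0].split("/")[-1]   # -> "png"
_payload = base64.b64decode(_data_uri.split(",")[-1])   # -> raw file bytes

print(_file_format, len(_payload))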
def search_logs(user_id, keyword):
    ut = time.time()
    month = time_to_utcdate(ut, "%Y%m")
    day = time_to_utcdate(ut, "%Y%m%d")
    mdbs["web"].dbs["search_logs"].update_one(
        {
            "user_id": user_id,
            "search": keyword
        },
        {
            "$inc": {"num_of_search": 1},
            "$addToSet": {"days": day, "months": month},
            "$set": {
                "lasted_time": ut,
                "status": "normal"
            }
        },
        upsert=True
    )
def switch_theme():
    '''
    Switch the current theme.
    :return:
    '''
    theme_name = request.argget.all('theme_name')
    path = os.path.join(THEME_TEMPLATE_FOLDER, theme_name.strip())
    if not os.path.exists(path):
        data = {
            "msg": gettext("Theme does not exist"),
            "msg_type": "e",
            "http_status": 400
        }
        return data
    s, r = verify_theme(path, theme_name, theme_name)
    if s:
        # Update the theme configuration
        mdb_sys.db.sys_config.update_many(
            {
                "project": "theme",
                "key": "CURRENT_THEME_NAME"
            },
            {
                "$set": {
                    "value": theme_name.strip(),
                    "update_time": time.time()
                }
            },
            upsert=True)
        mdb_sys.db.sys_config.update_many(
            {
                "project": "site_config",
                "key": "STATIC_FILE_VERSION"
            },
            {
                "$set": {
                    "value": int(time_to_utcdate(time.time(), "%Y%m%d%H%M%S")),
                    "update_time": time.time()
                }
            },
            upsert=True)
        cache.delete(CONFIG_CACHE_KEY)
        data = {
            "msg": gettext("Switch success"),
            "msg_type": "s",
            "http_status": 201
        }
    else:
        data = {"msg": r, "msg_type": "e", "http_status": 400}
    return data
def update_config_file(mdb_sys, *args, **kwargs):
    '''
    On site startup, synchronize the configuration stored in the database
    with the configuration file (config, mdb_sys, redis).
    '''
    local_config = deepcopy(CONFIG)
    overwrite_db = OVERWRITE_DB
    version_info = mdb_sys.db.sys_config.find_one(
        {"new_version": {"$exists": True}})
    if not version_info:
        now_version = time_to_utcdate(time_stamp=now_time,
                                      tformat="%Y_%m_%d_%H_%M_%S")
        version_uses = {
            "new_version": now_version,
            "used_versions": [now_version],
            "update_time": now_time
        }
        mdb_sys.db.sys_config.insert_one(version_uses)
        web_start_log.info("Initialize the sys_config version info")
    else:
        now_version = version_info["new_version"]

    if version_info and not overwrite_db:
        # Look up the configuration version of the current web host
        cur_h_version_info = mdb_sys.db.sys_host.find_one({
            "type": "web",
            "host_info.local_ip": host_info["local_ip"]
        })
        if not cur_h_version_info:
            cur_h_version_info = {
                "type": "web",
                "host_info": host_info,
                "conf_version": version_info["new_version"],
                "switch_conf_version": None,
                "disable_update_conf": 0
            }
            mdb_sys.db.sys_host.insert_one(cur_h_version_info)
            web_start_log.info("Initialize the host version data")

        if cur_h_version_info["switch_conf_version"] or cur_h_version_info[
                "disable_update_conf"]:
            # Merge the database config with the local config, keeping both
            # local and database keys, so the site version can be rolled back
            if cur_h_version_info[
                    "switch_conf_version"] and not cur_h_version_info[
                    "disable_update_conf"]:
                # Switch to another version
                now_version = cur_h_version_info["switch_conf_version"]
            else:
                # Updates disabled
                now_version = cur_h_version_info["conf_version"]
            confs = mdb_sys.db.sys_config.find({"conf_version": now_version})
            if confs.count(True):
                for conf in confs:
                    if not re.search(r"^__.*__$", conf["key"]):
                        if not conf["project"] in local_config:
                            local_config[conf["project"]] = {}
                        if not conf["key"] in local_config[conf["project"]]:
                            local_config[conf["project"]][conf["key"]] = {
                                "value": conf["value"],
                                "type": conf["type"],
                                "info": conf["info"]
                            }
                        else:
                            local_config[conf["project"]][
                                conf["key"]]["value"] = conf["value"]
                web_start_log.info(
                    "Config rollback:[db to file] Update version info")
            else:
                web_start_log.error(
                    "Config rollback:[db to file] Rollback failure")
                return False
        else:
            # Merge the latest database config with the local config,
            # keeping only local keys and deleting the rest
            now_version = version_info["new_version"]
            confs = mdb_sys.db.sys_config.find({"conf_version": now_version})
            if confs.count(True):
                for conf in confs:
                    is_cft = re.search(r"^__.*__$", conf["key"])
                    if not is_cft and conf["project"] in local_config.keys(
                    ) and conf["key"] in local_config[conf["project"]].keys():
                        local_config[conf["project"]][
                            conf["key"]]["value"] = conf["value"]
                        web_start_log.info(
                            "Config merge:[db to file] {} {}".format(
                                conf["project"], conf["key"]))
                    else:
                        mdb_sys.db.sys_config.delete_one({"_id": conf["_id"]})
                        web_start_log.info("Remove the config:{} {}".format(
                            conf["project"], conf["key"]))
            else:
                web_start_log.error("Config merge:[db to file] Merger failure")
                return False
    else:
        web_start_log.info(
            "**Local configuration directly covering the latest edition of"
            " the configuration database")

    r = push_to_db(mdb_sys,
                   local_config=deepcopy(local_config),
                   now_version=now_version)
    if not r:
        web_start_log.error("Config update:[file to db] Push failure")
        return False

    # Write the configuration file
    info = '''#!/usr/bin/env python\n# -*-coding:utf-8-*-\n__author__ = "Allen Woo"\n'''
    doc = "__readme__='''{}'''\n".format(__readme__)
    # write config.py
    temp_conf = str(json.dumps(local_config, indent=4, ensure_ascii=False))
    wf = open("{}/apps/configs/config.py".format(PROJECT_PATH), "wb")
    wf.write(bytes(info, "utf-8"))
    wf.write(bytes(doc, "utf-8"))
    # Reset the flag so that the database configuration keeps being synced to this file
    wf.write(bytes(
        "# Danger: If True, the database configuration data will be overwritten\n",
        "utf-8"))
    wf.write(bytes(
        "# Danger: if True, this file's configuration will overwrite the"
        " configuration saved in the database\n", "utf-8"))
    wf.write(bytes("OVERWRITE_DB = False\n", "utf-8"))
    wf.write(bytes("CONFIG = ", "utf-8"))
    wf.write(bytes(
        temp_conf.replace("false", "False").replace(
            "true", "True").replace("null", "None"),
        "utf-8"))
    wf.close()
    web_start_log.info("Configuration updates and merge is complete")
    return True
def login_log(user, client):
    '''
    Login log handling.
    :param user: user object instance
    :return:
    '''
    # Update the login log
    login_info = {
        'time': time.time(),
        'ip': request.remote_addr,
        'geo': reader_city(request.remote_addr),
        'client': client
    }
    user_login_log = mdb_user.db.user_login_log.find_one(
        {'user_id': user.str_id})
    if user_login_log and "login_info" in user_login_log:
        login_infos = user_login_log["login_info"]
    else:
        login_infos = []
    login_infos.append(login_info)
    than_num = len(login_infos) - get_config("weblogger",
                                             "SING_IN_LOG_KEEP_NUM")
    if than_num > 0:
        del login_infos[0:than_num]
    mdb_user.db.user_login_log.update_one(
        {'user_id': user.str_id},
        {"$set": {"pass_error": 0, "login_info": login_infos}},
        upsert=True)

    # Check whether the login region is abnormal
    anl = AbnormalLogin(login_infos[0:-1], login_info["geo"])
    abr = anl.area()
    if abr == "abnormal":
        # Send a notification email
        subject = gettext("Abnormal login")
        try:
            location = "{}/{}/{}".format(
                login_info["geo"]["subdivisions"]["name"],
                login_info["geo"]["country"]["name"],
                login_info["geo"]["continent"]["name"]
            )
        except Exception:
            location = None
        if location:
            body = gettext(
                "<b>Abnormal login</b><br> Your account <a>{}</a>, is logined in "
                "<span style='color:#483D8B'>{}</span> "
                "on {} [UTC Time].<br>").format(
                user.email, location,
                time_to_utcdate(tformat="%Y-%m-%d %H:%M:%S")
            )
            data = {
                "title": subject,
                "body": body,
                "other_info": gettext("End"),
            }
            html = get_email_html(data)
            send_email(subject=subject,
                       recipients=[user.email],
                       html_msg=html)
def sys_config_edit():
    key = request.argget.all('key')
    project = request.argget.all('project')
    value = request.argget.all('value')
    info = request.argget.all('info')
    version = mdb_sys.db.sys_config.find_one(
        {"new_version": {"$exists": True}}, {"_id": 0})
    s, r = arg_verify(reqargs=[("key", key), ("project", project)],
                      required=True)
    if not s:
        return r
    old_conf = mdb_sys.db.sys_config.find_one({
        "key": key,
        "project": project,
        "conf_version": version["new_version"]
    })
    if not old_conf:
        data = {
            "msg": gettext("There is no such data"),
            "msg_type": "e",
            "http_status": 404
        }
    else:
        try:
            if old_conf["type"] == "int" or old_conf["type"] == "binary":
                value = int(value)
            elif old_conf["type"] == "float":
                value = float(value)
            elif old_conf["type"] == "string":
                value = str(value)
            elif old_conf["type"] == "bool":
                try:
                    value = int(value)
                except Exception:
                    pass
                # Treat the string "FALSE" (any case) and empty/zero values as False
                if value and not (isinstance(value, str)
                                  and value.upper() == "FALSE"):
                    value = True
                else:
                    value = False
            elif old_conf["type"] == "list":
                # Convert to a list if it is not one already
                if not isinstance(value, list):
                    # "[]" -> list
                    value = json.loads(value)
                    if not isinstance(value, list):
                        # "aaa,bbb,ccc" -> ["aaa", "bbb", "ccc"]
                        value = value.strip(",").split(",")
                        value = [v.strip("\n") for v in value]
            elif old_conf["type"] == "dict":
                if not isinstance(value, dict):
                    value = json.loads(value)
                    if not isinstance(value, dict):
                        data = {
                            "msg": gettext(
                                'The format of the "value" errors, need a "{}" type'
                            ).format(old_conf["type"]),
                            "msg_type": "e",
                            "http_status": 400
                        }
                        return data
            elif old_conf["type"] == "tuple":
                if not isinstance(value, tuple):
                    value = json.loads(value)
                    if not isinstance(value, tuple):
                        data = {
                            "msg": gettext(
                                'The format of the "value" errors, need a "{}" type'
                            ).format(old_conf["type"]),
                            "msg_type": "e",
                            "http_status": 400
                        }
                        return data
            elif old_conf["type"] == "password":
                value = str(value)
            else:
                data = {
                    "msg": gettext('There is no {}').format(old_conf["type"]),
                    "msg_type": "e",
                    "http_status": 400
                }
                return data
        except Exception as e:
            data = {
                "msg": gettext(
                    'The format of the "value" errors, need a "{}" type'
                ).format(old_conf["type"]),
                "msg_type": "e",
                "http_status": 400
            }
            return data

        if not info:
            info = old_conf["info"]
        conf = {"value": value, "update_time": time.time(), "info": info}

        # Update the version.
        # Explanation: as soon as any web server restarts and updates its
        # config, the latest version at restart time is added to used_versions.
        if version["new_version"] in version["used_versions"]:
            # The current latest version number is already in used_versions,
            # so this change must create a newer configuration version
            now_version = time_to_utcdate(tformat="%Y_%m_%d_%H_%M_%S")
            old_version = mdb_sys.db.sys_config.find(
                {
                    "project": {"$exists": True},
                    "conf_version": version["new_version"]
                },
                {"_id": 0})
            # Generate the latest version of the configuration
            for v in old_version:
                v["conf_version"] = now_version
                mdb_sys.db.sys_config.insert_one(v)
            # Update the latest version number currently in use
            mdb_sys.db.sys_config.update_one(
                {"new_version": {"$exists": True}},
                {"$set": {"new_version": now_version}})
            # Delete redundant configuration versions
            ver_cnt = len(version["used_versions"])
            if ver_cnt >= 15:
                rm_vers = version["used_versions"][0:ver_cnt - 15]
                mdb_sys.db.sys_config.update_one(
                    {"new_version": {"$exists": True}},
                    {
                        "$set": {
                            "used_versions": version["used_versions"][ver_cnt - 15:]
                        }
                    })
                mdb_sys.db.sys_config.delete_many(
                    {"conf_version": {"$in": rm_vers}})
        else:
            # Otherwise, do not create a new configuration version for this change
            now_version = version["new_version"]
        # Save the modified data
        mdb_sys.db.sys_config.update_one(
            {
                "project": project,
                "key": key,
                "conf_version": now_version
            },
            {"$set": conf},
            upsert=True)
        # Delete the cache so that it gets refreshed
        cache.delete(CONFIG_CACHE_KEY)
        data = {
            "msg": gettext("Modify the success"),
            "msg_type": "s",
            "http_status": 201
        }
    return data
def comment_access_time(query=None, days=7):
    if query is None:
        query = {}
    now_time = time.time()
    s_time = now_time - 86400 * days - now_time % 86400
    query["$or"] = [
        {"issue_time": {"$gte": s_time}},
        {"issue_time": {"$gte": s_time}}
    ]
    m = Code("""
    function(){
        var newDate = new Date();
        newDate.setTime(this.issue_time*1000);
        var year=newDate.getFullYear();
        var month=newDate.getMonth()+1;
        var date=newDate.getDate();
        var g_f = {"date":year*10000+month*100+date,
                   "issue_time":this.issue_time-this.issue_time%86400}
        var value = {count:1}
        emit(g_f, value);
    }
    """)
    r = Code("""
    function(key,values){
        var ret = {count:0};
        values.forEach(
            function(v){
                ret.count += v.count;
            }
        );
        return ret;
    }
    """)
    result = mdbs["web"].db.comment.map_reduce(
        m, r, out={"inline": 1}, full_response=True, query=query)
    if result['counts']["output"] > 0:
        temp_result = sorted(result["results"],
                             key=lambda x: x["_id"]["date"])
        last_time = s_time - 86400
        # Fill the gaps: days without comments get a zero count
        for r in temp_result:
            r_time = r["_id"]["issue_time"] - r["_id"]["issue_time"] % 86400
            if r_time > last_time + 86400:
                for i in range(1, int((r_time - last_time) / 86400)):
                    last_time += 86400
                    result["results"].append({
                        "_id": {
                            "issue_time": last_time,
                            "date": time_to_utcdate(last_time, "%Y%m%d")
                        },
                        "value": {"count": 0}
                    })
                last_time += 86400
            else:
                last_time += 86400
        result["results"] = sorted(result["results"],
                                   key=lambda x: x["_id"]["date"])
        return result["results"]
    else:
        return {}
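# --- Illustration (not part of the project) ---------------------------------
# The post-processing in comment_access_time() pads days that had no comments
# with zero counts. The same idea, stripped of the map-reduce plumbing; the
# day buckets below are made-up sample data keyed by UTC day-start timestamps.
DAY = 86400

_counts = {0 * DAY: 3, 2 * DAY: 1, 5 * DAY: 4}   # day-start timestamp -> count

_filled = []
_t, _end = 0 * DAY, 5 * DAY
while _t <= _end:
    _filled.append({"issue_time": _t, "count": _counts.get(_t, 0)})
    _t += DAY

print(_filled)   # days 1, 3 and 4 appear with count 0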
def update_pylib(venv_path=True, latest=False, is_yes=False):
    """
    Update the python environment libraries.
    :param venv_path: virtual environment path ("default", "null" or a path)
    :param latest: install the latest versions and rewrite requirements.txt
    :param is_yes: skip the interactive confirmation
    :return:
    """
    if venv_path == "default" and not is_yes:
        input_str = input(
            "Already running this script in your project python virtual"
            " environment?(yes/no):\n")
        if input_str.upper() == "YES":
            venv_path = None
        else:
            venv_path = input("Enter a virtual environment:\n")
    elif venv_path == "null":
        venv_path = None
    if venv_path:
        if os.path.exists("{}/bin/activate".format(venv_path)):
            venv = ". {}/bin/activate && ".format(venv_path)
        else:
            venv = ". {}/bin/activate && ".format(sys.prefix)
    else:
        venv = ""

    # Check network connectivity
    is_time_out = False
    try:
        requests.get("https://www.bing.com/", timeout=10)
    except Exception as e:
        is_time_out = True
        print(e)
    if not is_time_out:
        print(" * Update pip...")
        s, r = subprocess.getstatusoutput("{}pip3 install -U pip".format(venv))
        print(" {}".format(r))
    else:
        print(" \033[31m ** Timeout for connecting to external network\033[0m")

    s, r = subprocess.getstatusoutput("{}pip3 freeze".format(venv))
    venv_exist_libs = r.split()
    req_txt_filepath = "{}/requirements.txt".format(PROJECT_PATH)
    with open(req_txt_filepath) as rf:
        req_file_libs = rf.read().split()

    # Find the packages that need to be installed
    if latest:
        need_install_libs = req_file_libs[:]
    else:
        need_install_libs = list(
            set(req_file_libs).difference(set(venv_exist_libs)))
    for pylib in need_install_libs[:]:
        if "==" not in pylib:
            need_install_libs.remove(pylib)
    print(" * Libraries that need to be installed: {}".format(
        len(need_install_libs)))
    print(" ".join(need_install_libs))

    # Install
    install_failed = []
    installed = []
    if not is_time_out:
        for sf in need_install_libs:
            if latest:
                sf = sf.split("==")[0]
            shcmd = "{}pip3 install -U {}".format(venv, sf)
            print(shcmd)
            s, r = subprocess.getstatusoutput(shcmd)
            if s:
                install_failed.append(sf)
                print("\033[31m Failed\033[0m")
            else:
                installed.append(sf)
                print(" Succeeded")
        # Try again
        for sf in install_failed:
            s, r = subprocess.getstatusoutput(
                "{}pip3 install -U {}".format(venv, sf))
            if not s:
                install_failed.remove(sf)
                installed.append(sf)

    # Find the packages that are no longer needed
    s, r = subprocess.getstatusoutput("{}pip3 freeze".format(venv))
    venv_libs = r.split()
    temp_venv_libs = []
    for sf in venv_libs:
        if "==" in sf:
            temp_venv_libs.append(sf.split("==")[0])
    if latest:
        unwanted_libs = list(set(temp_venv_libs).difference(set(installed)))
    else:
        temp_req_file_libs = []
        for sf in req_file_libs:
            if "==" in sf:
                temp_req_file_libs.append(sf.split("==")[0])
        unwanted_libs = list(
            set(temp_venv_libs).difference(set(temp_req_file_libs)))
    for sf in unwanted_libs[:]:
        if "==" not in sf:
            unwanted_libs.remove(sf)

    print(" \n*[Succeeded] Libs: {}".format(len(installed)))
    print(" ".join(installed))
    print(" \033[31m\n* [Failed] Libs, please manually install: {}".format(
        len(install_failed)))
    print(" ".join(install_failed))
    print(" \n* Now don't need python library: {}".format(len(unwanted_libs)))
    print(" ".join(unwanted_libs))
    print("\033[0m")

    if latest:
        with open(req_txt_filepath, "w") as wf:
            wf.write("\n".join(venv_libs))
        update_log = "{}/pylibs_update.log".format(LOG_PATH)
        with open(update_log, "w") as wf:
            lines = [
                "Python libraries and version numbers before {}\n".format(
                    time_to_utcdate(time.time(), "%Y/%m/%d %H:%M:%S")),
                "\n".join(req_file_libs)
            ]
            wf.writelines(lines)
        msg = " * The latest versions of the libraries have been installed,\n" \
              " and the requirements file has been rewritten.\n" \
              " To view the previous versions, please check the file:\n {}\n".format(update_log)
        print(msg)
def update_config_file(mdbs, *args, **kwargs):
    """
    On site startup, synchronize the configuration stored in the database
    with the configuration file (config, mdb_sys, redis).
    """
    local_config = deepcopy(CONFIG)
    overwrite_db = OVERWRITE_DB
    version_info = mdbs["sys"].db.sys_config.find_one(
        {"new_version": {"$exists": True}})
    if not version_info:
        now_version = time_to_utcdate(time_stamp=now_time,
                                      tformat="%Y_%m_%d_%H_%M_%S")
        version_uses = {
            "new_version": now_version,
            "used_versions": [now_version],
            "update_time": now_time
        }
        mdbs["sys"].db.sys_config.insert_one(version_uses)
        sys_version_of_config = SYS_CONFIG_VERSION
        last_update_time = now_time
        web_start_log.info("Initialize the sys_config version info")
    else:
        now_version = version_info["new_version"]
        last_update_time = version_info["update_time"]
        if "sys_version_of_config" in version_info:
            sys_version_of_config = version_info["sys_version_of_config"]
        else:
            sys_version_of_config = SYS_CONFIG_VERSION
            mdbs["sys"].db.sys_config.update_one(
                {"new_version": {"$exists": True}},
                {"$set": {"sys_version_of_config": SYS_CONFIG_VERSION}})

    if version_info and not overwrite_db:
        # Look up the configuration version of the current web host
        cur_h_version_info = mdbs["sys"].db.sys_host.find_one({
            "type": "web",
            "host_info.local_ip": host_info["local_ip"]
        })
        if not cur_h_version_info:
            cur_h_version_info = {
                "type": "web",
                "host_info": host_info,
                "conf_version": version_info["new_version"],
                "switch_conf_version": None,
                "disable_update_conf": 0
            }
            mdbs["sys"].db.sys_host.insert_one(cur_h_version_info)
            web_start_log.info("Initialize the host version data")

        if cur_h_version_info["switch_conf_version"] or cur_h_version_info[
                "disable_update_conf"]:
            # Merge the database config with the local config, keeping both
            # local and database keys, so the site version can be rolled back
            if cur_h_version_info[
                    "switch_conf_version"] and not cur_h_version_info[
                    "disable_update_conf"]:
                # Switch to another version
                now_version = cur_h_version_info["switch_conf_version"]
            else:
                # Updates disabled
                now_version = cur_h_version_info["conf_version"]
            confs = mdbs["sys"].db.sys_config.find(
                {"conf_version": now_version})
            if confs.count(True):
                for conf in confs:
                    if not re.search(r"^__.*__$", conf["key"]):
                        if not conf["project"] in local_config:
                            local_config[conf["project"]] = {}
                        if not conf["key"] in local_config[conf["project"]]:
                            local_config[conf["project"]][conf["key"]] = {
                                "value": conf["value"],
                                "type": conf["type"],
                                "info": conf["info"]
                            }
                        else:
                            local_config[conf["project"]][
                                conf["key"]]["value"] = conf["value"]
                web_start_log.info(
                    "Config rollback:[db to file] Update version info")
            else:
                web_start_log.error(
                    "Config rollback:[db to file] Rollback failure")
                return False
        else:
            # Merge the latest database config with the local config,
            # keeping only local keys and deleting the rest
            ago_time = now_time - 3600 * 24
            if sys_version_of_config >= SYS_CONFIG_VERSION \
                    and last_update_time > ago_time:
                # The SYS_CONFIG_VERSION in use is the same as (or higher than)
                # this machine's CONFIG, and the config was already updated
                # within the last 24 hours, so skip this update
                msg = " * [sys configs] Not updated. The system is using the" \
                      " same or higher configuration version.\n" \
                      " And it was executed within 24 hours.\n"
                start_info_print("\033[33m{}\033[0m".format(msg))
                return True
            now_version = version_info["new_version"]
            confs = mdbs["sys"].db.sys_config.find(
                {"conf_version": now_version})
            if confs.count(True):
                for conf in confs:
                    is_cft = re.search(r"^__.*__$", conf["key"])
                    if not is_cft and conf["project"] in local_config.keys() \
                            and conf["key"] in local_config[conf["project"]].keys():
                        local_config[conf["project"]][
                            conf["key"]]["value"] = conf["value"]
                        web_start_log.info(
                            "Config merge:[db to file] {} {}".format(
                                conf["project"], conf["key"]))
                    else:
                        mdbs["sys"].db.sys_config.delete_one(
                            {"_id": conf["_id"]})
                        web_start_log.info("Remove the config:{} {}".format(
                            conf["project"], conf["key"]))
            else:
                web_start_log.error("Config merge:[db to file] Merger failure")
                return False
    else:
        web_start_log.info(
            "**Local configuration directly covering the latest edition of"
            " the configuration database")

    r = push_to_db(mdbs,
                   local_config=deepcopy(local_config),
                   now_version=now_version)
    if not r:
        web_start_log.error("Config update:[file to db] Push failure")
        return False

    # Write the configuration file
    # info = """#!/usr/bin/env python\n# -*-coding:utf-8-*-\n__author__ = "Allen Woo"\n"""
    # doc = "__readme__= '''{}'''\n".format(__readme__)
    #
    # # write config.py
    #
    # # Replace password-type values with a placeholder before writing the
    # # file, to avoid committing passwords to git
    # for k, v in local_config.items():
    #     for k1, v1 in v.items():
    #         if k1.startswith("__") and k1.endswith("__"):
    #             continue
    #         if "type" in v1 and v1["type"] == "password":
    #             # The previous version's password was already replaced,
    #             # so write the real value back into CONFIG
    #             CONFIG[k][k1]["value"] = v1["value"]
    #             # Mask the password
    #             v1["value"] = "<Your password>"
    #
    # temp_conf = str(json.dumps(local_config, indent=4, ensure_ascii=False))
    # wf = open("{}/apps/configs/config.py".format(PROJECT_PATH), "wb")
    # wf.write(bytes(info, "utf-8"))
    # wf.write(bytes(doc, "utf-8"))
    # # Reset the flag so that the database configuration keeps being synced to this file
    # wf.write(bytes("# Danger: If True, the database configuration data will be overwritten\n", "utf-8"))
    # wf.write(bytes("OVERWRITE_DB = False\n", "utf-8"))
    # wf.write(bytes("CONFIG = ", "utf-8"))
    # wf.write(bytes(temp_conf.replace("false",
    #                                  "False").replace("true",
    #                                                   "True").replace("null",
    #                                                                   "None"),
    #                "utf-8"))
    # wf.close()
    web_start_log.info("Configuration updates and merge is complete")
    return True
def login_log(user, client):
    """
    Login log handling.
    :param user: user object instance
    :return:
    """
    # Update the login log
    login_info = {
        'time': time.time(),
        'ip': request.remote_addr,
        'geo': reader_city(request.remote_addr),
        'client': client
    }
    user_login_log = mdbs["user"].db.user_login_log.find_one(
        {'user_id': user.str_id})
    if user_login_log and "login_info" in user_login_log:
        login_infos = user_login_log["login_info"]
    else:
        login_infos = []
    login_infos.append(login_info)
    than_num = len(login_infos) - \
        get_config("weblogger", "SING_IN_LOG_KEEP_NUM")
    if than_num > 0:
        del login_infos[0:than_num]
    mdbs["user"].db.user_login_log.update_one(
        {'user_id': user.str_id},
        {"$set": {"pass_error": 0, "login_info": login_infos}},
        upsert=True)

    # Check whether the login region is abnormal
    anl = AbnormalLogin(login_infos[0:-1], login_info["geo"])
    abr = anl.area()
    if abr == "abnormal":
        # Send a notification email
        subject = gettext("Abnormal login")
        try:
            location = "{}/{}/{}".format(
                login_info["geo"]["subdivisions"]["name"],
                login_info["geo"]["country"]["name"],
                login_info["geo"]["continent"]["name"]
            )
        except BaseException:
            location = None
        if location:
            body = [
                gettext("Abnormal login"),
                gettext("Your account {} , is logined in {} on {} [UTC Time].").format(
                    user.email, location,
                    time_to_utcdate(tformat="%Y-%m-%d %H:%M:%S")
                )
            ]
            data = {
                "title": subject,
                "username": user["username"],
                "body": body,
                "site_url": get_config("site_config", "SITE_URL")
            }
            html = get_email_html(data)
            msg = {
                "subject": subject,
                "recipients": [user["email"]],
                "html_msg": html
            }
            send_email(msg=msg, ctype="nt")
def copy_file(from_file_url_obj=None, from_path=None, replica_prefix="",
              replica_file_name=None, replica_file_format=None):
    """
    Copy a file. Only files in the same storage region can be copied.
    :param from_file_url_obj: choose either this or from_path
    :param from_path:
    :param replica_prefix:
    :param replica_file_name:
    :param replica_file_format:
    :return:
    """
    # Custom file name
    if replica_file_name:
        filename = '{}.{}'.format(replica_file_name, replica_file_format)
    else:
        filename = '{}-{}.{}'.format(
            time_to_utcdate(time_stamp=time.time()), uuid1(),
            replica_file_format)
    if replica_prefix:
        filename = "{}{}".format(replica_prefix, filename)
    filename = filename.replace("//", "/")
    if from_file_url_obj:
        if from_file_url_obj['type'] == "local":
            # Copying a local file
            from_local_path = get_localfile_path(from_file_url_obj)
            # Absolute path for saving the file
            save_file_path = "{}/{}/{}".format(
                STATIC_PATH, get_config("upload", "SAVE_DIR"),
                filename).replace("//", "/")
            # Save the file on the local server
            save_dir = os.path.split(save_file_path)[0]
            if not os.path.exists(save_dir):
                os.makedirs(save_dir)
            shutil.copyfile(from_local_path, save_file_path)
            result = {
                "key": filename,
                "bucket_name": None,
                "d": None,
                "type": "local"
            }
            return result
        else:
            # Check for a file storage plugin
            data = plugin_manager.call_plug(hook_name="file_storage",
                                            action="copy_file",
                                            file_url_obj=from_file_url_obj,
                                            filename=filename)
            if data == "__no_plugin__":
                return None
            else:
                return data
    elif from_path:
        # Check for a file storage plugin
        data = plugin_manager.call_plug(
            hook_name="file_storage",
            action="upload",
            localfile_path=from_path,
            filename=filename,
        )
        if data == "__no_plugin__":
            # Absolute path for saving the file
            save_file_path = "{}/{}/{}".format(
                STATIC_PATH, get_config("upload", "SAVE_DIR"),
                filename).replace("//", "/")
            # Save the file on the local server
            save_dir = os.path.split(save_file_path)[0]
            if not os.path.exists(save_dir):
                os.makedirs(save_dir)
            shutil.copyfile(from_path, save_file_path)
            result = {
                "key": filename,
                "bucket_name": None,
                "d": None,
                "type": "local"
            }
            return result
        else:
            return data
def profile_update():
    """
    Update user profile information.
    :return:
    """
    gender = request.argget.all('gender', 'secret')
    birthday = request.argget.all('birthday')
    homepage = request.argget.all('homepage')
    address = json_to_pyseq(request.argget.all('address', {}))
    info = request.argget.all('info')
    if len(birthday) != 8:
        data = {
            'msg': gettext(
                "The date of birth requires an 8-digit date,Such as '{}'").format(
                time_to_utcdate(tformat="%Y%m%d")),
            'msg_type': "e",
            "custom_status": 400
        }
        return data
    birthday = int(birthday)
    s, r = arg_verify(reqargs=[(gettext("gender"), gender)],
                      only=["secret", "m", "f"])
    if not s:
        return r
    addr_keys = ['countries', 'provinces', 'city', 'district', 'detailed']
    for k, v in address.items():
        if not (k in addr_keys) or not isinstance(v, str):
            data = {
                'msg': gettext(
                    "Address format is not in conformity with the requirements"),
                'msg_type': "e",
                "custom_status": 400
            }
            return data
    if homepage:
        s, r = url_format_ver(homepage)
        if not s:
            return {"msg": r, "msg_type": "w", "custom_status": 403}
    r = content_attack_defense(info)
    if r["security"] < 100:
        data = {
            'msg': gettext("User profile information is illegal"),
            'msg_type': "e",
            "custom_status": 400
        }
        return data
    update_data = {
        'gender': gender,
        'homepage': homepage,
        'introduction': info,
        'birthday': birthday,
        'address': address
    }
    r = update_one_user(user_id=current_user.str_id,
                        updata={"$set": update_data})
    if r.modified_count:
        # Clear the cached user info
        delete_user_info_cache(user_id=current_user.str_id)
        data = {
            'msg': gettext("Update succeed"),
            'msg_type': "s",
            "custom_status": 201
        }
    else:
        data = {
            'msg': gettext("No changes"),
            'msg_type': "w",
            "custom_status": 201
        }
    return data
def push_url_to_db(app):
    """
    Sync URLs to the database.
    :param app:
    :return:
    """
    # Back up the existing URLs
    now_time = time.time()
    ud = time_to_utcdate(now_time, "%Y%m%d%H")
    days_ago_t = now_time - 86400 * 7
    days_ago_d = time_to_utcdate(days_ago_t, "%Y%m%d%H")
    if not mdbs["sys"].dbs["sys_urls_back"].find_one({"backup_time": ud}):
        sys_urls = list(mdbs["sys"].dbs["sys_urls"].find({}, {"_id": 0}))
        for sys_url in sys_urls:
            sys_url["backup_time"] = ud
        if sys_urls:
            mdbs["sys"].dbs["sys_urls_back"].insert_many(sys_urls)
        mdbs["sys"].dbs["sys_urls_back"].delete_many(
            {"backup_time": {"$lt": days_ago_d}})

    for rule in app.url_map.iter_rules():
        if rule.endpoint.startswith("api.") or rule.endpoint.startswith(
                "open_api."):
            type = "api"
        else:
            continue
        now_time = time.time()
        r = mdbs["sys"].dbs["sys_urls"].find_one(
            {"url": rule.rule.rstrip("/")})
        if not r:
            # The URL does not exist yet
            mdbs["sys"].dbs["sys_urls"].insert_one({
                "url": rule.rule.rstrip("/"),
                "methods": list(rule.methods),
                "endpoint": rule.endpoint,
                "custom_permission": {},
                "type": type,
                "create": "auto",
                "update_time": now_time
            })
        elif r:
            new_methods = list(rule.methods)
            old_methods = r["methods"]
            if old_methods:
                new_methods.extend(old_methods)
            else:
                old_methods = []
            new_methods = list(set(new_methods))
            mdbs["sys"].dbs["sys_urls"].update_one(
                {"_id": r["_id"]},
                {
                    "$set": {
                        "methods": new_methods,
                        "endpoint": rule.endpoint,
                        "type": type,
                        "create": "auto",
                        "update_time": now_time
                    }
                })
            new_methods.sort()
            old_methods.sort()
            if new_methods != old_methods or rule.endpoint != r["endpoint"]:
                # Clear the cache
                cache.delete_autokey(fun="get_sys_url",
                                     key_base64=False,
                                     db_type="redis",
                                     url=r["url"])

    # Clean up APIs that no longer exist.
    # The 7-day window prevents accidental deletion when multiple servers
    # start at the same time.
    ut = time.time() - 86400 * 7
    mdbs["sys"].dbs["sys_urls"].delete_many({
        "type": {"$ne": "page"},
        "update_time": {"$lt": ut}
    })