def sign_qcloud_url(qcloud_url, qcloud_token, url_path=None, more=None, zero_steps=4):
    """Build a signed Qcloud CDN URL (TypeD-style auth).

    The unix timestamp has its last `zero_steps` digits zeroed out
    (default 10**4 seconds, about 3 hours), so the generated URL stays
    stable during that window and downstream caching keeps working.
    """
    if url_path is None:
        full_url = qcloud_url
        url_path = get_url_path(qcloud_url)
    else:
        full_url = "%s/%s" % (qcloud_url.rstrip("/"), url_path.lstrip("/"))
    if not url_path.startswith("/"):
        url_path = "/%s" % url_path
    # round the timestamp down by zeroing its trailing digits
    rounded_ts = str(int(time.time()))[:-zero_steps] + "0" * zero_steps
    salt = get_md5("%s-%s-%s" % (url_path, rounded_ts, server_secret_key))
    digest = get_md5("%s-%s-%s-0-%s" % (url_path, rounded_ts, salt, qcloud_token))
    sign = "%s-%s-0-%s" % (rounded_ts, salt, digest)
    if "?" in full_url:
        url = join_url(full_url, sign=sign)
    else:
        url = "%s?sign=%s" % (full_url, sign)
    if more:
        url = "%s&%s" % (url, more)
    return url
def _func(*args, **kwargs):
    """The real wrapped function: memoize func(*args, **kwargs) on the
    request-global object `g`, inside a dict attribute named by the
    closure variable `cache_name`.
    """
    g = get_g()
    if cache_name == 'no_cache':
        # caching disabled for this wrapper, call straight through
        return func(*args, **kwargs)
    # get/create the cache container on g
    if not hasattr(g, cache_name):
        setattr(g, cache_name, {})
    # Build a cache key out of the call arguments.
    # FIX: wrap the dict views in list() -- on Python 3,
    # `list + dict_keys` raises TypeError (the original was py2-only).
    values = list(args) + list(kwargs.keys()) + list(kwargs.values())
    key_list = []
    for value in values:
        if isinstance(value, dict) and '_id' in value:
            # dict arguments are keyed by their _id field when present
            key_list.append(smart_unicode(value['_id']))
        else:
            key_list.append(smart_unicode(value))
    key = "-".join(key_list)
    if key:
        key = get_md5(key)  # avoid over-long keys
    else:
        # empty key: use a fixed placeholder so zero-arg calls still
        # hit the cache instead of recomputing every time
        key = '_'
    container = getattr(g, cache_name)
    if key not in container:
        # only the presence of the key matters; a cached None is a hit
        container[key] = func(*args, **kwargs)
    return container.get(key)
def get_avatar_id(email):
    """Derive an avatar id from an email address.

    Returns md5 of the normalized address when it looks like an email,
    the normalized value itself otherwise, and None for empty/non-string
    input.
    """
    if not isinstance(email, string_types) or not email:
        return None
    normalized = email.lower().strip()
    if '@' not in normalized:
        return normalized
    return get_md5(normalized)
def get_comments_by_comments_doc(comments_doc, as_tree=True, utc_offset=8):
    # Normalize the comment dicts stored under comments_doc['objects']
    # (author/avatar/id/date fields) and, when as_tree is True, nest each
    # reply under its parent comment's 'children' list.
    if not comments_doc:
        return []
    comments = comments_doc.get('objects') or []
    for comment in comments:
        comment['author'] = get_comment_author_name(comment.get('author'), comment.get('email'))
        comment['avatar'] = get_comment_avatar(comment.get('email'))
        # assign an id to the comment; only meaningful for the tree form
        email = comment.get('email') or ''
        comment_date = comment.get('date') or ''
        origin_id = '%s %s' % (email, comment_date)
        comment['origin_id'] = origin_id
        comment['id'] = get_md5(origin_id)
        date = comment.get('date')
        if date and isinstance(date, string_types):
            try:
                date = utc_date_parse(date, utc_offset=utc_offset)
                comment['date'] = date
            except:
                # best-effort: keep the raw date string if parsing fails
                pass
        if comment.get('reply'):
            # this comment is a reply to another comment
            comment['reply_to_id'] = get_md5(comment.get('reply'))
    # turn replies into children of their parents
    if as_tree:
        raw_comments = comments
        comments_map = {}
        comments = []
        for comment in raw_comments:
            comments_map[comment.get('id')] = comment
        for comment in raw_comments:
            if comment.get('reply_to_id'):
                parent_id = comment.get('reply_to_id')
                parent_comment = comments_map.get(parent_id)
                if parent_comment:
                    parent_comment.setdefault('children', []).append(comment)
                    continue
            # comment that does not belong to any parent: keep at top level
            comments.append(comment)
    return comments
def get_signature_for_bucket(bucket, timestamp=None, salt=None):
    """Return a bucket signature of the form <timestamp>-<md5 body>."""
    ts = timestamp or int(time.time())
    value_to_hash = "%s-%s-%s" % (ts, bucket, server_secret_key)
    if salt:
        value_to_hash = "%s-%s" % (value_to_hash, salt)
    return "%s-%s" % (ts, get_md5(value_to_hash))
def render_jade_template(template_filepath, after_render_func=None, *args, **kwargs): from farbox_bucket.server.template_system.env import farbox_bucket_env # 主要渲染本地的模板文件,可以传入一个env,这样可以确定模板的root global local_jade_templates env = kwargs.pop('env', None) or farbox_bucket_env if template_filepath in local_jade_templates and not DEBUG: template = local_jade_templates.get(template_filepath) else: if not os.path.isfile(template_filepath): return # 模板文件不存在, ignore with open(template_filepath) as f: source = f.read() template = None if DEBUG: template_md5 = get_md5(source) if template_md5 in local_jade_templates: # 文件实际上是缓存了的 template = local_jade_templates.get(template_md5) template = template or jade_to_template(source, env=env) #if DEBUG: #print template.source local_jade_templates[template_filepath] = template if DEBUG: template_md5 = get_md5(source) local_jade_templates[template_md5] = template return_html = kwargs.pop('return_html', False) html_source = template.render(*args, **kwargs) if after_render_func: html_source = after_render_func(html_source) if return_html: return '\n'+html_source else: response = make_response(html_source) return response
def file_version(self):
    """Compute the content version (an md5) for this file.

    NOTE: when is_deleted is True callers should use self.version
    directly; this helper never inspects the deleted flag.
    """
    if self._raw_content:
        # in-memory content takes precedence over anything on disk
        return get_md5(self._raw_byte_content)
    if not self.is_dir and self.abs_filepath and os.path.isfile(self.abs_filepath):
        return md5_for_file(self.abs_filepath)
    # directories, or files missing from disk, have no version
    return None
def replace_vars_in_scss(raw_content, new_vars, compile_scss=False):
    """Substitute scss variables in raw_content with values from new_vars.

    When compile_scss is True the result is compiled to plain css. The md5
    of the *original* content is embedded as a leading comment so a later
    run can compare versions and decide whether a recompile is needed.
    """
    if not raw_content:
        return ''
    replacer = curry(replace_var_in_scss_func, new_vars=new_vars)
    new_content = scss_key_value_compiler.sub(replacer, raw_content)
    if compile_scss:
        new_content = compile_css_with_timeout(new_content)
    raw_version = get_md5(raw_content)
    return '/*%s*/\n%s' % (raw_version, new_content)
def do_get_smart_scss_url(scss_filepath, **kwargs):
    # Replace the named variables inside a scss/less source with the values
    # in kwargs and return the path of a compiled, cached css file.
    # The source always has to be read, otherwise we cannot tell whether a
    # recompile is needed; the page-level cache keeps the cost limited.
    # filename.scss -> filename-xxxxxxxxxxxx.css
    ext = os.path.splitext(scss_filepath)[-1]
    if ext not in [".less", ".scss"]:
        return scss_filepath #ignore
    prefix_name = get_prefix_name_from_source_filepath(scss_filepath)
    # NOTE(review): hashing kwargs.keys()/values() directly relies on
    # get_md5 accepting non-string input, and the result depends on dict
    # insertion order -- confirm whether keys should be sorted here (a
    # change would invalidate previously cached filenames)
    filename = get_md5(get_md5(kwargs.keys()) + get_md5(kwargs.values())) + '.css'
    filepath = '/_cache/scss/%s-%s' % (prefix_name, filename)
    bucket = get_bucket_in_request_context() or get_bucket_from_request()
    if not bucket:
        return scss_filepath # ignore
    if has_record_by_path(bucket, path=filepath):
        # the compiled file is already cached in the bucket
        return filepath
    raw_content = ""
    if scss_filepath.startswith("/fb_static/"):
        raw_content = get_static_raw_content(scss_filepath)
    else:
        bucket = get_bucket_in_request_context()
        if bucket:
            raw_content = get_raw_content_by_path(bucket=bucket, path=scss_filepath)
    if not raw_content:
        return scss_filepath #ignore
    css_content = replace_vars_in_scss(raw_content=raw_content, new_vars=kwargs, compile_scss=True)
    sync_file_by_server_side(bucket=bucket, relative_path=filepath, content=css_content)
    return filepath
def get_verifier(public_key):
    """Build (and cache) a PKCS1 v1.5 verifier from a public key.

    Accepts either a full PEM block or a bare base64 key body (which gets
    wrapped in PEM armor). Returns None for empty input.
    """
    if not public_key:
        return None
    public_key = public_key.strip()
    cache_key = get_md5(public_key)
    if cache_key in verifier_cache:
        return verifier_cache[cache_key]
    if '-BEGIN PUBLIC KEY-' not in public_key:
        # bare key body: wrap it in PEM armor first
        public_key = "-----BEGIN PUBLIC KEY-----\n%s\n-----END PUBLIC KEY-----" % public_key
    rsa_key = RSA.importKey(public_key)
    verifier = PKCS1_v1_5.new(rsa_key)
    verifier_cache[cache_key] = verifier
    return verifier
def is_trade_under_this_site(self):
    """Verify that the trade in the current callback was issued by this
    site, by checking the hashed-bucket-id suffix of out_trade_no."""
    out_trade_no = request.values.get('out_trade_no') or ''
    if '-' not in out_trade_no:
        # the trade no carries no site info at all -- nothing to check
        return True
    bucket = get_bucket_in_request_context()
    if not bucket:
        return False
    # hashed site id is 32 chars; guards against cross-site callbacks
    # being verified successfully
    expected_suffix = '-%s' % get_md5(bucket)
    return out_trade_no.endswith(expected_suffix)
def get_cache_key_for_page():
    """Build the page-cache key for the current request.

    Returns None when no bucket (or no last-updated timestamp) is
    available, in which case the page should not be cached.
    """
    bucket = get_bucket_in_request_context()
    if not bucket:
        return
    bucket_last_updated_at = get_bucket_last_updated_at(bucket)
    if not bucket_last_updated_at:
        return
    raw_cache_key = '%s-%s-%s-%s' % (bucket, bucket_last_updated_at, request.url, PAGE_CACHE_VERSION)
    lang = get_language()
    if lang:
        # language-sensitive pages get a per-language cache entry
        raw_cache_key = '%s-%s' % (raw_cache_key, lang)
    return "page-%s" % get_md5(raw_cache_key)
def get_template_by_env(source, try_jade=True):
    """Parse `source` into a template under farbox_bucket_env, caching the
    compiled template by the md5 of the source.

    With try_jade, a failed jade compile falls back to an inline red
    error notice instead of breaking the page.
    """
    template_key = get_md5(source)
    if template_key in env_templates_cache:
        return env_templates_cache[template_key]
    if try_jade:
        try:
            template = jade_to_template(source, env=farbox_bucket_env)
        except Exception:
            # narrowed from a bare `except:` which would also swallow
            # SystemExit / KeyboardInterrupt
            template = farbox_bucket_env.from_string('<b style="color:red">`code` block means template source code,'
                                                     ' error format will break current page!!</b>')
    else:
        template = farbox_bucket_env.from_string(source)
    env_templates_cache[template_key] = template
    return template
def get_out_trade_no(self, prefix=''):
    """Generate a unique merchant order number (max length 64).

    Layout: <prefix up to 10 chars><CN timestamp, 14 chars><uuid
    fragment, 8 chars>, optionally followed by '-' + md5(bucket)
    (32 chars): 14 + 8 + 1 + 32 = 55 < 64.
    """
    prefix = smart_unicode(prefix)[:10]  # cap at 10 chars
    timestamp_part = get_cn_timestamp('%Y%m%d%H%M%S')  # 14 chars already
    random_part = uuid.uuid1().hex[:8]
    no = '%s%s%s' % (prefix, timestamp_part, random_part)
    try:
        bucket = get_bucket_in_request_context()
        if bucket:
            # append the hashed site id, used later to verify callbacks
            # and reject cross-site trades
            no = '%s-%s' % (no, get_md5(bucket))
    except:
        pass
    return no
def render_jade_source(source, cache_key=None, cache_space=None, env=None, **kwargs):
    """Compile a jade source string and render it to html.

    cache_key defaults to md5(source); cache_space defaults to the
    module-level default_jade_source_cache_space dict; env is the
    template environment to compile against.
    """
    cache_key = cache_key or get_md5(source)
    if not isinstance(cache_space, dict):
        # None or anything non-dict falls back to the shared default
        cache_space = default_jade_source_cache_space
    if cache_key in cache_space:
        template = cache_space[cache_key]
    else:
        template = jade_to_template(source, env=env)
        cache_space[cache_key] = template
    return template.render(**kwargs)
def render_template_source(template_source, *args, **kwargs):
    """Render an api template source string to html.

    Compiled templates are cached by the md5 of the source. Template
    errors yield a red error div (re-raised under DEBUG, reported to
    sentry otherwise); HTTPException always propagates -- 410 is used
    internally for redirects.
    """
    if not template_source:
        return ''
    try:
        env = get_api_template_env()
        source_key = get_md5(template_source)
        template = template_source_cache.get(source_key)
        if not template:
            template = env.from_string(template_source)
            template_source_cache[source_key] = template
        html_content = template.render(*args, **kwargs)
    except HTTPException as e:
        raise e
    except Exception as e:
        if DEBUG:
            raise e
        if sentry_client:
            sentry_client.captureException()
        html_content = '<div style="color:red">api template error</div>'
    return html_content
def sync_file_by_server_side(bucket, relative_path, content=None, is_dir=False, is_deleted=False, return_record=False, real_relative_path=None):
    # Create/update the record for `relative_path` in `bucket` on the
    # server side, skipping the write when the content version matches
    # the existing record.
    utc_offset = get_bucket_utc_offset(bucket=bucket)
    data = get_compiler_data_directly(relative_path, content=content, is_dir=is_dir, is_deleted=is_deleted, real_relative_path=real_relative_path, utc_offset=utc_offset)
    if content:
        data["size"] = len(content)
        if not data["version"]:
            # NOTE(review): md5 of str vs bytes content differs -- confirm
            # the content type matches what the compiler step produced
            data["version"] = get_md5(content)
    file_version = data.get("version")
    if file_version:
        old_record = get_record_by_path(bucket=bucket, path=relative_path, force_dict=True)
        if old_record:
            old_file_version = old_record.get("version")
            if file_version == old_file_version:
                # same path & same content: nothing to do
                return
    if not data:
        result = 'no data to create a record'
    else:
        result = create_record(bucket=bucket, record_data=data, file_content=content, return_record=return_record) # error_info/ None / record_data(dict)
    # when the caller asked for a record, anything non-dict means failure
    if return_record and not isinstance(result, dict):
        return None
    else:
        return result
def get_data_to_post(node, private_key, message='', action='record'):
    """Prepare (node_url, data_to_post) for posting a signed action to a node.

    Returns (None, None) when the bucket or the public key cannot be
    derived from the private key. The payload dict is signed with the
    private key *before* the data field is gzip+base64 compressed; an md5
    of the whitespace-stripped private key is attached so the server can
    weakly encrypt/decrypt sensitive fields.
    """
    message = message or ''
    node_url = get_node_url(node)
    bucket = get_bucket_by_private_key(private_key)
    if not bucket or not private_key:
        # bucket or private_key can not be found
        return None, None
    public_key = get_public_key_from_private_key(private_key)
    if not public_key:
        # private key is error?
        return None, None
    if not isinstance(message, string_types):
        message = json.dumps(message)  # no indent
    data_to_post = dict(
        bucket=bucket,
        action=action,
        timestamp=int(time.time()),
        data=message,
    )
    if action in ['create_bucket', 'check'] or action.startswith('config'):
        # create bucket (and config/check actions) should carry public_key
        data_to_post['public_key'] = public_key
    signature = sign_by_private_key(private_key, content=data_to_post)
    data_to_post['signature'] = signature
    # give the server an md5 of the private key, used for weak
    # encryption/decryption of sensitive fields
    # FIX: raw string for the regex -- '\s' is an invalid-escape warning
    # on modern Python (SyntaxWarning since 3.12)
    clean_private_key = re.sub(r'\s', '', private_key, flags=re.M)
    private_key_md5 = get_md5(clean_private_key)
    data_to_post['private_key_md5'] = private_key_md5
    # compress the data field (the signature was computed over the raw form)
    if isinstance(message, string_types):
        data_to_post['data'] = gzip_content(message, base64=True)
    return node_url, data_to_post
def compile_css(content):
    """Compile css content, keyed in the compile cache by md5(content)."""
    cache_client = get_cache_client()
    return compile_css_with_timeout(content, timeout=2, hash_key=get_md5(content),
                                    cache_client=cache_client)
def sync_site_folder_simply(node, root, private_key, should_encrypt_file=False, app_name_for_sync=None,
                            print_log=True, exclude_rpath_func=None,):
    """Sync a local site folder (template pages, files and bucket configs)
    to a remote node.

    Sync state is tracked in a hidden json status file inside `root`.
    When the bucket or node changed since the last run, the local sync
    metadata is reset. Returns early (without syncing files/configs)
    when dumping templates fails.
    """
    if not node or not root or not private_key:
        return  # ignore
    if not os.path.isdir(root):
        return  # ignore
    if not is_valid_private_key(private_key):
        return  # ignore
    app_name_for_sync = app_name_for_sync or 'farbox_bucket'
    site_folder_status_config_filepath = join(root, '.%s_site_folder_status.json' % app_name_for_sync)
    site_folder_status = load_json_file(site_folder_status_config_filepath) or {}
    bucket = get_bucket_by_private_key(private_key)
    old_bucket = site_folder_status.get('bucket')
    old_node = site_folder_status.get('node')
    if bucket != old_bucket or node != old_node:
        # bucket or node changed, reset the sync state
        clear_sync_meta_data(root=root, app_name=app_name_for_sync)
        site_folder_status['bucket'] = bucket
        site_folder_status['node'] = node
        # also drop the stored per-config md5 markers.
        # FIX: the original popped the literal string 'key' instead of the
        # loop variable, and mutated the dict while iterating it (a
        # RuntimeError on Python 3); iterate a snapshot of the keys.
        for key in list(site_folder_status.keys()):
            if key.endswith('_md5'):
                site_folder_status.pop(key, None)
    # dump the template first
    template_folder = get_path_with_dot_allowed(root, 'template')
    if os.path.isdir(template_folder):
        pages_data = get_pages_data(template_folder)
        current_pages_md5 = get_md5(json_dumps(pages_data, indent=4))
        old_pages_md5 = site_folder_status.get('pages_md5')
        if current_pages_md5 != old_pages_md5:
            # the template changed
            old_pages_data = site_folder_status.get('pages') or {}
            sync_status = dump_pages(node=node, private_key=private_key, pages_dir=template_folder,
                                     old_pages_data=old_pages_data,)
            sync_status_code = sync_status.get('code')
            if sync_status_code != 200:
                if print_log:
                    print(sync_status.get('message'))
                return
            else:
                # update pages_md5
                site_folder_status['pages_md5'] = current_pages_md5
                site_folder_status['pages'] = pages_data
                if print_log:
                    print('template is changed and synced')
    # update files first
    files_changed = sync_folder_simply(node=node, root=root, private_key=private_key,
                                       should_encrypt_file=should_encrypt_file,
                                       app_name_for_sync=app_name_for_sync,
                                       exclude_rpath_func=exclude_rpath_func)
    # update configs
    for config_type in allowed_bucket_config_types:
        sync_bucket_config(site_folder_status, root=root, node=node, private_key=private_key,
                           config_type=config_type, print_log=print_log)
    # store the site_folder_status
    dump_json_file(filepath=site_folder_status_config_filepath, data=site_folder_status)
def md5(obj):
    """Return the md5 of obj; on failure return obj unchanged (best effort)."""
    try:
        return get_md5(obj)
    except Exception:
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed
        return obj
def get_dom_id(self, v=None):
    """Turn a value into a dom-id style string: 'dd_' + md5(value)."""
    hashed = get_md5(v)
    return 'dd_%s' % hashed
def get_md5_for_key(key):
    """Return the md5 of `key` with all whitespace stripped first."""
    # FIX: raw string for the regex -- '\s' is an invalid-escape warning
    # on modern Python (SyntaxWarning since 3.12)
    clean_key = re.sub(r'\s', '', key, flags=re.M)
    return get_md5(clean_key)
def compile_jade(content):
    """Convert jade content to html, keyed in the cache by md5(content)."""
    cache_client = get_cache_client()
    return convert_jade_to_html(content, hash_key=get_md5(content),
                                cache_client=cache_client)
def id(self):
    """Stable comment id: md5 of '<email> <date>'."""
    return get_md5('%s %s' % (self.email, self.date))
return # ignore response_content = smart_str(response.content) if not response_content: return sync_file_by_server_side(bucket=bucket, relative_path=path, content=response_content) # 进行同步 return True except: pass def download_from_internet_and_sync(bucket, url, folder_to_save='/_data/downloads', path=None, timeout=10, force=False, async=True): # 从互联上下载内容 if not has_bucket(bucket): return "" if not path: # 自动生成doc_path, 以url为md5作为filename url_path = get_url_path(url) ext = os.path.splitext(url_path)[-1] url_md5 = get_md5(url) filename = url_md5 + ext path = '/%s/%s' % (folder_to_save.strip('/'), filename) if not force: # 非强制的情况下,如果文件已经存在,就不下载了 if has_record_by_path(bucket, path): return path if async: gevent.spawn(do_download_from_internet_and_sync, bucket, path, url=url, timeout=timeout) else: do_download_from_internet_and_sync(bucket, path, url, timeout=timeout) return path