def create_private_public_keys(password=None, is_clean=True, key_size=4096):
    """Generate an RSA key pair and return (private_key, public_key) as PEM text.

    password: optional passphrase; when given, the private-key PEM is
        encrypted with the best available algorithm.
    is_clean: strip the BEGIN/END armor lines from both returned keys.
    key_size: RSA modulus size in bits (default 4096).
    """
    private_key = rsa.generate_private_key(
        public_exponent=65537,
        key_size=key_size,
        backend=default_backend()
    )
    public_key = private_key.public_key()
    # one check instead of the original redundant double `if password:`
    if password:
        encryption_algorithm = serialization.BestAvailableEncryption(to_bytes(password))
    else:
        encryption_algorithm = serialization.NoEncryption()
    private_key_bytes = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,  # not TraditionalOpenSSL
        encryption_algorithm=encryption_algorithm,
    )
    public_key_bytes = public_key.public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    )
    private_key_s = to_unicode(private_key_bytes)
    public_key_s = to_unicode(public_key_bytes)
    if is_clean:
        private_key_s = to_clean_key(private_key_s)
        public_key_s = to_clean_key(public_key_s)
    return private_key_s, public_key_s
def get_url_path(record_data):
    """Return a usable `url_path` from the record, falling back to a relative `url`.

    A truthy non-string value is coerced via to_unicode; anything that still
    isn't a string becomes None.  When no url_path is available, a `url`
    field without a scheme ('://') is used with leading slashes removed.
    """
    url_path = record_data.get('url_path')
    if url_path and not isinstance(url_path, string_types):
        url_path = to_unicode(url_path)
    if not isinstance(url_path, string_types):
        url_path = None
    if not url_path and 'url' in record_data:
        raw_url = record_data.get('url')
        is_relative = isinstance(raw_url, string_types) and '://' not in raw_url
        if is_relative:
            url_path = to_unicode(raw_url).lstrip('/')
    return url_path
def get_data_type(record_data):
    """Return the record's type (`_type` preferred over `type`), normalized.

    Letter-only string values are stripped and lowercased; a string with
    non-letter characters yields None; non-string values pass through as-is.
    """
    raw_type = record_data.get('_type') or record_data.get('type')
    if not raw_type or not isinstance(raw_type, string_types):
        return raw_type
    if not are_letters(raw_type):
        return None
    return to_unicode(raw_type).strip().lower()
def get_home_path():
    """Return $HOME when the path actually exists on disk, else '/var'."""
    env_home = os.environ.get('HOME')
    if env_home:
        candidate = to_unicode(os.environ['HOME'])
        if os.path.exists(candidate):
            return candidate
    return '/var'
def ssdb_data_to_py_data(ssdb_data, hit_cache=False):
    """Convert a raw string fetched from ssdb back into Python data.

    Strings that look like JSON containers (leading '[', '{' or '(') are
    parsed with json_loads; anything else becomes unicode text.  With
    hit_cache=True, parsed results are memoized by the md5 of the raw
    string, avoiding repeated conversion cost for identical payloads.
    """
    if not isinstance(ssdb_data, string_types):
        return ssdb_data  # already Python data, nothing to do
    if hit_cache:
        data_cache_key = to_md5(ssdb_data)
        cached_value = ssdb_data_to_py_data_cache.get(data_cache_key)
        if cached_value:
            return cached_value
    else:
        data_cache_key = None
    # raw string literal: '\s' in a plain literal is an invalid escape
    # sequence on modern Python
    if re.match(r'\s*[\[\{\(]', ssdb_data):  # looks like dict / list / tuple
        try:
            py_data = json_loads(ssdb_data)
            if data_cache_key:
                ssdb_data_to_py_data_cache[data_cache_key] = py_data
        except Exception:  # malformed JSON: fall back to plain text
            py_data = to_unicode(ssdb_data)
    else:
        py_data = to_unicode(ssdb_data)
    return py_data
def to_key(key, is_public_key=False):
    """Normalize key text into PEM bytes, adding BEGIN/END armor when absent."""
    key = to_unicode(key)
    if is_public_key:
        head = '-----BEGIN PUBLIC KEY-----'
        tail = '-----END PUBLIC KEY-----'
    else:
        head = '-----BEGIN PRIVATE KEY-----'
        tail = '-----END PRIVATE KEY-----'
    if '-----BEGIN ' not in key:
        key = '\n'.join([head, key])
    if '-----END ' not in key:
        key = '%s\n%s\n' % (key.strip(), tail)
    return to_bytes(key)
def get_path_from_record(record_data, is_lower=True):
    """Return the record's `path` (lowercased by default), or None.

    From one point of view, `path` acts as the unique id within the db,
    hence the normalization.  Returns None for a non-dict record, a
    non-string path, or an empty path.
    """
    if not isinstance(record_data, dict):
        return
    path = record_data.get('path') or ''
    # type-check BEFORE calling str methods: the original ran
    # .strip()/.lower() first, so a non-string `path` crashed with
    # AttributeError before its isinstance guard could run
    if not isinstance(path, string_types):
        return
    path = path.strip()
    if is_lower:  # lowercase by default
        path = path.lower()
    path = to_unicode(path)
    if not path:
        return
    return path
def convert_jade_to_html(source, hash_key=None, cache_client=None):
    """Compile jade/pug template source into HTML, with optional zipped caching.

    hash_key + cache_client together enable a cache keyed 'jade:<hash_key>';
    a cache hit returns immediately, and a successful compile is stored back.
    """
    # compute the cache key and try the cache first
    if hash_key and cache_client:
        cache_key = 'jade:%s' % hash_key
        cached = cache_client.get(cache_key, zipped=True)
        if cached:
            return cached
    else:
        cache_key = None
    source = to_unicode(source)
    source = source.strip().replace(u'\ufeff', '')  # strip BOM and edge whitespace
    source = re.sub(r'\\\r?\n', '', source)  # join backslash-continued lines into one
    #source = re.sub(r'\t', '    ', source)  # replace tabs with spaces
    source = beautiful_jade(source)
    # NOTE: replacement templates are raw strings now — '\g<1>' inside a
    # plain literal is an invalid escape sequence on modern Python
    source = re.sub(r'((?:^|\n) *)else if ', r'\g<1>elif ', source)  # else if -> elif
    for func_name in ONE_LINE_FUNCTIONS:
        # wrap single-line calls of the listed functions in {{ }} first, so a
        # lone `load(...)` is not treated as a tag name by the compiler
        source = re.sub(r'([\r\n] *|^ *)(%s\(.*?\))(?= *[\r\n]|$)' % func_name,
                        r'\g<1>{{\g<2>}}', source)
    new_source = process(source, compiler=Compiler)
    # extra handling for the head section
    head_codes_search = re.search(r'<head>.*?</head>', new_source, re.S)
    if head_codes_search and 'set_content_type(' not in new_source:
        # skip when the template sets its own content type
        if not new_source.startswith('<!'):
            new_source = "<!DOCTYPE html>\n" + new_source
        head_codes = head_codes_search.group()
        #if not re.search(r'<link.*?rel=["\']alternate["\']', head_codes, re.I):  # add feed link
        #    new_source = new_source.replace('<head>', '<head>%s' % FEED_HTML)
        if not re.search(r'<meta.*?http-equiv=[\'"]content-type[\'"]', head_codes, re.I):
            # add an explicit content-type declaration
            new_source = new_source.replace('<head>', '<head>%s' % CONTENT_TYPE_HTML)
    # cache it
    if cache_key and cache_client:
        cache_client.set(cache_key, new_source, zipped=True)
    return new_source
def get_public_key_from_private_key(private_key, password=None, is_clean=True):
    """Derive the PEM public-key text from a PEM private key; None on failure.

    password: optional passphrase the private key is encrypted with.
    is_clean: strip the BEGIN/END armor lines from the result.
    """
    private_key = to_key(private_key, is_public_key=False)
    if password:
        password = to_bytes(password)
    try:
        private_key = serialization.load_pem_private_key(private_key, password=password,
                                                         backend=default_backend())
        public_key = private_key.public_key()
        public_key_bytes = public_key.public_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PublicFormat.SubjectPublicKeyInfo,
        )
        public_key_s = to_unicode(public_key_bytes)
        if is_clean:
            public_key_s = to_clean_key(public_key_s)
        return public_key_s
    except Exception:  # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
        print('failed to get_public_key_from_private_key')
        return
def sync_one_file(self, filepath, lower_files_info_on_server=None, lower_folders_info_on_server=None, re_check=False, should_store_files_info=False):
    """Sync one local file or folder to the server node.

    Returns True when the path was actually pushed, False when skipped,
    or None on an early bail-out (failed re-check encryption step).
    Paths whose version/hash already matches the server are marked as
    synced locally (after_synced) without a network push.

    NOTE(review): original indentation was lost in this view; the nesting
    of `self.ipfs_files[...] = ...` under `if file_version:` is the most
    plausible reconstruction — confirm against upstream history.
    """
    if re_check:
        should_sync = detect_should_sync(filepath=filepath, root=self.root, app_name=self.app_name_for_sync, check_md5=True, extra_should_sync_func=self.should_sync_file_func)
        if not should_sync:
            return False
    synced = False
    lower_files_info_on_server = lower_files_info_on_server or {}
    lower_folders_info_on_server = lower_folders_info_on_server or []
    is_file = os.path.isfile(filepath)
    relative_path = get_relative_path(filepath, root=self.root)
    file_size = os.path.getsize(filepath)
    file_real_size = file_size
    if self.should_encrypt_file and self.private_key and is_file:
        # encrypted_filepath is a temporary file
        encrypted_filepath = encrypt_file(filepath, encrypt_key=self.private_key)
        if not encrypted_filepath:
            return
        file_real_size = os.path.getsize(encrypted_filepath)
        ipfs_key = self.add_file_to_ipfs(encrypted_filepath)
        # best-effort cleanup of the temporary encrypted file
        try:
            os.remove(encrypted_filepath)
        except:
            pass
    elif is_file:
        ipfs_key = self.add_file_to_ipfs(filepath)
    else:
        ipfs_key = None
    file_version = ipfs_key
    if not ipfs_key and os.path.isfile(filepath):
        # compatibility when ipfs is unavailable: use the file's md5 instead
        file_version = get_md5_for_file(filepath)
    # compare against the server-side lower_files info; ignore identical files
    lower_relative_path = to_unicode(relative_path.lower())
    should_ignore = False
    if file_version:
        remote_file_version = get_value_from_data(lower_files_info_on_server.get(lower_relative_path), 'version')
        if not remote_file_version:
            # older servers expose the version under 'hash'
            remote_file_version = get_value_from_data(lower_files_info_on_server.get(lower_relative_path), 'hash')
        if remote_file_version == file_version:
            #if settings.DEBUG:
            #    print('has same file on server already for %s' % relative_path)
            should_ignore = True
        self.ipfs_files[relative_path] = dict(hash=file_version, size=file_size, real_size=file_real_size)
    is_dir = os.path.isdir(filepath)
    if is_dir:
        if lower_relative_path in lower_folders_info_on_server:
            #if settings.DEBUG:
            #    print('has same folder on server already for %s' % relative_path)
            should_ignore = True
    if should_ignore:
        # record the ignored path as synced, so the next loop skips it
        after_synced(filepath, root=self.root, app_name=self.app_name_for_sync)
    else:
        sync_compiler_worker = FarBoxSyncCompilerWorker(
            server_node=self.server_node,
            root=self.root,
            filepath=filepath,
            is_deleted=False,
            is_dir=is_dir,
            private_key=self.private_key,
            should_encrypt_file=self.should_encrypt_file,
            ipfs_key = ipfs_key,
            auto_clean_bucket=self.auto_clean_bucket,
            files_info=self.files_info,
        )
        sync_status = sync_compiler_worker.sync()
        self.record_sync_log(filepath=filepath, sync_status=sync_status, is_deleted=False)
        if sync_status and sync_status.get('code') == 200:
            synced = True
            after_synced(filepath, root=self.root, app_name=self.app_name_for_sync)
            if settings.DEBUG:
                print("synced (to) %s" % filepath)
            if should_store_files_info:
                self.store_files_info()
        elif not sync_status:
            # no status returned at all: treat as a kind of ignore
            after_synced(filepath, root=self.root, app_name=self.app_name_for_sync)
    return synced
def to_clean_key(key):
    """Strip the PEM BEGIN/END armor lines and surrounding whitespace."""
    armor_pattern = r'-----(BEGIN|END).*? (PRIVATE|PUBLIC) KEY-----'
    cleaned = re.sub(armor_pattern, '', to_unicode(key))
    return cleaned.strip()
def zset(namespace, key, score):
    """Store `key` in the zset `namespace` with `score` (an int, sent as text).

    No-op when there is no db_client configured.
    """
    if not db_client:
        return
    db_client.zset(namespace, key, to_unicode(int(score)))