def create_note(note_dict, date_str=None):
    """Create a note and run its side effects (counts, events, stats).

    :param note_dict: dict describing the note; "content", "data",
        "creator", "priority" and "mtime" are required keys,
        "parent_id" defaults to "0" and "name"/"type" are optional.
    :param date_str: optional date string forwarded to create_note_base.
    :return: the id of the newly created note.
    """
    # Read the required keys up front so a malformed dict fails fast
    # with a KeyError before any side effect happens.
    content = note_dict["content"]
    data = note_dict["data"]
    creator = note_dict["creator"]
    priority = note_dict["priority"]
    mtime = note_dict["mtime"]
    parent_id = note_dict.get("parent_id", "0")
    name = note_dict.get("name")
    # FIX: the original compared the *builtin* `type` against "gallery"
    # (never true) and published the builtin in the event payload;
    # the note type must come from the dict itself.
    note_type = note_dict.get("type")

    # Create the base record of the note
    note_id = create_note_base(note_dict, date_str)

    # Update the page count of the parent group.
    # FIX: use the defaulted parent_id instead of note_dict["parent_id"],
    # which raised KeyError when the key was absent despite the .get above.
    update_children_count(parent_id)
    xmanager.fire("note.add", dict(name=name, type=note_type, id=note_id))

    # Galleries get a dedicated upload directory
    if note_type == "gallery":
        dirname = os.path.join(xconfig.UPLOAD_DIR, creator, str(note_id))
        xutils.makedirs(dirname)

    # Refresh the creator's note statistics
    refresh_note_stat(creator)

    # Bump the parent directory's modification time
    touch_note(parent_id)
    return note_id
def chk_backup():
    """Inspect the backup directory and create a fresh DB backup when due.

    Keeps at most _MAX_BACKUP_COUNT files (oldest removed first when
    _remove_old is set) and triggers backup_db() when there is no backup
    yet, when the newest one is older than _BACKUP_INTERVAL seconds, or
    when the newest file name is not a valid backup name.
    """
    backup_dir = config.get("BACKUP_DIR")
    xutils.makedirs(backup_dir)
    names = sorted(os.listdir(backup_dir))
    logutil.info("sorted backup files: {}", names)

    # Trim the oldest file once the directory grows past the cap
    if _remove_old and len(names) > _MAX_BACKUP_COUNT:
        oldest_path = os.path.join(backup_dir, names[0])
        fsutil.remove(oldest_path)
        logutil.warn("remove file {}", oldest_path)

    if not names:
        # No backup exists yet: create the first one
        backup_db()
        return

    newest = names[-1]
    match = re.compile(r"data\.(\d+)\.db").match(newest)
    if match is None:
        # Invalid backup file name: create a fresh backup first,
        # then drop the bad file.
        backup_db()
        newest_path = os.path.join(backup_dir, newest)
        fsutil.remove(newest_path)
        logutil.warn("not valid db backup file, remove {}", newest_path)
        return

    created = time.mktime(time.strptime(match.group(1), "%Y%m%d"))
    # backup every 10 days.
    if time.time() - created > _BACKUP_INTERVAL:
        backup_db()
def create_note(note_dict):
    """Create a note in the kv store and its upload directory.

    :param note_dict: dict describing the note; "content", "data",
        "creator", "priority" and "mtime" are required keys,
        "parent_id" defaults to "0" and "name"/"type" are optional.
    :return: the generated time-sequence id of the note.
    """
    # Read the required keys up front so a malformed dict fails fast.
    content = note_dict["content"]
    data = note_dict["data"]
    creator = note_dict["creator"]
    priority = note_dict["priority"]
    mtime = note_dict["mtime"]
    parent_id = note_dict.get("parent_id", "0")
    name = note_dict.get("name")
    # FIX: the original tested the *builtin* `type` (`type != "group"` is
    # always true, so every note — groups included — got the nested
    # per-parent directory). Read the type from the dict instead.
    note_type = note_dict.get("type")

    note_id = dbutil.timeseq()
    note_dict["id"] = note_id
    kv_put_note(note_id, note_dict)

    # Update the page count of the parent group TODO
    # FIX: use the defaulted parent_id instead of note_dict["parent_id"],
    # which raised KeyError when the key was absent despite the .get above.
    update_children_count(parent_id)
    xmanager.fire("note.add", dict(name=name, type=note_type))

    # Create the matching upload folder: groups live directly under the
    # creator, other notes are nested under their parent group.
    if note_type != "group":
        dirname = os.path.join(xconfig.UPLOAD_DIR, creator, str(parent_id), str(note_id))
    else:
        dirname = os.path.join(xconfig.UPLOAD_DIR, creator, str(note_id))
    xutils.makedirs(dirname)
    return note_id
def POST(self):
    """Handle a (possibly chunked) file upload.

    Reads the upload target from request arguments, writes the incoming
    data either as a numbered ``.part`` file (chunked mode) or at a byte
    offset inside the final file, merges the parts once the last chunk
    arrives, and touches the associated note if a note_id was supplied.
    Returns a dict with code/webpath/link.
    """
    user_name = xauth.current_name()
    # part_file=True: each chunk is written to "<name>_<i>.part" and the
    # parts are merged at the end; the False path (seek-based writes into
    # one file) is currently never taken.
    part_file = True
    chunksize = 5 * 1024 * 1024
    chunk = xutils.get_argument("chunk", 0, type=int)
    chunks = xutils.get_argument("chunks", 1, type=int)
    file = xutils.get_argument("file", {})
    prefix = xutils.get_argument("prefix", "")
    dirname = xutils.get_argument("dirname", xconfig.DATA_DIR)
    dirname = dirname.replace("$DATA", xconfig.DATA_DIR)
    note_id = xutils.get_argument("note_id")

    # Must not access the parent directory (path traversal guard)
    if ".." in dirname:
        return dict(code="fail", message="can not access parent directory")

    filename = None
    webpath = ""
    origin_name = ""

    if hasattr(file, "filename"):
        origin_name = file.filename
        xutils.trace("UploadFile", file.filename)
        # basename strips any client-supplied directory components
        filename = os.path.basename(file.filename)
        filename = xutils.get_real_path(filename)
        if dirname == "auto":
            # "auto" target: derive the storage path from the user name
            filename = generate_filename(filename, prefix)
            filepath, webpath = xutils.get_upload_file_path(user_name, filename, replace_exists=True)
            dirname = os.path.dirname(filepath)
            filename = os.path.basename(filepath)
        else:
            # TODO check permission.
            pass
        if part_file:
            # chunked mode: one temp file per chunk, always written from 0
            tmp_name = "%s_%d.part" % (filename, chunk)
            seek = 0
        else:
            # direct mode: write this chunk at its byte offset
            tmp_name = filename
            seek = chunk * chunksize
        xutils.makedirs(dirname)
        tmp_path = os.path.join(dirname, tmp_name)
        # NOTE(review): mode "wb" truncates; the fp.seek(seek) below only
        # matters on the (unused) part_file=False path — confirm intended.
        with open(tmp_path, "wb") as fp:
            fp.seek(seek)
            if seek != 0:
                xutils.log("seek to {}", seek)
            for file_chunk in file.file:
                fp.write(file_chunk)
    else:
        return dict(code="fail", message="require file")
    # Last chunk received: stitch the .part files into the final file
    if part_file and chunk+1==chunks:
        self.merge_files(dirname, filename, chunks)
    # NOTE(review): try_touch_note and the note.touch call below look
    # redundant — verify whether both are needed.
    try_touch_note(note_id)
    if note_id != None and note_id != "":
        xutils.call("note.touch", note_id)
    return dict(code="success", webpath=webpath, link=get_link(origin_name, webpath))
def download_res_list(reslist, dirname):
    """Download every resource URL in *reslist* into ./tmp/<dirname>.

    :param reslist: iterable of resource URLs.
    :param dirname: sub-directory name created under ./tmp.
    """
    dirname = os.path.join("./tmp", dirname)
    xutils.makedirs(dirname)
    for res in reslist:
        print("Download", res)
        res = xutils.quote_unicode(res)
        # FIX: the original bound the response body to `bytes`,
        # shadowing the builtin of the same name.
        content = xutils.urlopen(res).read()
        name = get_res_name(res)
        path = os.path.join(dirname, name)
        with open(path, "wb") as fp:
            fp.write(content)
def GET(self, error=""): parent = config.APP_DIR xutils.makedirs(parent) app_list = [] for fname in os.listdir(parent): fpath = os.path.join(parent, fname) if fname.endswith(".zip"): app_list.append(FileInfo(fname, parent)) return xtemplate.render("system/app_admin.html", app_list=app_list, error=error, upload_path=os.path.abspath(xconfig.APP_DIR))
def POST(self):
    """Save an uploaded file into *dirname* and redirect to its listing.

    Expects a multipart form with fields ``file`` and ``dirname``.
    Always ends by raising web.seeother back to the directory view.
    """
    args = web.input(file={}, dirname=None)
    dirname = args.dirname
    if 'file' in args:
        if args.file.filename == "":
            # FIX: the original redirected to "//fs/%s" — a
            # protocol-relative URL pointing at host "fs" — while the
            # success path below uses "/fs/%s". Unify on the latter.
            raise web.seeother("/fs/%s" % quote(dirname))
        xutils.makedirs(dirname)
        # basename + quote defend against client-supplied path components
        filename = xutils.quote(os.path.basename(args.file.filename))
        filepath = os.path.join(dirname, filename)
        with open(filepath, "wb") as fout:
            # Stream chunk by chunk instead of reading the whole
            # upload into memory.
            for chunk in args.file.file:
                fout.write(chunk)
    raise web.seeother("/fs/%s" % quote(dirname))
def chk_backup():
    """Weekly backup check.

    Prints the creation time of every existing backup, bails out unless
    today is Saturday, trims the directory to _MAX_BACKUP_COUNT files,
    and creates a new DB backup when none exists, when the newest one is
    older than _BACKUP_INTERVAL seconds, or when the newest file name is
    not a valid backup name.
    """
    backup_dir = xconfig.BACKUP_DIR
    xutils.makedirs(backup_dir)
    names = sorted(os.listdir(backup_dir))
    logutil.info("sorted backup files: {}", names)

    for name in names:
        fpath = os.path.join(backup_dir, name)
        ctime = os.stat(fpath).st_ctime
        print("%s - %s" % (fpath, xutils.format_time(ctime)))

    # Backup once a week (tm_wday 5 == Saturday)
    if time.localtime().tm_wday != 5:
        print("not the day, quit")
        return

    # Trim the oldest file once the directory grows past the cap
    if _remove_old and len(names) > _MAX_BACKUP_COUNT:
        oldest_path = os.path.join(backup_dir, names[0])
        fsutil.remove(oldest_path)
        logutil.warn("remove file {}", oldest_path)

    if not names:
        # No backup exists yet: create the first one
        backup_db()
        return

    newest = names[-1]
    match = re.compile(r"data\.(\d+)\.db").match(newest)
    if match is None:
        # Invalid backup file name: create a fresh backup first,
        # then drop the bad file.
        backup_db()
        newest_path = os.path.join(backup_dir, newest)
        fsutil.remove(newest_path)
        logutil.warn("not valid db backup file, remove {}", newest_path)
        return

    created = time.mktime(time.strptime(match.group(1), "%Y%m%d"))
    # backup every 10 days.
    if time.time() - created > _BACKUP_INTERVAL:
        backup_db()
def POST(self):
    """Import a web page (by URL or uploaded file) and extract its parts.

    Fetches or reads the HTML, strips scripts/styles, collects images,
    links, css and script addresses, converts the page to markdown text,
    optionally archives it under DATA_DIR/archive/, and renders the
    result template. Any exception is caught and rendered as an error.
    """
    try:
        file = xutils.get_argument("file", {})
        address = xutils.get_argument("url", "")
        name = xutils.get_argument("name", "")
        filename = ""
        if hasattr(file, "filename"):
            filename = file.filename
        plain_text = ""
        if not isempty(address):
            # Fetch from the given URL
            html = readhttp(address)
        else:
            # Read from the uploaded file
            html = ""
            # assumes the upload is UTF-8 encoded — TODO confirm
            for chunk in file.file:
                html += chunk.decode("utf-8")

        print("Read html, filename={}, length={}".format(
            filename, len(html)))

        soup = BeautifulSoup(html, "html.parser")
        # Drop script/style elements before extracting plain text
        element_list = soup.find_all(["script", "style"])
        for element in element_list:
            element.extract()
        plain_text = soup.get_text(separator=" ")
        plain_text = clean_whitespace(plain_text)

        images = soup.find_all("img")
        links = soup.find_all("a")
        csses = soup.find_all("link")
        scripts = soup.find_all("script")
        # texts = soup.find_all(["p", "span", "div", "h1", "h2", "h3", "h4"])

        # Convert the page to markdown, prefixed with its origin
        h = HTML2Text(baseurl=address)
        text = "From %s\n\n" % address + h.handle(html)
        texts = [text]

        images = get_addr_list(images)
        scripts = get_addr_list(scripts)

        if name != "" and name != None:
            # Archive the markdown under DATA_DIR/archive/YYYY/mm/dd
            dirname = os.path.join(xconfig.DATA_DIR,
                                   time.strftime("archive/%Y/%m/%d"))
            xutils.makedirs(dirname)
            path = os.path.join(
                dirname, "%s_%s.md" % (name, time.strftime("%H%M%S")))
            xutils.savetofile(path, text)
            print("save file %s" % path)

        # NOTE(review): dead code — never runs, and it references an
        # undefined name `content`; probably a disabled feature draft.
        if False:
            user_name = xauth.get_current_name()
            xutils.call("note.create",
                        name=name,
                        content=content,
                        type="md",
                        tags=["来自网络"],
                        creator=user_name)

        return xtemplate.render(self.template_path,
                                show_aside=False,
                                images=images,
                                links=links,
                                csses=csses,
                                scripts=scripts,
                                texts=texts,
                                address=address,
                                url=address,
                                plain_text=plain_text)
    except Exception as e:
        xutils.print_stacktrace()
        return xtemplate.render(self.template_path,
                                show_aside=False,
                                error=str(e))
def save_to_archive_dir(name):
    """Save `text` as a timestamped markdown file under DATA_DIR/archive.

    The file is written to archive/YYYY/mm/dd/<name>_HHMMSS.md.

    NOTE(review): `text` is not defined in this function — it must be a
    module-level global (or this raises NameError); likely it should be
    a parameter. Confirm against the caller.
    """
    dirname = os.path.join(xconfig.DATA_DIR,
                           time.strftime("archive/%Y/%m/%d"))
    xutils.makedirs(dirname)
    path = os.path.join(dirname,
                        "%s_%s.md" % (name, time.strftime("%H%M%S")))
    xutils.savetofile(path, text)
    print("save file %s" % path)