def get_downloadUrl(drive_id, disk_id, id):
    """Fetch a fresh download URL for a OneDrive item and cache it in MongoDB.

    Args:
        drive_id: Drive identifier, passed through unchanged on retries.
        disk_id: Primary key of the drive_list row holding the OAuth token.
        id: OneDrive item id.  NOTE(review): shadows the builtin ``id`` —
            kept for backward compatibility with existing callers.

    Returns:
        dict with ``name`` and ``downloadUrl`` for the requested item.
    """
    data_list = driveModels.drive_list.find_by_id(disk_id)
    # The token column stores a JSON string that itself contains a JSON
    # string, hence the double loads.
    token = json.loads(json.loads(data_list.token))
    base_url = config.app_url + 'v1.0/me/drive/items/' + id
    headers = {'Authorization': 'Bearer {}'.format(token["access_token"])}
    # .json() replaces the original json.loads(resp.text) round-trip.
    get_res = requests.get(base_url, headers=headers, timeout=30).json()

    if 'error' in get_res:
        # Access token most likely expired: refresh and retry.
        # NOTE(review): retry is unbounded; a persistent error (e.g. item
        # deleted) recurses until RecursionError — consider a retry cap.
        driveLogic.reacquireToken(disk_id)
        return get_downloadUrl(drive_id, disk_id, id)

    if '@microsoft.graph.downloadUrl' not in get_res:
        # Response carried no download URL; retry until Graph returns one.
        return get_downloadUrl(drive_id, disk_id, id)

    collection = MongoDB.db["drive_" + str(disk_id)]
    expiry = int(time.time()) + 300  # cached URL treated as valid ~5 min
    if collection.find_one({"id": get_res["id"]}):
        # Already cached: refresh only the volatile fields.
        collection.update_one({"id": get_res["id"]}, {
            "$set": {
                "downloadUrl": get_res["@microsoft.graph.downloadUrl"],
                "timeout": expiry
            }
        })
    else:
        collection.insert_one({
            "id": get_res["id"],
            "parentReference": get_res["parentReference"]["id"],
            "name": get_res["name"],
            "file": get_res["file"]["mimeType"],
            "path": get_res["parentReference"]["path"].replace(
                "/drive/root:", ""),
            "size": get_res["size"],
            "createdDateTime": common.utc_to_local(
                get_res["fileSystemInfo"]["createdDateTime"]),
            "lastModifiedDateTime": common.utc_to_local(
                get_res["fileSystemInfo"]["lastModifiedDateTime"]),
            "downloadUrl": get_res["@microsoft.graph.downloadUrl"],
            "timeout": expiry
        })
    return {
        "name": get_res["name"],
        "downloadUrl": get_res["@microsoft.graph.downloadUrl"]
    }
def files(id):
    """Admin view: render one drive's file listing with humanized metadata."""
    drive_info = models.drive_list.find_by_id(id)
    drive_id = drive_info.drive_id
    chief = int(drive_info.chief)
    uploads_path = request.args.get('path')
    # Empty/missing path falls back to the drive root.
    path = request.args.get('path') or ''
    current_url = '/admin/drive/files/' + str(id) + '/?path=' + path

    rows = logic.get_one_file_list(id, path)["data"]
    for row in rows:
        # Convert timestamps to local time and byte counts to a readable size.
        row["lastModifiedDateTime"] = common.utc_to_local(
            row["lastModifiedDateTime"])
        row["size"] = common.size_cov(row["size"])

    return render_template(
        'admin/drive/files.html',
        top_nav='drive',
        activity_nav='edit',
        chief=chief,
        id=id,
        current_url=current_url,
        drive_id=drive_id,
        uploads_path=uploads_path,
        data=rows)
def upProcess(id, uid, files_disk_id, fileName, remotePath):
    """Upload a staged temp file to OneDrive, cache its metadata, mark task done.

    Args:
        id: Primary key of the files task row to update on completion.
        uid: User id segment of the temp-upload directory.
        files_disk_id: Target disk id; also names the Mongo collection.
        fileName: File name inside the temp-upload directory.
        remotePath: Destination path on the remote drive.
    """
    collection = MongoDB.db["files_disk_" + str(files_disk_id)]
    local_path = os.path.join(
        os.getcwd(), "temp_uploads", "users_files_temp",
        str(files_disk_id), str(uid), fileName)

    # Graph's simple-upload endpoint caps out at 4 MiB; larger files go
    # through the upload-session flow, whose response uses the legacy
    # "@content.downloadUrl" key instead of "@microsoft.graph.downloadUrl".
    if os.path.getsize(local_path) > 4194304:
        res = putfilebig(id, uid, files_disk_id, fileName, remotePath)
        url_key = "@content.downloadUrl"
    else:
        res = putfilesmall(id, uid, files_disk_id, fileName, remotePath)
        url_key = "@microsoft.graph.downloadUrl"

    # Single metadata document (was duplicated per branch, differing only
    # in the download-URL key).
    collection.insert_one({
        "id": res["id"],
        "parentReference": res["parentReference"]["id"],
        "name": res["name"],
        # NOTE(review): "folder" looks copy-pasted from the folder-listing
        # code — an uploaded file presumably should store
        # res["file"]["mimeType"] (as updateres below does); confirm before
        # changing, preserved for behavior compatibility.
        "file": "folder",
        "path": res["parentReference"]["path"].replace("/drive/root:", ""),
        "size": res["size"],
        "createdDateTime": common.utc_to_local(
            res["fileSystemInfo"]["createdDateTime"]),
        "lastModifiedDateTime": common.utc_to_local(
            res["fileSystemInfo"]["lastModifiedDateTime"]),
        "downloadUrl": res[url_key],
        "timeout": int(time.time()) + 300
    })

    os.remove(local_path)  # delete the staged temp file
    # Mark the upload task finished with the remote item's real metadata.
    filesModels.files.update({
        "id": id,
        "file": res["file"]["mimeType"],
        "size": res["size"],
        "files_id": res["id"],
        "status": 1
    })
def files_disk_files(id):
    """Admin view: render one files-disk listing with humanized metadata."""
    uploads_path = request.args.get('path')
    # Empty/missing path falls back to the disk root.
    path = request.args.get('path') or ''
    current_url = ('/admin/files/files_disk_files/' + str(id)
                   + '/?path=' + path)

    entries = logic.get_one_file_list(id, path)["data"]["value"]
    for entry in entries:
        # Convert timestamps to local time and byte counts to readable sizes.
        entry["lastModifiedDateTime"] = common.utc_to_local(
            entry["lastModifiedDateTime"])
        entry["size"] = common.size_cov(entry["size"])

    return render_template(
        'admin/files/files_disk_files.html',
        top_nav='files',
        activity_nav='edit',
        id=id,
        current_url=current_url,
        data=entries)
def task_getlist(id, path, type):
    """Recursively crawl a drive listing into the ``drive_<id>`` collection.

    type == "all": every folder row is inserted unconditionally; any other
    type only inserts folders not already cached.  Sub-folders and files are
    each processed on their own thread (``task_getlist`` / ``task_write``).

    Args:
        id: Drive id; names the Mongo collection.
        path: Listing path relative to the drive root ('' for root).
        type: Crawl mode ("all" = full rebuild, otherwise incremental).
    """
    path = path or ''
    res = logic.get_one_file_list(id, path)

    def folder_doc(item):
        # Metadata row stored for a folder entry (no download URL —
        # folders are not downloadable).
        return {
            "id": item["id"],
            "parentReference": item["parentReference"]["id"],
            "name": item["name"],
            "file": "folder",
            "path": item["parentReference"]["path"].replace(
                "/drive/root:", ""),
            "size": item["size"],
            "createdDateTime": common.utc_to_local(
                item["fileSystemInfo"]["createdDateTime"]),
            "lastModifiedDateTime": common.utc_to_local(
                item["fileSystemInfo"]["lastModifiedDateTime"])
        }

    def spawn(target, args):
        # Fire-and-forget worker thread (matches the original behavior of
        # never joining the crawl threads).
        threading.Thread(target=target, args=args).start()

    try:
        # MongoDB creates the collection lazily on first insert.
        collection = MongoDB.db["drive_" + str(id)]
        for item in res["data"]["value"]:
            if "folder" in item:
                if type == "all":
                    collection.insert_one(folder_doc(item))
                elif not collection.find_one({"id": item["id"]}):
                    # Incremental mode: only cache folders we haven't seen.
                    collection.insert_one(folder_doc(item))
                # Recurse into the sub-folder either way.
                spawn(task_getlist, (id, "/" + path + "/" + item["name"], type))
            else:
                spawn(task_write, (id, item, type))
    except Exception:
        # Best-effort retry on transient failures (e.g. listing/DB hiccup).
        # Narrowed from a bare ``except`` so KeyboardInterrupt/SystemExit
        # still propagate; NOTE(review): retry is still unbounded.
        task_getlist(id, path, type)
def task_write(id, data, type):
    """Persist one file entry into the ``drive_<id>`` MongoDB collection.

    type == "all": always insert (full rebuild — no existence check).
    Otherwise insert when the item is new, or refresh only the cached
    download URL / expiry when it already exists.

    Args:
        id: Drive id; names the Mongo collection.
        data: Graph driveItem dict for a file (must carry "file" and
              "@microsoft.graph.downloadUrl").
        type: Crawl mode, see above.
    """
    # MongoDB creates the collection lazily on first insert.
    collection = MongoDB.db["drive_" + str(id)]
    expiry = int(time.time()) + 300  # cached URL treated as valid ~5 min

    # Short-circuit keeps the original behavior: "all" mode never queries
    # for an existing row.
    if type != "all" and collection.find_one({"id": data["id"]}) is not None:
        # Known item: refresh only the volatile fields.
        collection.update_one({"id": data["id"]}, {
            "$set": {
                "downloadUrl": data["@microsoft.graph.downloadUrl"],
                "timeout": expiry
            }
        })
    else:
        # New item (or full rebuild): insert the complete metadata row.
        # Single literal replaces the two identical copies in the original.
        collection.insert_one({
            "id": data["id"],
            "parentReference": data["parentReference"]["id"],
            "name": data["name"],
            "file": data["file"]["mimeType"],
            "path": data["parentReference"]["path"].replace(
                "/drive/root:", ""),
            "size": data["size"],
            "createdDateTime": common.utc_to_local(
                data["fileSystemInfo"]["createdDateTime"]),
            "lastModifiedDateTime": common.utc_to_local(
                data["fileSystemInfo"]["lastModifiedDateTime"]),
            "downloadUrl": data["@microsoft.graph.downloadUrl"],
            "timeout": expiry
        })