def test_file_done():
    """Verify that ProcessRepository.file_done() removes the source file and
    renames the encoded output according to the configured rename rules."""
    with patch("os.rename") as rename_mock, patch("os.remove") as remove_mock:
        # Persist a dummy File so file_done() can look it up and rename it.
        src_name = "ThisIsAmazing.11.12.10.PyEncode.Is.The.Best.SEPARATOR.1080p.MP4-ABC.mp4"
        base_dir = "/this/path/is/fake"
        dummy = File(id=1, filename=base_dir + os.sep + src_name)
        dummy.output_filename = src_name + ".pyencode"
        db.session.add(dummy)
        db.session.commit()

        # Enable deletion of the original and regex-based renaming.
        config["encoding"]["delete_old_file"] = "True"
        config["encoding"]["rename_enabled"] = "True"
        config["encoding"]["rename_search"] = (
            r"(?P<head>.+)(?P<resolution>1080|720|2160)(?:p|P)\.(?P<tail>.+)\.(?P<extension>\w{3})"
        )
        config["encoding"]["rename_replace"] = r"\g<head>720p.\g<tail>-selfmade.mkv"
        renamed = "ThisIsAmazing.11.12.10.PyEncode.Is.The.Best.SEPARATOR.720p.MP4-ABC-selfmade.mkv"

        ProcessRepository.processes[dummy.id] = None
        ProcessRepository.file_done(dummy)

        remove_mock.assert_called_once_with(dummy.filename)
        rename_mock.assert_called_once_with(
            base_dir + os.sep + dummy.output_filename,
            base_dir + os.sep + renamed)
    return
def setUp(self):
    """Create two packages, each with one processing file and one queued file."""
    super().setUp()

    # add some dummy data
    self.package1 = Package(queue=True, title="Dummy Package")
    db.session.add(self.package1)
    # a File that is processing and a File that is queued
    self.p1_file1 = File(filename="dummy.mkv", size=100 * 1024,
                         status=StatusMap.processing.value)
    self.p1_file2 = File(filename="dummy.mkv", size=100 * 1024,
                         status=StatusMap.queued.value)
    # BUG FIX: Session.add() accepts a single instance (the second positional
    # argument is the private `_warn` flag), so the old two-argument call only
    # registered the first file. Use add_all() to register both.
    db.session.add_all([self.p1_file1, self.p1_file2])
    self.package1.files.append(self.p1_file1)
    self.package1.files.append(self.p1_file2)
    db.session.commit()

    # add a second package for some tests
    self.package2 = Package(queue=True, title="Dummy Package 2")
    db.session.add(self.package2)
    self.p2_file1 = File(filename="dummy.mkv", size=100 * 1024,
                         status=StatusMap.processing.value)
    self.p2_file2 = File(filename="dummy.mkv", size=100 * 1024,
                         status=StatusMap.queued.value)
    db.session.add_all([self.p2_file1, self.p2_file2])
    self.package2.files.append(self.p2_file1)
    self.package2.files.append(self.p2_file2)
    db.session.commit()
    return
def file_upload():
    """Handle file uploads (POST) and serve an upload form (GET).

    On POST: derives a content-addressed name from the MD5 of the uploaded
    bytes plus the original extension, stores the file under FILE_DIR
    (skipping the write if identical content already exists), and records
    a File row for the uploading user's token.
    """
    if request.method == 'POST':
        f = request.files['file']
        # Prefer an explicit "name" form field over the browser-supplied filename.
        if "name" in request.form:
            name = request.form["name"]
        else:
            name = f.filename
        # MD5 of the full payload; used to deduplicate stored files.
        md5 = hashlib.md5(f.read()).hexdigest()
        # NOTE(review): rsplit('.', 1)[1] raises IndexError for names without
        # an extension — presumably callers always include one; verify.
        real_name = "{}.{}".format(md5, name.rsplit('.', 1)[1])
        if pathlib.Path(config["basic"].FILE_DIR + "/{}".format(real_name)).is_file():
            pass  # identical content already stored — skip the write
        else:
            f.seek(0)  # rewind: the read() above consumed the stream
            f.save(config["basic"].FILE_DIR + "/{}".format(real_name))
        try:
            uid = get_uid_by_tokenid(request.form['tokenid'])
            new_file = File(_uid=uid, _md5=md5, _src_name=name)
            print(new_file.json())
            db.session.add(new_file)
            db.session.commit()
            return jsonify(new_file.json())
        except:  # NOTE(review): bare except hides the real failure cause
            return jsonify({'code': 3, 'msg': 'commit changes fail'})
    if request.method == 'GET':
        # NOTE(review): the HTML form string literal is truncated in this view
        # of the file; the remainder is not visible here.
        return '''
def upload(self):
    """Store each uploaded file under self.parent_id, deduplicating by MD5.

    Returns the list of File records (existing, copied, or newly created)
    corresponding to the uploaded file storages.
    """
    ret = []
    self.mkdir_if_not_exists()
    for single in self._file_storage:
        file_md5 = self._generate_md5(single.read())
        single.seek(0)  # rewind so a later save() writes the full content
        file = File.query.filter_by(md5=file_md5).first()
        # Content already known but living in another folder: copy the record
        # into the target folder unless a copy already exists there.
        if file and file.parent_id != self.parent_id:
            if not File.query.filter_by(parent_id=self.parent_id,
                                        md5=file_md5).first():
                # BUG FIX: FileDao.copy_file() does not return the new record
                # (see copy_file in this file), so the old code left `file` as
                # None and fell into the creation branch, inserting a duplicate.
                FileDao.copy_file(dest_parent_id=self.parent_id,
                                  src_file_id=file.id)
            # Resolve the record that actually lives in the target folder.
            file = File.query.filter_by(parent_id=self.parent_id,
                                        md5=file_md5).first()
        # Unknown content: persist it to disk and create a record.
        if not file:
            absolute_path, relative_path, uuid_filename = self._get_store_path(
                single.filename)
            secure_filename(single.filename)  # NOTE(review): result discarded
            single.save(absolute_path)
            File.create(parent_id=self.parent_id,
                        name=single.filename,
                        uuid_name=uuid_filename,
                        path=relative_path,
                        extension=self._get_ext(single.filename),
                        size=self._get_size(single),
                        md5=file_md5)
            file = File.get(parent_id=self.parent_id, md5=file_md5)
        ret.append(file)
    return ret
def test_file_done():
    # file_done() must delete the original file and apply the configured
    # rename pattern to the encoded output file.
    with patch("os.rename") as mocked_rename:
        with patch("os.remove") as mocked_remove:
            # Seed the database with a file whose name matches the pattern.
            original = "ThisIsAmazing.11.12.10.PyEncode.Is.The.Best.SEPARATOR.1080p.MP4-ABC.mp4"
            directory = "/this/path/is/fake"
            record = File(id=1, filename=directory + os.sep + original)
            record.output_filename = original + ".pyencode"
            db.session.add(record)
            db.session.commit()

            # Turn on deletion of the source and regex-based renaming.
            config["encoding"]["delete_old_file"] = "True"
            config["encoding"]["rename_enabled"] = "True"
            config["encoding"]["rename_search"] = (
                r"(?P<head>.+)(?P<resolution>1080|720|2160)(?:p|P)\.(?P<tail>.+)\.(?P<extension>\w{3})"
            )
            config["encoding"]["rename_replace"] = r"\g<head>720p.\g<tail>-selfmade.mkv"
            expected = "ThisIsAmazing.11.12.10.PyEncode.Is.The.Best.SEPARATOR.720p.MP4-ABC-selfmade.mkv"

            ProcessRepository.processes[record.id] = None
            ProcessRepository.file_done(record)

            mocked_remove.assert_called_once_with(record.filename)
            mocked_rename.assert_called_once_with(
                directory + os.sep + record.output_filename,
                directory + os.sep + expected)
    return
def admin_file_edit():
    """Create or update a File's metadata (description) from the edit form.

    Looks the file up by directory + name; if no DB record exists yet but the
    file is present on disk, a new record is created.  On successful form
    validation the change is persisted together with an audit Entry.
    """
    directory_id = request.form.get('directory_id')
    directory = Directory.query.get(directory_id)
    # BUG FIX: a missing or unknown directory_id used to raise AttributeError
    # on `directory.path` below; answer with 404 instead.
    if directory is None:
        raise NotFound
    name = request.form.get('name')
    file = File.get_by_directory_and_name(directory, name)
    if file:
        old_description = file.description
    else:
        # No DB record yet — only allow editing files that exist on disk.
        p = Path(directory.path + '/' + name)
        if not p.is_file():
            raise NotFound
        file = File(name=name, directory_id=directory.id)
        db.session.add(file)
        old_description = "No description"
    form = FileForm(obj=file)
    if form.validate_on_submit():
        form.populate_obj(file)
        # Record an audit-trail entry with the old/new description diff.
        message = Entry.make_diff_message(
            {"Description": (old_description, file.description)})
        entry = Entry(user_id=current_user.id,
                      ip=int(ip_address(request.remote_addr)),
                      message="Edited file {0}, {1}".format(
                          directory.path + '/' + file.name, message))
        db.session.add(entry)
        db.session.commit()
        flash(u"File edited", 'success')
        return redirect(url_for('browse_path', name=directory.name))
def upload(self):
    """Upload files to cloud storage (Qiniu), deduplicating by MD5.

    Returns the list of File records for the uploaded file storages.
    """
    ret = []
    # PERF: hoisted out of the loop — the parent folder is the same for every
    # uploaded file, so one lookup suffices (also validates the parent exists
    # before any work is done).
    parent_folder = File.get_or_404(id=self.parent_id)
    for single in self._file_storage:
        file_md5 = self._generate_md5(single.read())
        single.seek(0)  # rewind after hashing so the next read() gets all bytes
        file = File.query.filter_by(md5=file_md5).first()
        # Known content in another folder: copy the record into the target
        # folder (unless already present), then re-resolve by md5.
        if file and self.parent_id != file.parent_id:
            if not File.query.filter_by(parent_id=self.parent_id,
                                        md5=file_md5).first():
                file = FileDao.copy_file(dest_parent_id=self.parent_id,
                                         src_file_id=file.id,
                                         user_id=parent_folder.user_id)
            file = File.get(md5=file_md5)
        # Unknown content: push the bytes to the cloud and create a record.
        if not file:
            absolute_path, relative_path, uuid_filename = self._get_store_path(
                single.filename)
            # Upload to Qiniu; returns the stored path/URL (falsy on failure).
            path = self.save(single.filename, single.read())
            if path:
                secure_filename(single.filename)  # NOTE(review): result unused
                File.create(parent_id=self.parent_id,
                            name=single.filename,
                            uuid_name=uuid_filename,
                            path=path,
                            extension=self._get_ext(single.filename),
                            size=self._get_size(single),
                            md5=file_md5,
                            _from=UrlFromEnum.NETWORK.value)
                file = File.get(parent_id=self.parent_id, md5=file_md5)
        ret.append(file)
    return ret
def create_folder(parent_id, filename):
    """Create a folder entry under the given parent directory.

    :param parent_id: ID of the parent directory
    :param filename: name of the new folder
    """
    # Reject duplicate names within the same parent before creating.
    File.abort_repeat(parent_id=parent_id, name=filename,
                      msg='文件名重复,请重命名!')
    File.create(parent_id=parent_id, name=filename)
def post(self):
    """Persist metadata for a blobstore upload, then redirect home.

    Builds a File entity from the first uploaded blob; any failure is
    reported as HTTP 500.
    """
    try:
        upload = self.get_uploads()[0]
        upload_file = File(
            name=upload.filename,
            format=upload.content_type,
            size=upload.size,
            blob_key=upload.key())
        upload_file.put()
        self.redirect('/')
    # BUG FIX: a bare `except:` also swallows SystemExit/KeyboardInterrupt;
    # catch Exception so only genuine errors are mapped to a 500 response.
    except Exception:
        self.error(500)
def copy_file(dest_parent_id, src_file_id):
    """Copy a file record into another directory.

    Only the database record is duplicated; the underlying stored blob
    (uuid_name/path) is shared between the two records.

    :param dest_parent_id: ID of the destination parent directory
    :param src_file_id: ID of the source file
    :return: the value of File.create() for the copied record.  The old code
        discarded it (implicitly returning None) even though callers assign
        the result of copy_file().
    """
    src_file = File.get_or_404(id=src_file_id)
    return File.create(parent_id=dest_parent_id,
                       uuid_name=src_file.uuid_name,
                       name=src_file.name,
                       path=src_file.path,
                       extension=src_file.extension,
                       _from=src_file._from,
                       size=src_file.size,
                       md5=src_file.md5)
def reconstruct_files():
    """Rebuild every file that has at least one unclean fragment.

    :return: the number of files reconstructed
    """
    total = 0
    for damaged in File.objects(fragments__is_clean=False):
        logger.debug('Reconstructing {}'.format(damaged))
        damaged.reconstruct()
        total += 1
    return total
def admin_show_file_edit(name):
    """Render the edit form for the file identified by *name*."""
    directory, _, name = get_file_and_directory(name)
    # Re-resolve the File record explicitly by directory and name.
    file = File.get_by_directory_and_name(directory, name)
    form = FileForm(obj=file)
    return render_template('edit_file.html.j2',
                           form=form,
                           name=name,
                           directory_id=directory.id)
def post_file(request):
    """Create or overwrite a file identified by source/collection/name.

    If no matching File exists, a new one is created with the default
    encoding and its source hub resolved; if exactly one exists, its
    content is overwritten.

    :raises ObjectDoesNotExist: when the referenced source hub is unknown
    :raises MultipleObjectsFound: when the hub or file lookup is ambiguous
    :return: serialized file metadata
    """
    files = File.objects(Q(source=request.parameters['source'])
                         & Q(collection=request.parameters['collection'])
                         & Q(filename=request.parameters['name'])).all()
    if len(files) == 0:
        # First write: build a new File with the default encoding.
        file = File()
        file.collection = request.parameters['collection']
        file.filename = request.parameters['name']
        file.encoding = Encoding(**DEFAULT_ENCODING)
        hubs = Hub.objects(cumulus_id=request.parameters['source'])
        if len(hubs) == 0:
            raise ObjectDoesNotExist('Source does not exist')
        elif len(hubs) == 1:
            hub = hubs[0]
        else:
            # BUG FIX: this branch signals ambiguous hubs, not files — the
            # old message wrongly read "Multiple files found for source".
            raise MultipleObjectsFound('Multiple hubs found for source')
        file.source = hub
    elif len(files) == 1:
        file = files[0]
    else:
        raise MultipleObjectsFound('Multiple objects found for the search query')
    # The File object acts as a context-managed writable stream.
    with file as f:
        f.write(request.data)
    return FileSerializer(file).data
def move_files(dest_parent_id, file_ids):
    """Move several files into another directory.

    :param dest_parent_id: ID of the destination parent directory
    :param file_ids: iterable of IDs of the files to move
    """
    file_list = File.query.filter(File.id.in_(file_ids)).all()
    # Only move when no name collision exists in the destination folder;
    # abort_repeat() raises on a duplicate, aborting the whole transaction.
    with db.auto_commit():
        for file in file_list:
            File.abort_repeat(parent_id=dest_parent_id,
                              name=file.name,
                              extension=file.extension,
                              msg='文件名重复,无法移动!')
            file.update(
                parent_id=dest_parent_id,
                commit=False,  # defer the commit to the auto_commit block
            )
def get(self):
    """Handle GET requests: render the index page with an upload URL."""
    upload_target = blobstore.create_upload_url('/upload_file')
    context = {
        'files': File.query(),
        'upload_url': upload_target,
    }
    self.response.write(render('templates/index.html', context))
def user_create_payment():
    """Create a Payment for a user from the JSON request body.

    Non-admin callers (or requests without user_id) always create the payment
    for themselves; admins may create payments for other users.  Validates
    the payload, resolves currency and evidence files, and persists the
    payment in `pending` state.
    """
    user = None
    payload = request.get_json()
    # Resolve the payment's owner.  NOTE: request.json and get_json() return
    # the same cached dict, so the mutation below is visible through payload.
    if not current_user.has_role(
            'admin') or 'user_id' not in request.json.keys():
        request.json['user_id'] = current_user.id
        user = current_user
    elif int(payload['user_id']) == current_user.id:
        user = current_user
    else:
        user = User.query.get(payload['user_id'])
    with PaymentValidator(request) as validator:
        if not validator.validate():
            # Field-level validation errors encoded as "name:status" strings.
            return jsonify({
                'data': [],
                'error': "Couldn't create a Payment",
                'fieldErrors': [{
                    'name': message.split(':')[0],
                    'status': message.split(':')[1]
                } for message in validator.errors]
            })
    # Accept comma as the decimal separator in the submitted amount.
    if isinstance(payload['amount_sent_original'], str):
        payload['amount_sent_original'] = payload[
            'amount_sent_original'].replace(',', '.')
    currency = Currency.query.get(payload['currency_code'])
    if not currency:
        abort(
            Response(f"No currency <{payload['currency_code']}> was found",
                     status=400))
    # Move uploaded evidence files from their temporary location and attach
    # them as File records.
    evidences = []
    if payload.get('evidences'):
        for evidence in payload['evidences']:
            evidences.append(
                File(file_name=evidence['file_name'],
                     path=_move_uploaded_file(evidence['id'])))
    # amount_sent_krw is derived from the original amount via the currency
    # rate.  NOTE(review): raises if amount_sent_original is missing/None —
    # presumably guaranteed by the validator above; confirm.
    payment = Payment(
        user=user,
        changed_by=current_user,
        orders=Order.query.filter(Order.id.in_(payload['orders'])).all(),
        currency=currency,
        amount_sent_original=payload.get('amount_sent_original'),
        amount_sent_krw=float(payload.get('amount_sent_original')) /
        float(currency.rate),
        payment_method_id=payload.get('payment_method').get('id'),
        additional_info=payload.get('additional_info'),
        evidences=evidences,
        status=PaymentStatus.pending,
        when_created=datetime.now())
    db.session.add(payment)
    db.session.commit()
    return jsonify({'data': [payment.to_dict()]})
def user_save_payment(payment_id):
    '''Saves updates in payment.

    Admins may modify amounts, currency, status, user and evidence files;
    regular users may only change the status.  Rejects updates when the
    payment no longer accepts edits (terminal state).
    '''
    payment = Payment.query.get(payment_id)
    if not payment:
        abort(404)
    if not payment.is_editable():
        abort(
            Response(f"Can't update payment in state <{payment.status}>",
                     status=409))
    with PaymentValidator(request) as validator:
        if not validator.validate():
            # abort() accepts a Response object; jsonify() builds one.
            abort(jsonify(validator.errors))
            # return jsonify({
            #     'id': payment_id,
            #     'data': [payment.to_dict()],
            #     'cancelled': [payment_id],
            #     'error': "Couldn't update a Payment",
            #     'fieldErrors': [{'name': message.split(':')[0], 'status': message.split(':')[1]}
            #                     for message in payload.errors]
            # })
    payload = request.get_json()
    if current_user.has_role('admin'):
        # Bulk-apply the whitelisted scalar fields from the payload.
        modify_object(payment, payload, [
            'additional_info', 'amount_sent_krw', 'amount_sent_original',
            'amount_received_krw', 'currency_code', 'status', 'user_id'
        ])
        if payload.get('payment_method') \
                and payment.payment_method_id != payload['payment_method']['id']:
            payment.payment_method_id = payload['payment_method']['id']
            payment.when_changed = datetime.now()
        # Reconcile evidence files: entries with an 'id' are newly uploaded
        # temp files to be moved into place; entries with a 'path' refer to
        # evidences already attached, which are kept.  Anything absent from
        # the payload is dropped.
        evidences = {e.path: e for e in payment.evidences}
        payment.evidences = []
        for evidence in payload.get('evidences'):
            if evidence.get('id'):
                payment.evidences.append(
                    File(file_name=evidence['file_name'],
                         path=_move_uploaded_file(evidence['id'])))
            elif evidence.get('path'):
                payment.evidences.append(evidences[evidence['path']])
        # removed_evidences = payment.evidences.filter(
        #     File.path.notin_(remaining_evidences))
        # for evidence in removed_evidences:
        #     payment.evidences.filter_by(id=evidence.id).delete()
        #     db.session.delete(evidence)
        if payload.get('orders'):
            payment.orders = Order.query.filter(Order.id.in_(
                payload['orders']))
    else:
        # Non-admins may only change the payment status.
        modify_object(payment, payload, ['status'])
    payment.changed_by = current_user
    db.session.commit()
    return jsonify({'data': [payment.to_dict()]})
def get_file(request):
    """Return the serialized content of exactly one matching file.

    :raises ObjectDoesNotExist: no file matches source/collection/name
    :raises MultipleObjectsFound: the query matched more than one file
    """
    query = (Q(source=request.parameters['source'])
             & Q(collection=request.parameters['collection'])
             & Q(filename=request.parameters['name']))
    matches = File.objects(query).all()
    if not matches:
        raise ObjectDoesNotExist('File does not exist')
    if len(matches) > 1:
        raise MultipleObjectsFound('Multiple files found for the search query')
    return FileContentSerializer(matches[0]).data
def _save_file(user, filename, file, public):
    """Persist an uploaded file under the user's directory and record it."""
    title, tail = spilt_point(filename)  # split into base name and extension
    # Build <USER_FILE_URL>/<user id>/<extension>/ and make sure it exists.
    target_dir = get_file_url('USER_FILE_URL') + str(user.id) + '/' + tail + '/'
    makedir(target_dir)
    # Randomized (uuid1) file name avoids collisions between uploads.
    target_path = target_dir + uuid.uuid1().hex + '.' + tail
    file.save(target_path)
    with db.auto_commit():
        record = File(title=title,
                      format=tail,
                      owner_id=user.id,
                      all_could=public,
                      url=target_path.replace(get_file_url('USER_FILE_URL'), ''))
        db.session.add(record)
def test_check_and_start_processes(self, mock_probe, mock_run):
    """Queuing one file and enabling encoding should invoke ffmpeg exactly
    once and emit one file_started and one file_done socket event.

    mock_probe / mock_run are the patched probe and run_ffmpeg helpers
    (injected by the patch decorators, innermost first).
    """
    def mocked_run_ffmpeg(cmd, frame_count):
        # A single progress report with return_code -1 ends the encode
        # immediately.
        yield {
            "return_code": -1,
            "ffmpeg_eta": 1,
            "ffmpeg_progress": 0,
            "ffmpeg_bitrate": 0,
            "ffmpeg_time": 0,
            "ffmpeg_size": 0,
            "ffmpeg_fps": 0
        }
    # mock run_ffmpeg()
    mock_run.side_effect = mocked_run_ffmpeg
    # add Package
    package = Package(queue=True)
    db.session.add(package)
    # add File (queued, so it is eligible to start)
    file = File(status=StatusMap.queued.value, filename="dummy.mkv",
                size=100 * 1024)
    db.session.add(file)
    package.files.append(file)
    db.session.commit()
    # set parallel processes to 1
    config["general"]["parallel_processes"] = "1"
    # start processing; rename/remove are patched so no filesystem access
    # actually happens
    with patch("os.rename"):
        with patch("os.remove"):
            ProcessRepository.set_encoding_active(True)
            self.assertTrue(mock_run.called)
            self.assertTrue(mock_run.call_count == 1)
            received = self.socketio_client.get_received()
            # there should have gotten one file_started and file_done event triggered
            self.assertEqual(
                len([x for x in received if x["name"] == "file_started"]), 1)
            self.assertEqual(
                len([x for x in received if x["name"] == "file_done"]), 1)
            # the status should be "finished" now
            # self.assertEqual(File.query.filter_by(id=file.id).first().status, StatusMap.finished.value)
            # print(File.query.filter_by(id=file.id).first().status)
    return
def v_random(func):
    """Run verification method *func* on a random sample of files.

    Samples VERIFY_FRACTION of all File documents, invokes the named
    zero-argument verification method on each, and collects the UUIDs of
    files whose check returned falsy.

    :param func: name of the File method to invoke for verification
    :return: list of UUIDs of files that failed and need reconstruction
    """
    pipeline = [{
        '$sample': {
            'size': int(len(File.objects) * VERIFY_FRACTION)
        }
    }]
    files_to_reconstruct = list()
    for file_dict in File.objects.aggregate(*pipeline):
        # The aggregation yields raw dicts; re-fetch the document by uuid.
        file = one(File.objects(uuid=file_dict['uuid']))
        if not getattr(file, func)():
            files_to_reconstruct.append(file.uuid)
            logger.debug('{} check failed: {}: {}/{}/{}'.format(
                func, file.uid, file.source, file.collection, file.filename))
    logger.info('Files to reconstruct: {}'.format(len(files_to_reconstruct)))
    # BUG FIX: the collected UUIDs were previously discarded (implicit None
    # return); return them so the caller can actually trigger reconstruction.
    return files_to_reconstruct
def upload():
    """Handle an authenticated file upload.

    Verifies the requester's API key, stores the file under
    UPLOAD_PATH/<user_id>/<md5>.<ext>, and records (or reuses) a File row
    for that path.

    :return: dict with upload metadata, or {"error": ...} if auth fails
    """
    key = False
    if "key" in request.values:
        key = request.values["key"]
    user_defined = (g.user is not None and g.user.is_authenticated()
                    and g.user.key == key)
    if not user_defined:
        return {"error": "User not defined"}
    file = request.files["file"]
    mimetype = file.mimetype  # renamed from `type` (shadowed the builtin)
    name = file.filename
    extension = os.path.splitext(name)[1][1:]
    # Prefer the canonical extension for known mime types.
    if mimetype in Settings.extensions:
        extension = Settings.extensions[mimetype]
    user_id = g.user.id
    # BUG FIX: the old code did name.encode("utf-8") + isoformat(), which is
    # bytes + str and raises TypeError. Concatenate as text, then encode.
    phrase = (name + datetime.now().isoformat()).encode("utf-8")
    digest = hashlib.md5(phrase).hexdigest()
    directory = os.path.join(UPLOAD_PATH, str(user_id))
    if not os.path.exists(directory):
        os.makedirs(directory)
    path = os.path.join(directory, digest) + "." + extension
    file.save(path)
    size = os.path.getsize(path)
    model = File.query.filter_by(path=path).first()
    if model is None:
        model = File()
        model.user_id = user_id
        # BUG FIX: trailing commas previously assigned 1-tuples such as
        # ("name",) instead of scalar values to these columns.
        model.name = name
        model.path = path
        model.type = mimetype
        model.size = size
        model.created_at = datetime.utcnow()
        model.extension = extension
        db.session.add(model)
        db.session.commit()
    return {
        "user_id": g.user.id,
        "auth": user_defined,
        "name": name,
        "path": path,
        "type": mimetype,
        "id": model.id,
        "size": model.size,
    }
def add_package():
    """
    add a new Package
    :return: "1"
    """
    # Unescape every submitted path and verify each one exists on disk.
    paths = json.loads(request.form["paths"])
    for index, raw_path in enumerate(paths):
        paths[index] = html.unescape(raw_path)
        if not os.path.isfile(paths[index]):
            print(paths[index] + " does not exist..")
            return "not_existing"

    # A new package goes to the end of the queue.
    last_package = (Package.query.filter_by(queue=True)
                    .order_by(Package.position.desc()).limit(1).first())
    position = 0 if not last_package else last_package.position + 1

    package = Package(user=current_user,
                      title=request.form["title"],
                      queue=(request.form["queue"] == "1"),
                      position=position)
    db.session.add(package)

    # Attach one queued File per path, preserving the submitted order.
    for file_pos, path in enumerate(paths):
        package.files.append(File(filename=path,
                                  size=os.path.getsize(path),
                                  status=StatusMap.queued.value,
                                  position=file_pos))
    db.session.commit()

    # after adding, see if we have to start processes
    ProcessRepository.check_and_start_processes()
    return "1"
def get_folder_tree():
    """Build and serialize the folder hierarchy.

    Folders are File records without an extension.
    """
    folders = File.get_all(extension=None)
    tree = FolderTree()
    tree.generate_by_list(folders)
    return tree.serialize()
def rename_file(file_id, new_filename):
    """Rename a file, rejecting duplicates within its parent directory."""
    target = File.get_or_404(id=file_id)
    # Abort with an error if a sibling already uses the new name.
    File.abort_repeat(parent_id=target.parent_id,
                      name=new_filename,
                      msg='文件名重复,请重命名!')
    target.update(name=new_filename)
def rev_file():
    """Receive an uploaded file, deduplicate it by content hash, store it,
    and return its public URL.

    Expects form fields: accesstoken (auth), type (target area: news /
    notice / file / person; anything else goes to tmp) and a "file" part.
    Always responds with {"status": ..., "data": ...} JSON.
    """
    try:
        from werkzeug.utils import secure_filename
        import datetime
        from app.extensions import db
        accesstoken = request.form.get('accesstoken')
        # URL that will be returned to the client
        file_url = ""
        # upload target area; defaults to the literal string "None"
        obj = request.form.get('type') or "None"
        # authorization: the token's user must be allowed to publish `obj`
        user_auth = User.get_user_auth(access_token=accesstoken)
        print(user_auth)
        if not user_auth or obj not in user_auth["pub"]:
            return jsonify({"status": "failed",
                            "data": {"code": 1, "msg": "你无权限上传文件!"}})
        # receive the uploaded file
        f = request.files["file"] or None
        if f is None:
            return jsonify({"status": "failed",
                            "data": {"code": 2, "msg": "未接收到文件!"}})
        # split the sanitized filename into stem and extension
        fn, ff = os.path.splitext(secure_filename(f.filename))
        # extension whitelist check
        if str.lower(ff) not in current_app.config["ALLOWED_FILE_FORMAT"]:
            return jsonify({"status": "failed",
                            "data": {"code": 3, "msg": "不允许的文件格式!"}})
        # size check: read() consumes the stream, so rewind afterwards
        f_size = len(f.read())
        f.seek(0, 0)
        if f_size > current_app.config["MAX_FILE_SIZE"]:
            return jsonify({"status": "failed",
                            "data": {"code": 4, "msg": "文件过大!"}})
        # deduplicate by content hash
        file_hash = File.cul_file_hash(f)
        s = SQL()
        # NOTE(review): these queries build SQL by string interpolation;
        # accesstoken is client-supplied, so this is an injection risk —
        # should be parameterized.
        query_res = s.query_value("`path`", "`file`", "`hash`='%s'" % file_hash)
        uid = s.query_value("`id`", "`user`", "access_token='%s'" % accesstoken)[0]
        # hash not seen before: store the file
        if query_res is None or len(query_res) < 1:
            # choose the storage directory and public URL prefix per area
            basepath = current_app.config["UPLOADED_TMP_DEST"]
            if obj == "news":
                basepath = current_app.config["UPLOADED_NEWS_DEST"]
                file_url = os.path.join(current_app.config["UPLOADED_BASE_DIR"], 'news')
            elif obj == "notice":
                basepath = current_app.config["UPLOADED_NOTICE_DEST"]
                file_url = os.path.join(current_app.config["UPLOADED_BASE_DIR"], 'notice')
            elif obj == "file":
                basepath = current_app.config["UPLOADED_FILE_DEST"]
                file_url = os.path.join(current_app.config["UPLOADED_BASE_DIR"], 'file')
            elif obj == "person":
                basepath = current_app.config["UPLOADED_FILE_DEST"]
                file_url = os.path.join(current_app.config["UPLOADED_BASE_DIR"], 'person')
            else:
                basepath = current_app.config["UPLOADED_TMP_DEST"]
                file_url = os.path.join(current_app.config["UPLOADED_BASE_DIR"], 'tmp')
            # final name embeds the area and the content hash for uniqueness
            filename = fn + "-" + obj + "-" + file_hash + ff
            filepath = os.path.join(basepath, filename)
            # write to disk
            f.save(filepath)
            file_url = os.path.join(file_url, filename)
            # NOTE(review): non-public files presumably need dynamic-download
            # permissions set here — not implemented yet.
            # record the file in the database
            new_file = File(name=filename, type=ff, author=uid,
                            pub_date=datetime.datetime.now(), path=file_url
                            , authority=File.create_obj_ahthority(),
                            hash=file_hash, status="default")
            db.session.add(new_file)
            db.session.commit()
        # hash already known: reuse the stored path
        else:
            file_url = query_res[0]
        # audit log of the user's upload action
        uip = request.remote_addr or "hidden"
        usa = str(request.user_agent) or "hidden"
        lgn = Login(uid=uid, time=datetime.datetime.now(), ip=uip,
                    user_agent=usa, access_token=accesstoken,
                    mani="upload file: %s" % file_url)
        db.session.add(lgn)
        db.session.commit()
        return jsonify({"status": "ok", "data": {"url": file_url}})
    except Exception as e:
        log.logger.warning(e)
        return jsonify({"status": "failed", "data": "错误请求"})
# --- Script fragment: exercise the File storage round-trip with two PDFs ---

# Load two test PDFs from the local tmp/ directory.
filename1 = 'fluentpython.pdf'
with open(os.path.join('tmp', filename1), 'rb') as f:
    file_content1 = f.read()
filename2 = 'learningreact1.pdf'
with open(os.path.join('tmp', filename2), 'rb') as f:
    file_content2 = f.read()

# Reuse an existing Hub, or create one with a random reference.
hub = Hub.objects.first()
if hub is None:
    hub = Hub(reference='HUB-' + uuid.uuid4().hex)
    hub.save()

start = time.perf_counter()  # timing start — presumably consumed later

# Store the first PDF as a File with a liberasurecode (k=2, m=3) encoding;
# the File object is a context-managed writable stream.
file = File()
file.source = hub
file.collection = 'nextcloud'
file.filename = filename1
file.encoding = Encoding(name='liberasurecode_rs_vand', k=2, m=3)
with file as f:
    f.write(file_content1)

# Remove every hub except the one the file was written to.
for t_hub in Hub.objects:
    if t_hub.cumulus_id != hub.cumulus_id:
        t_hub.delete()

# NOTE(review): file_content2, orphan_count and fragments appear to feed a
# later part of this script that is not visible in this chunk.
orphan_count = OrphanedFragment.objects.count()
fragments = set([f.uuid for f in file.fragments])