def save_uploaded_file():
    """Receive one uploaded file chunk; once all chunks have arrived,
    merge them into the final file and verify its checksum.

    :return: JSON string describing success or failure of this request
    """
    fchunk = FileChunk.init_from_request()
    logger.info("User uploaded chunk: {}".format(fchunk))
    file_name = fchunk.file_name

    if not fchunk.afile:
        err = utils.pack_error("No file specified.")
        logger.error(err)
        return err

    chunk_path = get_chunk_path(file_name, fchunk.number)

    try:
        # For every request received we store the chunk to a temp folder
        fchunk.afile.save(chunk_path)
    except Exception as exc:
        logger.error("Problem saving: {} due: {}".format(fchunk, exc))
        return utils.pack_error("Unable to save file chunk: {}"
                                .format(fchunk.number))

    if not all_chunks_received(fchunk):
        # More chunks are expected; nothing else to do for this request.
        return utils.jsonify_success('Request completed successfully.')

    # When all chunks are received we merge them
    subject_file = merge_files(fchunk)

    if subject_file is None:
        # Bug fix: `file_path` is never assigned on this path (it was only
        # set after a successful merge), so formatting it raised NameError.
        # Use `file_name`, which is always defined.
        LogEntity.file_uploaded(session['uuid'],
                                'Unable to merge chunks for file: {}'
                                .format(file_name))
        return utils.jsonify_error('Unable to merge chunks for file: {}'
                                   .format(file_name))

    prefix = app.config['REDIDROPPER_UPLOAD_SAVED_DIR']
    file_path = subject_file.get_full_path(prefix)
    delete_temp_files(fchunk)
    hash_matches = verify_file_integrity(fchunk)

    if hash_matches:
        LogEntity.file_uploaded(session['uuid'], file_path)
        return utils.jsonify_success('File {} uploaded successfully.'
                                     .format(file_name))

    logger.error("md5 sum does not match for: {}".format(fchunk))
    LogEntity.file_uploaded(session['uuid'],
                            'Checksum mismatch for file: {}'
                            .format(file_path))
    return utils.jsonify_error('Checksum mismatch for file: {}'
                               .format(file_name))
def save_uploaded_file():
    """Receive one uploaded file chunk; once all chunks have arrived,
    merge them into the final file and verify its checksum.

    :return: JSON string describing success or failure of this request
    """
    fchunk = FileChunk.init_from_request()
    logger.info("User uploaded chunk: {}".format(fchunk))
    file_name = fchunk.file_name

    if not fchunk.afile:
        err = utils.pack_error("No file specified.")
        logger.error(err)
        return err

    chunk_path = get_chunk_path(file_name, fchunk.number)

    try:
        # For every request received we store the chunk to a temp folder
        fchunk.afile.save(chunk_path)
    except Exception as exc:
        logger.error("Problem saving: {} due: {}".format(fchunk, exc))
        return utils.pack_error("Unable to save file chunk: {}".format(
            fchunk.number))

    if not all_chunks_received(fchunk):
        # More chunks are expected; nothing else to do for this request.
        return utils.jsonify_success('Request completed successfully.')

    # When all chunks are received we merge them
    subject_file = merge_files(fchunk)

    if subject_file is None:
        # Bug fix: `file_path` is never assigned on this path (it was only
        # set after a successful merge), so formatting it raised NameError.
        # Use `file_name`, which is always defined.
        LogEntity.file_uploaded(
            session['uuid'],
            'Unable to merge chunks for file: {}'.format(file_name))
        return utils.jsonify_error(
            'Unable to merge chunks for file: {}'.format(file_name))

    prefix = app.config['REDIDROPPER_UPLOAD_SAVED_DIR']
    file_path = subject_file.get_full_path(prefix)
    delete_temp_files(fchunk)
    hash_matches = verify_file_integrity(fchunk)

    if hash_matches:
        LogEntity.file_uploaded(session['uuid'], file_path)
        return utils.jsonify_success(
            'File {} uploaded successfully.'.format(file_name))

    logger.error("md5 sum does not match for: {}".format(fchunk))
    LogEntity.file_uploaded(
        session['uuid'],
        'Checksum mismatch for file: {}'.format(file_path))
    return utils.jsonify_error(
        'Checksum mismatch for file: {}'.format(file_name))
def save_uploaded_file():
    """Receive one uploaded file chunk; once all chunks have arrived,
    merge, verify, and clean up the temporary chunk files.

    :return: JSON string describing success or failure of this request
    """
    fchunk = FileChunk()
    logger.info("Uploading {}".format(fchunk))
    file_name = fchunk.file_name

    if not utils.allowed_file(file_name):
        err = utils.pack_error("Invalid file type: {}."
                               "Allowed extensions: {}"
                               .format(file_name, utils.ALLOWED_EXTENSIONS))
        logger.error(err)
        return err

    if not fchunk.afile:
        err = utils.pack_error("No file specified.")
        logger.error(err)
        return err

    chunk_path = get_chunk_path(file_name, fchunk.number)

    try:
        # For every request received we store the chunk to a temp folder
        fchunk.afile.save(chunk_path)
    except Exception:
        # Bug fix: was a bare `except:`, which also swallows
        # SystemExit/KeyboardInterrupt; narrow to Exception.
        logger.error("Problem saving: {}".format(fchunk))
        return utils.pack_error("Unable to save file chunk: {}"
                                .format(fchunk.number))

    # When all chunks are received we merge them
    if all_chunks_received(fchunk):
        merge_files(fchunk)
        verify_file_integrity(fchunk)
        delete_temp_files(fchunk)
        return utils.pack_info('File {} uploaded successfully.'
                               .format(file_name))
    else:
        return utils.pack_info('Request completed successfully.')
def save_uploaded_file():
    """Receive one uploaded file chunk; once all chunks have arrived,
    merge, verify, and clean up the temporary chunk files.

    :return: JSON string describing success or failure of this request
    """
    fchunk = FileChunk()
    logger.info("Uploading {}".format(fchunk))
    file_name = fchunk.file_name

    if not utils.allowed_file(file_name):
        err = utils.pack_error("Invalid file type: {}."
                               "Allowed extensions: {}".format(
                                   file_name, utils.ALLOWED_EXTENSIONS))
        logger.error(err)
        return err

    if not fchunk.afile:
        err = utils.pack_error("No file specified.")
        logger.error(err)
        return err

    chunk_path = get_chunk_path(file_name, fchunk.number)

    try:
        # For every request received we store the chunk to a temp folder
        fchunk.afile.save(chunk_path)
    except Exception:
        # Bug fix: was a bare `except:`, which also swallows
        # SystemExit/KeyboardInterrupt; narrow to Exception.
        logger.error("Problem saving: {}".format(fchunk))
        return utils.pack_error("Unable to save file chunk: {}".format(
            fchunk.number))

    # When all chunks are received we merge them
    if all_chunks_received(fchunk):
        merge_files(fchunk)
        verify_file_integrity(fchunk)
        delete_temp_files(fchunk)
        return utils.jsonify_success(
            'File {} uploaded successfully.'.format(file_name))
    else:
        return utils.jsonify_success('Request completed successfully.')
def api_save_user():
    """Add a new user (with roles) to the database from form data.

    Reads `email`, `first`, `last`, `minitial`, and `roles[]` from the
    request form.

    :return: JSON response with the serialized user, or an error if the
        email is already taken
    """
    email = request.form['email']
    first = request.form['first']
    last = request.form['last']
    minitial = request.form['minitial']
    roles = request.form.getlist('roles[]')
    app.logger.debug("roles: {}".format(roles))

    # Bug fix: the original wrapped `.one()` in a bare try/except-pass,
    # which silently swallowed real database errors. `.first()` returns
    # None when no row matches, which is exactly the existence test needed.
    email_exists = \
        UserEntity.query.filter_by(email=email).first() is not None

    if email_exists:
        return make_response(
            pack_error("Sorry. This email is already taken."))

    # @TODO: fix hardcoded values (expiration window, empty password hash)
    added_date = datetime.today()
    access_end_date = get_expiration_date(180)

    user = UserEntity.create(email=email,
                             first=first,
                             last=last,
                             minitial=minitial,
                             added_at=added_date,
                             modified_at=added_date,
                             access_expires_at=access_end_date,
                             password_hash="")

    user_roles = []
    try:
        for role_name in roles:
            role_entity = RoleEntity.query.filter_by(name=role_name).one()
            user_roles.append(role_entity)
    except Exception as exc:
        app.logger.debug("Problem saving user: {}".format(exc))

    # Bug fix: was a list comprehension used only for its side effects;
    # a plain loop states the intent.
    for role_entity in user_roles:
        user.roles.append(role_entity)

    user = UserEntity.save(user)
    app.logger.debug("saved user: {}".format(user))
    return make_response(pack_success_result(user.serialize()))
def api_list_users():
    """Return a paginated list of all users.

    @TODO: use the page_num in the query instead of fetching all rows

    :rtype: Response
    :return: JSON with `total_pages` and `list_of_users`
    """
    per_page = get_safe_int(request.form.get('per_page'))
    page_num = get_safe_int(request.form.get('page_num'))
    app.logger.debug("Show page {} of users".format(page_num))

    users = UserEntity.query.all()

    # Bug fix: `query.all()` returns a list, never None, so the original
    # `users is None` branch was dead code and an empty table fell through
    # to the pagination math. Use truthiness to catch the empty case.
    if not users:
        return make_response(pack_error("no users found"))

    list_of_users = [i.serialize() for i in users]

    # Guard against per_page == 0 (would raise ZeroDivisionError).
    # NOTE(review): assumes get_safe_int may return 0 for bad input —
    # confirm its contract.
    if per_page:
        total_pages = math.ceil(len(list_of_users) / float(per_page))
    else:
        total_pages = 1

    data = {"total_pages": total_pages,
            "list_of_users": list_of_users}
    return make_response(pack_success_result(data))
def test_pack(self):
    """Verify pack_error produces the expected status/message JSON,
    ignoring whitespace differences.
    """
    # Bug fix: assertEquals is a deprecated alias (removed in newer
    # Python versions); use assertEqual.
    self.assertEqual(
        '{"message":"msg","status":"error"}',
        utils.pack_error("msg").replace(' ', '').replace('\n', ''))
def test_pack():
    """Verify pack_info and pack_error produce the expected JSON."""
    # Removed a garbled commented-out assert for utils.pack() whose
    # continuation line was left uncommented (dead/broken code).
    assert '{"status": "info", "message": "msg"}' == utils.pack_info("msg")
    assert '{"status": "error", "message": "msg"}' == utils.pack_error("msg")
def test_pack(self):
    """Verify pack_error produces the expected status/message JSON,
    ignoring whitespace differences.
    """
    # Bug fix: assertEquals is a deprecated alias (removed in newer
    # Python versions); use assertEqual.
    self.assertEqual('{"message":"msg","status":"error"}',
                     utils.pack_error("msg")
                     .replace(' ', '').replace('\n', ''))
def test_pack():
    """pack_error emits an error-status JSON payload for the message."""
    compact = utils.pack_error("msg").replace(' ', '').replace('\n', '')
    assert compact == '{"message":"msg","status":"error"}'