def imagedetails():
    # Get/check parameters
    try:
        src = request.args.get('src', '')
        validate_string(src, 1, 1024)
    except ValueError as e:
        raise ParameterError(e)
    # v2.6.4 Don't allow this call to populate the database with unsupported files
    supported_file = (
        get_file_extension(src) in image_engine.get_image_formats(supported_only=True)
    )
    if not supported_file and path_exists(src, require_file=True):
        raise ImageError('The file is not a supported image format')
    # Get the image database entry
    db_image = auto_sync_file(src, data_engine, task_engine)
    if not db_image or db_image.status == Image.STATUS_DELETED:
        raise DoesNotExistError(src)
    # Require view permission or file admin
    permissions_engine.ensure_folder_permitted(
        db_image.folder,
        FolderPermission.ACCESS_VIEW,
        get_session_user()
    )
    return make_api_success_response(object_to_dict(
        _prep_image_object(db_image), _omit_fields
    ))
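# Usage sketch (illustrative only): a client-side call to the details handler
# above. The endpoint URL, host name and response envelope shown here are
# assumptions for illustration; the real route and response format are defined
# by the application's URL map and make_api_success_response().
def _example_get_image_details(http_session, src):
    # http_session is assumed to be an authenticated requests.Session()
    resp = http_session.get(
        'https://images.example.com/api/details/',  # hypothetical endpoint URL
        params={'src': src}                          # image path, validated by the handler
    )
    resp.raise_for_status()
    payload = resp.json()
    # Return the image record if the response wraps it in a 'data' field,
    # otherwise return the whole payload
    return payload.get('data', payload)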
def delete_portfolio_export(folio_export, history_user, history_info, _db_session=None):
    """
    Deletes a portfolio export record and the associated zip file (if it
    exists), and adds an audit trail entry for the parent portfolio.
    If you supply a database session it will be committed before the zip file
    is deleted, so that files are only deleted once the database operations
    are known to have worked.

    Raises a ServerTooBusyError if the export is still in progress.
    Raises an OSError if the zip file or directory cannot be deleted.
    """
    db_session = _db_session or data_engine.db_get_session()
    try:
        # Ensure we can access folio_export.portfolio
        if not data_engine.object_in_session(folio_export, db_session):
            folio_export = data_engine.get_object(
                FolioExport, folio_export.id, _db_session=db_session
            )
        # Check whether the export task is running
        if folio_export.task_id:
            task = task_engine.get_task(folio_export.task_id, _db_session=db_session)
            if (task and task.status == Task.STATUS_ACTIVE) or (
                    task and task.status == Task.STATUS_PENDING and
                    not task_engine.cancel_task(task)):
                raise ServerTooBusyError(
                    'this export is currently in progress, wait a while then try again'
                )
        # Delete and add history in one commit
        data_engine.add_portfolio_history(
            folio_export.portfolio, history_user,
            FolioHistory.ACTION_UNPUBLISHED, history_info,
            _db_session=db_session, _commit=False
        )
        data_engine.delete_object(
            folio_export, _db_session=db_session, _commit=True
        )
        # If we got this far the database delete worked and we now need to
        # delete the exported zip file
        zip_rel_path = get_portfolio_export_file_path(folio_export)
        if folio_export.filename:
            delete_file(zip_rel_path)
        # And if the zip directory is now empty, delete the directory too
        zip_rel_dir = get_portfolio_directory(folio_export.portfolio)
        if path_exists(zip_rel_dir, require_directory=True):
            zips_count = count_files(zip_rel_dir, recurse=False)
            if zips_count[0] == 0:
                delete_dir(zip_rel_dir)
    finally:
        if not _db_session:
            db_session.close()
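# Usage sketch (illustrative only): calling delete_portfolio_export() with an
# externally managed session, as described in the docstring above. The audit
# history text and the surrounding session handling are assumptions for
# illustration, not part of the function's contract.
def _example_unpublish_export(folio_export, admin_user):
    db_session = data_engine.db_get_session()
    try:
        delete_portfolio_export(
            folio_export,
            admin_user,
            'Unpublished export ' + str(folio_export.filename),  # hypothetical audit text
            _db_session=db_session
        )
    finally:
        # The function only closes sessions it created itself, so close ours here
        db_session.close()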
def delete(self, folder_id):
    """ Deletes a disk folder """
    # v4.1 #10 delete_folder() doesn't care whether it exists, but we want the
    #          API to return a "not found" if the folder doesn't exist on disk
    #          (and as long as the database is already in sync with that)
    db_folder = data_engine.get_folder(folder_id)
    if db_folder is None:
        raise DoesNotExistError(str(folder_id))
    if not path_exists(db_folder.path, require_directory=True) and \
            db_folder.status == Folder.STATUS_DELETED:
        raise DoesNotExistError(db_folder.path)
    # Run this as a background task in case it takes a long time
    task = task_engine.add_task(
        get_session_user(),
        'Delete disk folder %d' % folder_id,
        'delete_folder',
        {'folder_id': folder_id},
        Task.PRIORITY_HIGH,
        'info', 'error', 10
    )
    if task is None:
        # Task already submitted
        return make_api_success_response(task_accepted=True)
    else:
        return self._task_response(task, 30)
def delete(self, image_id):
    """ Deletes a file from disk """
    # Get image data
    db_img = data_engine.get_image(image_id=image_id)
    if not db_img:
        raise DoesNotExistError(str(image_id))
    # v4.1 #10 delete_file() doesn't care whether the file exists, but we
    #          want the API to return a "not found" if the file doesn't exist
    #          (and as long as the database is already in sync with that)
    if not path_exists(db_img.src, require_file=True) and \
            db_img.status == Image.STATUS_DELETED:
        raise DoesNotExistError(db_img.src)
    # Delete
    db_img = delete_file(db_img, get_session_user(), data_engine, permissions_engine)
    # Remove cached images for old path
    image_engine._uncache_image_id(db_img.id)
    # Return updated image
    return make_api_success_response(
        object_to_dict(_prep_image_object(db_img))
    )
def upload():
    # Get URL parameters for the upload
    file_list = request.files.getlist('files')
    path_index = request.form.get('path_index', '-1')  # Index into IMAGE_UPLOAD_DIRS or -1
    path = request.form.get('path', '')                # Manual path when path_index is -1
    overwrite = request.form.get('overwrite')

    ret_dict = {}
    try:
        current_user = get_session_user()
        assert current_user is not None
        # Check params
        path_index = parse_int(path_index)
        if overwrite != 'rename':
            overwrite = parse_boolean(overwrite)
        validate_string(path, 0, 1024)
        if not path and path_index < 0:
            raise ValueError('Either path or path_index is required')
        if len(file_list) < 1:
            raise ValueError('No files have been attached')
        if path_index >= 0:
            # Get a "trusted" pre-defined upload folder
            # image_engine.put_image() will create it if it doesn't exist
            _, path = get_upload_directory(path_index)
        else:
            # A manually specified folder is "untrusted" and has to exist already
            if not path_exists(path):
                raise DoesNotExistError('Path \'' + path + '\' does not exist')
        # Loop over the upload files
        put_image_exception = None
        can_download = None
        saved_files = []
        for wkfile in file_list:
            original_filepath = wkfile.filename
            original_filename = filepath_filename(original_filepath)  # v2.7.1 added
            if original_filename:
                db_image = None
                try:
                    # Don't allow filenames like "../../../etc/passwd"
                    safe_filename = secure_filename(
                        original_filename, app.config['ALLOW_UNICODE_FILENAMES']
                    )
                    # v2.7.1 If we already saved a file as safe_filename during this
                    #        upload, override this one to have overwrite=rename
                    overwrite_flag = 'rename' if safe_filename in saved_files else overwrite
                    # Save (this also checks user-folder permissions)
                    _, db_image = image_engine.put_image(
                        current_user, wkfile, path, safe_filename, overwrite_flag
                    )
                    # v2.7.1 Keep a record of what filenames we used during this upload
                    saved_files.append(safe_filename)
                except Exception as e:
                    # Save the error to use as our overall return value
                    if put_image_exception is None:
                        put_image_exception = e
                    # This iteration failed, add the error info to our return data
                    ret_dict[original_filepath] = {
                        'error': create_api_error_dict(e, logger)
                    }
                if db_image:
                    # Calculate the download permission once
                    # (all files are going to the same folder)
                    if can_download is None:
                        can_download = permissions_engine.is_folder_permitted(
                            db_image.folder,
                            FolderPermission.ACCESS_DOWNLOAD,
                            get_session_user()
                        )
                    # This iteration succeeded, add the image info to our return data
                    ret_dict[original_filepath] = object_to_dict(
                        _prep_image_object(db_image, can_download), _omit_fields
                    )
            else:
                logger.warning('Upload received blank filename, ignoring file')
        # Loop complete. If we had an exception, raise it now.
        if put_image_exception is not None:
            raise put_image_exception
    except Exception as e:
        # put_image returns ValueError for parameter errors
        if type(e) is ValueError:
            e = ParameterError(str(e))
        # Attach whatever data we have to return with the error
        # Caller can then decide whether to continue if some files worked
        e.api_data = ret_dict
        raise e
    finally:
        # Store the result for the upload_complete page
        cache_engine.raw_put(
            'UPLOAD_API:' + str(current_user.id), ret_dict,
            expiry_secs=(60 * 60 * 24 * 7)
        )
    # If here, all files were uploaded successfully
    return make_api_success_response(ret_dict)
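# Usage sketch (illustrative only): a multipart POST against the upload handler
# above, using the form fields it reads ('files', 'path', 'overwrite'). The
# endpoint URL and the authenticated requests.Session are assumptions for
# illustration; the real route is defined in the application's URL map.
def _example_upload_files(http_session, local_paths, dest_folder):
    upload_files = [('files', open(p, 'rb')) for p in local_paths]
    try:
        resp = http_session.post(
            'https://images.example.com/api/upload/',  # hypothetical endpoint URL
            data={'path': dest_folder, 'overwrite': 'false'},
            files=upload_files
        )
    finally:
        for _, fp in upload_files:
            fp.close()
    resp.raise_for_status()
    # One entry per original filename: the image record on success,
    # or an 'error' entry for files that failed
    return resp.json()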