Example #1
def imagelist():
    # Check parameters
    try:
        from_path = request.args.get('path', '')
        want_info = parse_boolean(request.args.get('attributes', ''))
        limit = parse_int(request.args.get('limit', '1000'))
        validate_string(from_path, 1, 1024)
    except ValueError as e:
        raise ParameterError(e)

    # Get extra parameters for image URL construction
    image_params = request.args.to_dict()
    if 'path' in image_params:
        del image_params['path']
    if 'attributes' in image_params:
        del image_params['attributes']
    if 'limit' in image_params:
        del image_params['limit']

    # Get directory listing
    directory_info = get_directory_listing(from_path, False, limit)
    if not directory_info.exists():
        raise DoesNotExistError('Invalid path')

    ret_list = []
    db_session = data_engine.db_get_session()
    db_commit = False
    try:
        # Auto-populate the folders database
        db_folder = auto_sync_folder(
            from_path, data_engine, task_engine, _db_session=db_session
        )
        db_session.commit()

        # Require view permission or file admin
        permissions_engine.ensure_folder_permitted(
            db_folder,
            FolderPermission.ACCESS_VIEW,
            get_session_user()
        )

        # Create the response
        file_list = directory_info.contents()
        img_types = image_engine.get_image_formats()
        base_folder = add_sep(directory_info.name())
        for f in file_list:
            # Filter out non-images
            if get_file_extension(f['filename']) in img_types:
                entry_path = base_folder + f['filename']
                entry = {
                    'filename': f['filename'],
                    'url': external_url_for('image', src=entry_path, **image_params)
                }
                if want_info:
                    db_entry = auto_sync_existing_file(
                        entry_path,
                        data_engine,
                        task_engine,
                        burst_pdf=False,  # Don't burst a PDF just by finding it here
                        _db_session=db_session
                    )
                    entry['id'] = db_entry.id if db_entry else 0
                    entry['folder_id'] = db_entry.folder_id if db_entry else 0
                    entry['title'] = db_entry.title if db_entry else ''
                    entry['description'] = db_entry.description if db_entry else ''
                    entry['width'] = db_entry.width if db_entry else 0
                    entry['height'] = db_entry.height if db_entry else 0
                ret_list.append(entry)

        db_commit = True
    finally:
        try:
            if db_commit:
                db_session.commit()
            else:
                db_session.rollback()
        finally:
            db_session.close()

    return make_api_success_response(ret_list)
Example #2
def imagelist():
    # Check parameters
    try:
        from_path = request.args.get('path', '')
        want_info = parse_boolean(request.args.get('attributes', ''))
        start = parse_int(request.args.get('start', '0'))
        limit = parse_int(request.args.get('limit', '1000'))
        validate_string(from_path, 1, 1024)
        validate_number(start, 0, 999999999)
        validate_number(limit, 1, 1000)
    except ValueError as e:
        raise ParameterError(e)

    # Get extra parameters for image URL construction, removing the API's own parameters
    image_params = request.args.to_dict()
    image_params.pop('path', None)
    image_params.pop('attributes', None)
    image_params.pop('start', None)
    image_params.pop('limit', None)

    # Get directory listing
    directory_info = get_directory_listing(from_path, False, 2, start, limit)
    if not directory_info.exists():
        raise DoesNotExistError('Invalid path')

    ret_list = []
    db_session = data_engine.db_get_session()
    db_commit = False
    try:
        # Auto-populate the folders database
        db_folder = auto_sync_folder(
            from_path, data_engine, task_engine, _db_session=db_session
        )
        db_session.commit()

        # Require view permission or file admin
        permissions_engine.ensure_folder_permitted(
            db_folder,
            FolderPermission.ACCESS_VIEW,
            get_session_user()
        )
        # Get download permission in case we need to return it later
        can_download = permissions_engine.is_folder_permitted(
            db_folder,
            FolderPermission.ACCESS_DOWNLOAD,
            get_session_user()
        )

        # Create the response
        file_list = directory_info.contents()
        supported_img_types = image_engine.get_image_formats(supported_only=True)
        base_folder = add_sep(directory_info.name())
        for f in file_list:
            # v2.6.4 Return unsupported files too. If you want to reverse this change,
            # the filtering needs to be elsewhere for 'start' and 'limit' to work properly
            supported_file = get_file_extension(f['filename']) in supported_img_types
            file_path = base_folder + f['filename']

            if want_info:
                # Need to return the database fields too
                if supported_file:
                    db_entry = auto_sync_existing_file(
                        file_path,
                        data_engine,
                        task_engine,
                        burst_pdf=False,  # Don't burst a PDF just by finding it here
                        _db_session=db_session
                    )
                    db_entry = _prep_image_object(db_entry, can_download, **image_params)
                else:
                    db_entry = _prep_blank_image_object()
                    db_entry.filename = f['filename']
                    db_entry.supported = False
                # Return images in full (standard) image dict format
                entry = object_to_dict(db_entry, _omit_fields)
            else:
                # Return images in short dict format
                entry = {
                    'filename': f['filename'],
                    'supported': supported_file,
                    'url': (external_url_for('image', src=file_path, **image_params)
                            if supported_file else '')
                }

            ret_list.append(entry)

        db_commit = True
    finally:
        try:
            if db_commit:
                db_session.commit()
            else:
                db_session.rollback()
        finally:
            db_session.close()

    return make_api_success_response(ret_list)
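
For illustration, here is a minimal client-side sketch of calling the endpoint above with the requests library. The URL, the 'width' pass-through parameter and the 'data' wrapper key are assumptions made for this sketch; only the path, attributes, start and limit parameters come from the examples.

import requests

# Hypothetical endpoint URL -- the examples above only show the view function,
# not the route it is registered under.
LIST_URL = 'https://images.example.com/api/list/'

resp = requests.get(LIST_URL, params={
    'path': 'products/2024',  # folder to list (required, 1 to 1024 characters)
    'attributes': '1',        # return full image records instead of the short format
    'start': 0,               # paging offset (Example #2 only)
    'limit': 100,             # page size, validated as 1 to 1000 in Example #2
    'width': 200,             # any extra parameters are passed through to each image URL
})
resp.raise_for_status()
# Assumption: the success response nests the returned list under a 'data' key
for entry in resp.json()['data']:
    print(entry['filename'], entry.get('url', ''))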
Example #3
def precache_images(start_dir, file_specs, templates):
    """
    Performs the main pre-caching function as described by the file header.
    """
    from imageserver.flask_app import app
    from imageserver.filesystem_sync import auto_sync_existing_file
    from imageserver.image_attrs import ImageAttrs
    from imageserver.util import add_sep

    # Disable logging to prevent app startup and image errors going to main log
    # app.log.set_enabled(False)

    # Validate params
    rc = validate_params(start_dir, file_specs, templates)
    if (rc != RETURN_OK):
        return rc

    # Get base path with trailing /
    images_base_dir = add_sep(os.path.abspath(app.config['IMAGES_BASE_DIR']))

    # Init stats and stop conditions
    stats = PreCacheStats()
    last_cache_pct = 0
    cache_full = False
    keyboard_interrupt = False

    # Count directories skipped due to walking errors (os.walk onerror callback)
    def walk_err(os_error):
        stats.inc_dir_skipped_count()

    # Walk directory tree
    try:
        for cur_dir, sub_dirs, files in os.walk(
            start_dir, onerror=walk_err, followlinks=True
        ):
            log(cur_dir)
            stats.inc_total_dir_count()
            # Remove files and directories beginning with '.'
            # (slice-assign so the lists are modified in place for os.walk;
            # removing items while iterating the same list can skip entries)
            sub_dirs[:] = [d for d in sub_dirs if not d.startswith('.')]
            files[:] = [f for f in files if not f.startswith('.')]
            # Get relative path from IMAGES_BASE_DIR/
            if not cur_dir.startswith(images_base_dir):
                log('ERROR: Cannot calculate relative image path from ' + str(cur_dir))
                stats.inc_dir_skipped_count()
            else:
                relative_dir = cur_dir[len(images_base_dir):]
                # Apply file specs
                for file_spec in file_specs:
                    file_matches = fnmatch.filter(files, file_spec)
                    for file_name in file_matches:
                        # Check whether the cache is full (or now self-emptying) once in a while
                        if stats.total_file_count % 10 == 0:
                            cache_pct = app.cache_engine.size_percent()
                            log('\tCache level %d%%' % cache_pct)
                            if (cache_pct < last_cache_pct) or (cache_pct >= 80):
                                raise CacheFullException()
                            else:
                                last_cache_pct = cache_pct
                        # Process matched file
                        stats.inc_total_file_count()
                        for template in templates:
                            try:
                                log('\tProcessing %s with template %s' % (file_name, template))
                                image_path = os.path.join(relative_dir, file_name)
                                db_image = auto_sync_existing_file(
                                    image_path, app.data_engine, app.task_engine
                                )
                                image_attrs = ImageAttrs(
                                    db_image.src, db_image.id, template=template
                                )
                                app.image_engine.finalise_image_attrs(image_attrs)
                                gen_image = app.image_engine.get_image(image_attrs)
                                if gen_image.is_from_cache():
                                    log('\tImage already in cache')
                                    stats.inc_images_already_cached_count()
                                else:
                                    log('\tGenerated %s image, %d bytes' % (
                                        gen_image.attrs().format(), len(gen_image.data())
                                    ))
                                    stats.inc_images_created_count()
                            except KeyboardInterrupt as kbe:
                                raise kbe
                            except Exception as e:
                                log('ERROR: ' + str(e))
                                stats.inc_images_error_count()
    except CacheFullException:
        cache_full = True
    except KeyboardInterrupt:
        keyboard_interrupt = True

    # Show stop reason and stats
    if keyboard_interrupt:
        log('---\nInterrupted by user.\n---')
    elif cache_full:
        log('---\nCache is full.\n---')
    else:
        log('---\nNo more files.\n---')

    log('%d matching file(s) found in %d directories.' % (
        stats.total_file_count, stats.total_dir_count
    ))
    log('%d image(s) were generated and cached.' % stats.images_created_count)
    log('%d image(s) were already in cache.' % stats.images_already_cached_count)
    log('%d image(s) skipped due to error.' % stats.images_error_count)
    log('%d directories skipped due to error.' % stats.dir_skipped_count)
    log('Cache is now %d%% full.' % app.cache_engine.size_percent())
    return RETURN_OK
Example #4
def precache_images(start_dir, file_specs, templates):
    """
    Performs the main pre-caching function as described by the file header.
    """
    from imageserver.flask_app import app
    from imageserver.filesystem_sync import auto_sync_existing_file
    from imageserver.image_attrs import ImageAttrs
    from imageserver.util import add_sep

    # Disable logging to prevent app startup and image errors going to main log
    # app.log.set_enabled(False)

    # Validate params
    rc = validate_params(start_dir, file_specs, templates)
    if (rc != RETURN_OK):
        return rc

    # Get base path with trailing /
    images_base_dir = add_sep(os.path.abspath(app.config['IMAGES_BASE_DIR']))

    # Init stats and stop conditions
    stats = PreCacheStats()
    last_cache_pct = 0
    cache_full = False
    keyboard_interrupt = False

    # Count directories skipped due to walking errors (os.walk onerror callback)
    def walk_err(os_error):
        stats.inc_dir_skipped_count()

    # Walk directory tree
    try:
        for cur_dir, sub_dirs, files in os.walk(
            unicode(start_dir), onerror=walk_err, followlinks=True
        ):
            log(cur_dir)
            stats.inc_total_dir_count()
            # Remove files and directories beginning with '.'
            # (slice-assign so the lists are modified in place for os.walk;
            # removing items while iterating the same list can skip entries)
            sub_dirs[:] = [d for d in sub_dirs if not d.startswith('.')]
            files[:] = [f for f in files if not f.startswith('.')]
            # Get relative path from IMAGES_BASE_DIR/
            if not cur_dir.startswith(images_base_dir):
                log('ERROR: Cannot calculate relative image path from ' + str(cur_dir))
                stats.inc_dir_skipped_count()
            else:
                relative_dir = cur_dir[len(images_base_dir):]
                # Apply file specs
                for file_spec in file_specs:
                    file_matches = fnmatch.filter(files, file_spec)
                    for file_name in file_matches:
                        # Check whether the cache is full (or now self-emptying) once in a while
                        if stats.total_file_count % 10 == 0:
                            cache_pct = app.cache_engine.size_percent()
                            log('\tCache level %d%%' % cache_pct)
                            if (cache_pct < last_cache_pct) or (cache_pct >= 80):
                                raise CacheFullException()
                            else:
                                last_cache_pct = cache_pct
                        # Process matched file
                        stats.inc_total_file_count()
                        for template in templates:
                            try:
                                log('\tProcessing %s with template %s' % (file_name, template))
                                image_path = os.path.join(relative_dir, file_name)
                                db_image = auto_sync_existing_file(
                                    image_path, app.data_engine, app.task_engine
                                )
                                image_attrs = ImageAttrs(
                                    db_image.src, db_image.id, template=template
                                )
                                app.image_engine.finalise_image_attrs(image_attrs)
                                gen_image = app.image_engine.get_image(image_attrs)
                                if gen_image.is_from_cache():
                                    log('\tImage already in cache')
                                    stats.inc_images_already_cached_count()
                                else:
                                    log('\tGenerated %s image, %d bytes' % (
                                        gen_image.attrs().format(), len(gen_image.data())
                                    ))
                                    stats.inc_images_created_count()
                            except KeyboardInterrupt as kbe:
                                raise kbe
                            except Exception as e:
                                log('ERROR: ' + str(e))
                                stats.inc_images_error_count()
    except CacheFullException:
        cache_full = True
    except KeyboardInterrupt:
        keyboard_interrupt = True

    # Show stop reason and stats
    if keyboard_interrupt:
        log('---\nInterrupted by user.\n---')
    elif cache_full:
        log('---\nCache is full.\n---')
    else:
        log('---\nNo more files.\n---')

    log('%d matching file(s) found in %d directories.' % (
        stats.total_file_count, stats.total_dir_count
    ))
    log('%d image(s) were generated and cached.' % stats.images_created_count)
    log('%d image(s) were already in cache.' % stats.images_already_cached_count)
    log('%d image(s) skipped due to error.' % stats.images_error_count)
    log('%d directories skipped due to error.' % stats.dir_skipped_count)
    log('Cache is now %d%% full.' % app.cache_engine.size_percent())
    return RETURN_OK
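
A hedged sketch of a command-line wrapper for precache_images follows, assuming it sits in the same module as the function above. The argument format (a comma-separated list of file specs and of template names) is illustrative and not taken from the examples.

import sys

def main(argv):
    # Hypothetical CLI entry point: precache_images() expects a starting
    # directory, a list of fnmatch-style file specs and a list of template names.
    if len(argv) < 4:
        print('Usage: precache.py <start_dir> <file_spec,...> <template,...>')
        return 1
    start_dir = argv[1]
    file_specs = argv[2].split(',')  # e.g. "*.jpg,*.png"
    templates = argv[3].split(',')   # e.g. "thumbnail,smalljpeg"
    # precache_images() returns RETURN_OK or an error code from the module above
    return precache_images(start_dir, file_specs, templates)

if __name__ == '__main__':
    sys.exit(main(sys.argv))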