Example #1
def upload_archived_resource(resource_id_dir, filename, saved_file):
    '''
    Uploads the resource to the S3 filestore under the directory
    <S3FILESTORE__AWS_BUCKET_NAME>/<S3FILESTORE__AWS_STORAGE_PATH>/archived_resources/
    '''

    storage_path = config.get('ckanext.s3filestore.aws_storage_path')

    if not storage_path:
        log.warning('Not saved to filestore because no value for '
                    'ckanext.s3filestore.aws_storage_path in config')
        raise ArchiveError(
            _('No value for ckanext.s3filestore.aws_storage_path in config'))

    with open(saved_file, 'rb') as save_file:
        upload = uploader.get_uploader('archived_resources')
        upload.upload_file = save_file
        upload.filename = filename
        upload.filepath = os.path.join(storage_path, 'archived_resources',
                                       resource_id_dir, filename)
        upload.id = filename
        upload.clear = False
        upload.upload(uploader.get_max_resource_size())

    return upload, upload.filepath
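
A minimal usage sketch for the helper above; the resource id, file name, and local path are illustrative placeholders, not values from the original snippet:

# Hypothetical call site, assuming the S3 filestore config shown above is set.
upload, remote_path = upload_archived_resource(
    resource_id_dir='res-1234',           # illustrative resource id
    filename='data.csv',                  # illustrative file name
    saved_file='/tmp/archive/data.csv')   # illustrative local path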
Example #2
    pkg_dict['resources'].append(data_dict)

    try:
        context['defer_commit'] = True
        context['use_cache'] = False
        _get_action('package_update')(context, pkg_dict)
        context.pop('defer_commit')
    except ValidationError as e:
        errors = e.error_dict['resources'][-1]
        raise ValidationError(errors)

    # Get the new resource's id from the model, as it will not appear in
    # package_show until after the commit
    upload.upload(context['package'].resources[-1].id,
                  uploader.get_max_resource_size())
    model.repo.commit()

    # Run package_show again to get the actual last resource
    pkg_dict = _get_action('package_show')(context, {'id': package_id})
    resource = pkg_dict['resources'][-1]

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.after_create(context, resource)

    return resource


def resource_view_create(context, data_dict):
    '''Creates a new resource view.
Example #3
def resource_update(context, data_dict):
    '''Update a resource.

    To update a resource you must be authorized to update the dataset that the
    resource belongs to.

    For further parameters see
    :py:func:`~ckan.logic.action.create.resource_create`.

    :param id: the id of the resource to update
    :type id: string

    :returns: the updated resource
    :rtype: dictionary

    '''
    model = context['model']
    user = context['user']
    id = _get_or_bust(data_dict, "id")

    if not data_dict.get('url'):
        data_dict['url'] = ''

    resource = model.Resource.get(id)
    if not resource:
        log.debug('Could not find resource %s', id)
        raise NotFound(_('Resource was not found.'))

    context["resource"] = resource
    old_resource_format = resource.format

    _check_access('resource_update', context, data_dict)
    del context["resource"]

    package_id = resource.package.id
    pkg_dict = _get_action('package_show')(dict(context, return_type='dict'), {
        'id': package_id
    })

    for n, p in enumerate(pkg_dict['resources']):
        if p['id'] == id:
            break
    else:
        log.error('Could not find resource %s after all', id)
        raise NotFound(_('Resource was not found.'))

    # Persist the datastore_active extra if already present and not provided
    if ('datastore_active' in resource.extras
            and 'datastore_active' not in data_dict):
        data_dict['datastore_active'] = resource.extras['datastore_active']

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.before_update(context, pkg_dict['resources'][n], data_dict)

    upload = uploader.get_resource_uploader(data_dict)

    if 'mimetype' not in data_dict:
        if hasattr(upload, 'mimetype'):
            data_dict['mimetype'] = upload.mimetype

    if 'size' not in data_dict and 'url_type' in data_dict:
        if hasattr(upload, 'filesize'):
            data_dict['size'] = upload.filesize

    pkg_dict['resources'][n] = data_dict

    try:
        context['defer_commit'] = True
        context['use_cache'] = False
        updated_pkg_dict = _get_action('package_update')(context, pkg_dict)
        context.pop('defer_commit')
    except ValidationError as e:
        try:
            raise ValidationError(e.error_dict['resources'][-1])
        except (KeyError, IndexError):
            raise ValidationError(e.error_dict)

    upload.upload(id, uploader.get_max_resource_size())
    model.repo.commit()

    resource = _get_action('resource_show')(context, {'id': id})

    if old_resource_format != resource['format']:
        _get_action('resource_create_default_resource_views')(
            {
                'model': context['model'],
                'user': context['user'],
                'ignore_auth': True
            }, {
                'package': updated_pkg_dict,
                'resource': resource
            })

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.after_update(context, resource)

    return resource
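
Callers normally reach this action through the action dispatcher rather than importing the function. A minimal sketch, assuming a valid user context and an existing resource id (both illustrative):

import ckan.plugins.toolkit as tk

context = {'user': 'some_user'}  # illustrative caller-supplied context
updated = tk.get_action('resource_update')(context, {
    'id': 'the-resource-id',               # illustrative resource id
    'url': 'http://example.com/data.csv',
    'format': 'CSV'})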
Example #4
def package_update(context, data_dict):
    '''Update a dataset (package).

    You must be authorized to edit the dataset and the groups that it belongs
    to.

    .. note:: Update methods may delete parameters not explicitly provided in the
        data_dict. If you want to edit only a specific attribute use `package_patch`
        instead.

    It is recommended to call
    :py:func:`ckan.logic.action.get.package_show`, make the desired changes to
    the result, and then call ``package_update()`` with it.

    Plugins may change the parameters of this function depending on the value
    of the dataset's ``type`` attribute, see the
    :py:class:`~ckan.plugins.interfaces.IDatasetForm` plugin interface.

    For further parameters see
    :py:func:`~ckan.logic.action.create.package_create`.

    :param id: the name or id of the dataset to update
    :type id: string

    :returns: the updated dataset (if ``'return_package_dict'`` is ``True`` in
              the context, which is the default. Otherwise returns just the
              dataset id)
    :rtype: dictionary

    '''
    model = context['model']
    session = context['session']
    name_or_id = data_dict.get('id') or data_dict.get('name')
    if name_or_id is None:
        raise ValidationError({'id': _('Missing value')})

    pkg = model.Package.get(name_or_id)
    if pkg is None:
        raise NotFound(_('Package was not found.'))
    context["package"] = pkg

    # immutable fields
    data_dict["id"] = pkg.id
    data_dict['type'] = pkg.type

    _check_access('package_update', context, data_dict)

    user = context['user']
    # get the schema
    package_plugin = lib_plugins.lookup_package_plugin(pkg.type)
    if 'schema' in context:
        schema = context['schema']
    else:
        schema = package_plugin.update_package_schema()

    if 'api_version' not in context:
        # check_data_dict() is deprecated. If the package_plugin has a
        # check_data_dict() we'll call it, if it doesn't have the method we'll
        # do nothing.
        check_data_dict = getattr(package_plugin, 'check_data_dict', None)
        if check_data_dict:
            try:
                package_plugin.check_data_dict(data_dict, schema)
            except TypeError:
                # Old plugins do not support passing the schema so we need
                # to ensure they still work.
                package_plugin.check_data_dict(data_dict)

    resource_uploads = []
    for resource in data_dict.get('resources', []):
        # file uploads/clearing
        upload = uploader.get_resource_uploader(resource)

        if 'mimetype' not in resource:
            if hasattr(upload, 'mimetype'):
                resource['mimetype'] = upload.mimetype

        if 'size' not in resource and 'url_type' in resource:
            if hasattr(upload, 'filesize'):
                resource['size'] = upload.filesize

        resource_uploads.append(upload)

    data, errors = lib_plugins.plugin_validate(
        package_plugin, context, data_dict, schema, 'package_update')
    log.debug('package_update validate_errs=%r user=%s package=%s data=%r',
              errors, context.get('user'),
              context.get('package').name if context.get('package') else '',
              data)

    if errors:
        model.Session.rollback()
        raise ValidationError(errors)

    # avoid revisioning by updating directly
    model.Session.query(model.Package).filter_by(id=pkg.id).update(
        {"metadata_modified": datetime.datetime.utcnow()})
    model.Session.refresh(pkg)

    pkg = model_save.package_dict_save(data, context)

    context_org_update = context.copy()
    context_org_update['ignore_auth'] = True
    context_org_update['defer_commit'] = True
    _get_action('package_owner_org_update')(context_org_update,
                                            {'id': pkg.id,
                                             'organization_id': pkg.owner_org})

    # Needed to let extensions know the new resources ids
    model.Session.flush()
    for index, (resource, upload) in enumerate(
            zip(data.get('resources', []), resource_uploads)):
        resource['id'] = pkg.resources[index].id

        upload.upload(resource['id'], uploader.get_max_resource_size())

    for item in plugins.PluginImplementations(plugins.IPackageController):
        item.edit(pkg)
        item.after_dataset_update(context, data)

    # Create activity
    if not pkg.private:
        user_obj = model.User.by_name(user)
        if user_obj:
            user_id = user_obj.id
        else:
            user_id = 'not logged in'

        activity = pkg.activity_stream_item('changed', user_id)
        session.add(activity)

    if not context.get('defer_commit'):
        model.repo.commit()

    log.debug('Updated object %s', pkg.name)

    return_id_only = context.get('return_id_only', False)

    # Make sure that a user provided schema is not used on package_show
    context.pop('schema', None)

    # the user was allowed to update the dataset, so they should still be
    # able to read it.
    context['ignore_auth'] = True
    output = data_dict['id'] if return_id_only \
            else _get_action('package_show')(context, {'id': data_dict['id']})

    return output
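
The show-modify-update pattern recommended in the docstring above, as a minimal sketch (the dataset name and field values are illustrative):

import ckan.plugins.toolkit as tk

context = {'user': 'some_user'}  # illustrative caller-supplied context
# Fetch the current dataset, change only what is needed, send it back whole.
pkg_dict = tk.get_action('package_show')(context, {'id': 'my-dataset'})
pkg_dict['notes'] = 'Revised description'
pkg_dict = tk.get_action('package_update')(context, pkg_dict)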
Example #5
    pkg_dict['resources'].append(data_dict)

    try:
        context['defer_commit'] = True
        context['use_cache'] = False
        _get_action('package_update')(context, pkg_dict)
        context.pop('defer_commit')
    except ValidationError as e:
        errors = e.error_dict['resources'][-1]
        raise ValidationError(errors)

    # Get the new resource's id from the model, as it will not appear in
    # package_show until after the commit
    upload.upload(context['package'].resources[-1].id,
                  uploader.get_max_resource_size())
    model.repo.commit()

    # Run package_show again to get the actual last resource
    pkg_dict = _get_action('package_show')(context, {'id': package_id})
    resource = pkg_dict['resources'][-1]

    return resource


def related_create(context, data_dict):
    '''Add a new related item to a dataset.

    You must provide your API key in the Authorization header.

    :param title: the title of the related item
Example #6
def resource_update(context, data_dict):
    '''Update a resource.

    To update a resource you must be authorized to update the dataset that the
    resource belongs to.

    For further parameters see
    :py:func:`~ckan.logic.action.create.resource_create`.

    :param id: the id of the resource to update
    :type id: string

    :returns: the updated resource
    :rtype: dictionary

    '''
    model = context['model']
    user = context['user']
    id = _get_or_bust(data_dict, "id")

    if not data_dict.get('url'):
        data_dict['url'] = ''

    resource = model.Resource.get(id)
    if not resource:
        log.debug('Could not find resource %s', id)
        raise NotFound(_('Resource was not found.'))

    context["resource"] = resource
    old_resource_format = resource.format

    _check_access('resource_update', context, data_dict)
    del context["resource"]

    package_id = resource.package.id
    pkg_dict = _get_action('package_show')(dict(context, return_type='dict'),
        {'id': package_id})

    for n, p in enumerate(pkg_dict['resources']):
        if p['id'] == id:
            break
    else:
        log.error('Could not find resource %s after all', id)
        raise NotFound(_('Resource was not found.'))

    # Persist the datastore_active extra if already present and not provided
    if ('datastore_active' in resource.extras and
            'datastore_active' not in data_dict):
        data_dict['datastore_active'] = resource.extras['datastore_active']

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.before_update(context, pkg_dict['resources'][n], data_dict)

    upload = uploader.get_resource_uploader(data_dict)

    if 'mimetype' not in data_dict:
        if hasattr(upload, 'mimetype'):
            data_dict['mimetype'] = upload.mimetype

    if 'size' not in data_dict and 'url_type' in data_dict:
        if hasattr(upload, 'filesize'):
            data_dict['size'] = upload.filesize

    pkg_dict['resources'][n] = data_dict

    try:
        context['defer_commit'] = True
        context['use_cache'] = False
        updated_pkg_dict = _get_action('package_update')(context, pkg_dict)
        context.pop('defer_commit')
    except ValidationError as e:
        try:
            raise ValidationError(e.error_dict['resources'][-1])
        except (KeyError, IndexError):
            raise ValidationError(e.error_dict)

    upload.upload(id, uploader.get_max_resource_size())
    model.repo.commit()

    resource = _get_action('resource_show')(context, {'id': id})

    if old_resource_format != resource['format']:
        _get_action('resource_create_default_resource_views')(
            {'model': context['model'], 'user': context['user'],
             'ignore_auth': True},
            {'package': updated_pkg_dict,
             'resource': resource})

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.after_update(context, resource)

    return resource
Example #7
def resource_create(context, data_dict):
    '''Overrides CKAN's ``resource_create`` action. Calculates the checksum of
    the file and, if the file already exists, notifies the user.
    '''

    model = context['model']

    package_id = get_or_bust(data_dict, 'package_id')
    if not data_dict.get('url'):
        data_dict['url'] = ''

    pkg_dict = get_action('package_show')(
        dict(context, return_type='dict'),
        {'id': package_id})

    check_access('resource_create', context, data_dict)

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.before_create(context, data_dict)

    if 'resources' not in pkg_dict:
        pkg_dict['resources'] = []

    upload = uploader.get_resource_uploader(data_dict)

    if hasattr(upload, 'upload_file'):
        checksum = '%s-%s' % (_calculate_checksum(upload.upload_file), 'resource')

        resources = model.Session.query(model.Resource).\
            filter_by(package_id=pkg_dict['id'], state='active').all()
        for rsc in resources:
            if rsc.extras.get('checksum') == checksum:
                raise ValidationError(
                    {_('message'): [_('Resource already exists')]})

        data_dict['checksum'] = checksum
    elif data_dict.get('url'):
        _validate_link(data_dict.get('url'))
    else:
        raise ValidationError({_('message'): [_('Resource file is missing')]})

    if 'mimetype' not in data_dict:
        if hasattr(upload, 'mimetype'):
            data_dict['mimetype'] = upload.mimetype

    if 'size' not in data_dict:
        if hasattr(upload, 'filesize'):
            data_dict['size'] = upload.filesize

    pkg_dict['resources'].append(data_dict)

    try:
        context['defer_commit'] = True
        context['use_cache'] = False
        get_action('package_update')(context, pkg_dict)
        context.pop('defer_commit')
    except ValidationError as e:
        try:
            raise ValidationError(e.error_dict['resources'][-1])
        except (KeyError, IndexError):
            raise ValidationError(e.error_dict)

    # Get the new resource's id from the model, as it will not appear in
    # package_show until after the commit
    upload.upload(context['package'].resources[-1].id,
                  uploader.get_max_resource_size())

    model.repo.commit()

    # Run package_show again to get the actual last resource
    updated_pkg_dict = get_action('package_show')(context, {'id': package_id})
    resource = updated_pkg_dict['resources'][-1]

    #  Add the default views to the new resource
    get_action('resource_create_default_resource_views')(
        {'model': context['model'],
         'user': context['user'],
         'ignore_auth': True
         },
        {'resource': resource,
         'package': updated_pkg_dict
         })

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.after_create(context, resource)

    try:
        # When running some of the tests, it gives wrong results caused by
        # updating SOLR through the update_package_stats. This check is only
        # for the tests.
        skip_update_package_stats = data_dict.get('skip_update_package_stats')

        if not skip_update_package_stats:
            update_package_stats(resource['package_id'])
    except Exception as e:
        log.error(e)

    return resource
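
The _calculate_checksum helper is referenced above but not shown. A plausible sketch, assuming it hashes the uploaded file object in chunks; the actual helper in the extension may differ:

import hashlib

def _calculate_checksum(file_obj):
    # Hash the upload in 64 KB chunks so large files are not read into memory.
    md5 = hashlib.md5()
    for chunk in iter(lambda: file_obj.read(65536), b''):
        md5.update(chunk)
    file_obj.seek(0)  # rewind so the uploader can still read the file
    return md5.hexdigest()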
Example #8
        plugin.before_update(context, pkg_dict['resources'][n], data_dict)

    upload = uploader.get_resource_uploader(data_dict)

    pkg_dict['resources'][n] = data_dict

    try:
        context['defer_commit'] = True
        context['use_cache'] = False
        updated_pkg_dict = _get_action('package_update')(context, pkg_dict)
        context.pop('defer_commit')
    except ValidationError as e:
        errors = e.error_dict['resources'][n]
        raise ValidationError(errors)

    upload.upload(id, uploader.get_max_resource_size())
    model.repo.commit()

    resource = _get_action('resource_show')(context, {'id': id})

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.after_update(context, resource)

    return resource


def resource_view_update(context, data_dict):
    '''Update a resource view.

    To update a resource_view you must be authorized to update the resource
    that the resource_view belongs to.
Example #9
def _save_ueb_package_as_dataset(service_call_results, model_config_dataset_id):
    source = 'uebpackage.tasks._save_ueb_package_as_dataset():'
    ckan_default_dir = uebhelper.StringSettings.ckan_user_session_temp_dir  # '/tmp/ckan'

    # get the matching model configuration dataset object
    model_config_dataset_obj = base.model.Package.get(model_config_dataset_id)
    model_config_dataset_title = model_config_dataset_obj.title
    model_config_dataset_owner_org = model_config_dataset_obj.owner_org
    model_config_dataset_author = model_config_dataset_obj.author

    # create a directory for saving the file
    # this will be a dir in the form of: /tmp/ckan/{random_id}
    random_id = base.model.types.make_uuid()
    destination_dir = os.path.join(ckan_default_dir, random_id)
    os.makedirs(destination_dir)

    model_pkg_filename = uebhelper.StringSettings.ueb_input_model_package_default_filename   # 'ueb_model_pkg.zip'
    model_pkg_file = os.path.join(destination_dir, model_pkg_filename)

    bytes_to_read = 16 * 1024

    try:
        with open(model_pkg_file, 'wb') as file_obj:
            while True:
                data = service_call_results.read(bytes_to_read)
                if not data:
                    break
                file_obj.write(data)
    except Exception as e:
        log.error(source + 'Failed to save the ueb_package zip file to temporary '
                           'location for UEB model configuration dataset ID: %s \n '
                           'Exception: %s' % (model_config_dataset_id, e))
        raise e

    log.info(source + 'ueb_package zip file was saved to temporary location for '
                      'UEB model configuration dataset ID: %s' % model_config_dataset_id)

    # upload the file to CKAN file store
    # resource_metadata = _upload_file(model_pkg_file)
    # if resource_metadata:
    #     log.info(source + 'UEB model package zip file was uploaded for model configuration dataset ID:%s' % model_config_dataset_id)
    # else:
    #     log.error(source + 'Failed to upload UEB model package zip file '
    #                        'for model configuration dataset ID: %s' % model_config_dataset_id)
    #     return
    #
    # # retrieve some of the file meta data
    # resource_url = resource_metadata.get('_label')  # this will return datetime stamp/filename
    #
    # resource_url = '/storage/f/' + resource_url
    # if resource_url.startswith('/'):
    #     resource_url = base.config.get('ckan.site_url', '').rstrip('/') + resource_url
    # else:
    #     resource_url = base.config.get('ckan.site_url', '') + resource_url
    #
    # resource_created_date = resource_metadata.get('_creation_date')
    # resource_name = resource_metadata.get('filename_original')
    # resource_size = resource_metadata.get('_content_length')
    #
    # # add the uploaded ueb model pkg data file as a resource to the dataset
    # resource_create_action = tk.get_action('resource_create')
    # context = {'model': base.model, 'session': base.model.Session, 'save': 'save'}
    # user = uebhelper.get_site_user()
    # context['user'] = user.get('name')
    # context['ignore_auth'] = True
    # context['validate'] = False

    user = uebhelper.get_site_user()
    # create a package
    package_create_action = tk.get_action('package_create')

    # create unique package name using the current time stamp as a postfix to any package name
    unique_postfix = datetime.now().isoformat().replace(':', '-').replace('.', '-').lower()
    pkg_title = model_config_dataset_title

    data_dict = {
                    'name': 'model_package_' + unique_postfix,  # this needs to be unique as required by DB
                    'type': 'model-package',  # dataset type as defined in custom dataset plugin
                    'title': pkg_title,
                    'owner_org': model_config_dataset_owner_org,
                    'author': model_config_dataset_author,
                    'notes': 'UEB model package',
                    'pkg_model_name': 'UEB',
                    'model_version': '1.0',
                    'north_extent': '',
                    'south_extent': '',
                    'east_extent': '',
                    'west_extent': '',
                    'simulation_start_day': '',
                    'simulation_end_day': '',
                    'time_step': '',
                    'package_type': u'Input',
                    'package_run_status': 'Not yet submitted',
                    'package_run_job_id': '',
                    'dataset_type': 'model-package'
                 }

    context = {'model': base.model, 'session': base.model.Session, 'ignore_auth': True, 'user': user.get('name'), 'save': 'save'}
    try:
        uebhelper.register_translator()     # this is needed since we are creating a package in a background operation
        pkg_dict = package_create_action(context, data_dict)
        log.info(source + 'A new dataset was created for UEB input model package with name: %s' % data_dict['title'])
    except Exception as e:
        log.error(source + 'Failed to create a new dataset for ueb input model package for'
                           ' the related model configuration dataset title: %s \n Exception: %s' % (pkg_title, e))
        raise e

    pkg_id = pkg_dict['id']

    if 'resources' not in pkg_dict:
        pkg_dict['resources'] = []

    file_name = munge.munge_filename(model_pkg_filename)
    resource = {'url': file_name, 'url_type': 'upload'}
    upload = uploader.ResourceUpload(resource)
    upload.filename = file_name
    upload.upload_file = open(model_pkg_file, 'rb')
    data_dict = {'format': 'zip', 'name': file_name, 'url': file_name, 'url_type': 'upload'}
    pkg_dict['resources'].append(data_dict)

    try:
        context['defer_commit'] = True
        context['use_cache'] = False
        # update the package
        package_update_action = tk.get_action('package_update')
        package_update_action(context, pkg_dict)
        context.pop('defer_commit')
    except Exception as e:
        log.error(source + ' Failed to update the new dataset for adding the input model package zip file as'
                            ' a resource.\n Exception: %s' % e)

        raise e

    # link this newly created model package dataset to the model configuration dataset
    package_relationship_create_action = tk.get_action('package_relationship_create')
    data_dict = {'subject': pkg_id, 'object': model_config_dataset_id, 'type': 'links_to'}
    package_relationship_create_action(context, data_dict)

    # Get out resource_id resource from model as it will not appear in
    # package_show until after commit
    upload.upload(context['package'].resources[-1].id, uploader.get_max_resource_size())
    base.model.repo.commit()

    # update the related model configuration dataset to show that the package is available

    data_dict = {'package_availability': 'Available'}
    update_msg = 'system auto updated ueb package dataset'
    background_task = True
    try:
        updated_package = uebhelper.update_package(model_config_dataset_id, data_dict, update_msg, background_task)
        log.info(source + 'UEB model configuration dataset was updated as a result of '
                          'receiving model input package for dataset:%s' % updated_package['name'])
    except Exception as e:
        log.error(source + 'Failed to update UEB model configuration dataset after '
                           'receiving model input package for dataset ID:%s \n'
                           'Exception: %s' % (model_config_dataset_id, e))
        raise e
Example #10
def resource_update(context, data_dict):
    '''Update a resource.

    This is a duplicate of the CKAN core resource_update action, with just the
    addition of a synchronous data validation step.

    This is of course not ideal, but it's the only way right now to hook
    reliably into the update process without overcomplicating things.
    Hopefully future versions of CKAN will incorporate more flexible hook
    points that will allow a better approach.

    '''
    model = context['model']
    id = t.get_or_bust(data_dict, "id")

    if not data_dict.get('url'):
        data_dict['url'] = ''

    resource = model.Resource.get(id)
    if not resource:
        log.debug('Could not find resource %s', id)
        raise t.ObjectNotFound(t._('Resource was not found.'))

    context["resource"] = resource
    old_resource_format = resource.format

    t.check_access('resource_update', context, data_dict)
    del context["resource"]

    package_id = resource.package.id
    pkg_dict = t.get_action('package_show')(dict(context, return_type='dict'),
                                            {'id': package_id})

    for n, p in enumerate(pkg_dict['resources']):
        if p['id'] == id:
            break
    else:
        log.error('Could not find resource %s after all', id)
        raise t.ObjectNotFound(t._('Resource was not found.'))

    # Persist the datastore_active extra if already present and not provided
    if ('datastore_active' in resource.extras and
            'datastore_active' not in data_dict):
        data_dict['datastore_active'] = resource.extras['datastore_active']

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.before_update(context, pkg_dict['resources'][n], data_dict)

    upload = uploader.get_resource_uploader(data_dict)

    if 'mimetype' not in data_dict:
        if hasattr(upload, 'mimetype'):
            data_dict['mimetype'] = upload.mimetype

    if 'size' not in data_dict and 'url_type' in data_dict:
        if hasattr(upload, 'filesize'):
            data_dict['size'] = upload.filesize

    pkg_dict['resources'][n] = data_dict

    try:
        context['defer_commit'] = True
        context['use_cache'] = False
        updated_pkg_dict = t.get_action('package_update')(context, pkg_dict)
        context.pop('defer_commit')
    except t.ValidationError as e:
        try:
            raise t.ValidationError(e.error_dict['resources'][-1])
        except (KeyError, IndexError):
            raise t.ValidationError(e.error_dict)

    upload.upload(id, uploader.get_max_resource_size())

    # Custom code starts

    if get_update_mode_from_config() == u'sync':

        run_validation = True
        for plugin in plugins.PluginImplementations(IDataValidation):
            if not plugin.can_validate(context, data_dict):
                log.debug('Skipping validation for resource %s', id)
                run_validation = False

        if run_validation:
            is_local_upload = (
                hasattr(upload, 'filename') and
                upload.filename is not None and
                isinstance(upload, uploader.ResourceUpload))
            _run_sync_validation(
                id, local_upload=is_local_upload, new_resource=True)

    # Custom code ends

    model.repo.commit()

    resource = t.get_action('resource_show')(context, {'id': id})

    if old_resource_format != resource['format']:
        t.get_action('resource_create_default_resource_views')(
            {'model': context['model'], 'user': context['user'],
             'ignore_auth': True},
            {'package': updated_pkg_dict,
             'resource': resource})

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.after_update(context, resource)

    return resource
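
The IDataValidation loop above lets other plugins veto the synchronous validation step. A minimal sketch of such a plugin, assuming the interface exposes can_validate with the signature used here (the import path follows ckanext-validation and may differ):

import ckan.plugins as plugins
from ckanext.validation.interfaces import IDataValidation  # assumed import path

class SkipFlaggedValidation(plugins.SingletonPlugin):
    plugins.implements(IDataValidation)

    def can_validate(self, context, data_dict):
        # Skip validation when the caller sets an explicit flag;
        # 'skip_validation' is an illustrative key, not a CKAN standard.
        return not data_dict.get('skip_validation', False)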
Example #11
def resource_create(context, data_dict):
    '''Appends a new resource to a dataset's list of resources.

    This is a duplicate of the CKAN core resource_create action, with just the
    addition of a synchronous data validation step.

    This is of course not ideal but it's the only way right now to hook
    reliably into the creation process without overcomplicating things.
    Hopefully future versions of CKAN will incorporate more flexible hook
    points that will allow a better approach.

    '''
    model = context['model']

    package_id = t.get_or_bust(data_dict, 'package_id')
    if not data_dict.get('url'):
        data_dict['url'] = ''

    pkg_dict = t.get_action('package_show')(
        dict(context, return_type='dict'),
        {'id': package_id})

    t.check_access('resource_create', context, data_dict)

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.before_create(context, data_dict)

    if 'resources' not in pkg_dict:
        pkg_dict['resources'] = []

    upload = uploader.get_resource_uploader(data_dict)

    if 'mimetype' not in data_dict:
        if hasattr(upload, 'mimetype'):
            data_dict['mimetype'] = upload.mimetype

    if 'size' not in data_dict:
        if hasattr(upload, 'filesize'):
            data_dict['size'] = upload.filesize

    pkg_dict['resources'].append(data_dict)

    try:
        context['defer_commit'] = True
        context['use_cache'] = False
        t.get_action('package_update')(context, pkg_dict)
        context.pop('defer_commit')
    except t.ValidationError as e:
        try:
            raise t.ValidationError(e.error_dict['resources'][-1])
        except (KeyError, IndexError):
            raise t.ValidationError(e.error_dict)

    # Get out resource_id resource from model as it will not appear in
    # package_show until after commit
    resource_id = context['package'].resources[-1].id
    upload.upload(resource_id,
                  uploader.get_max_resource_size())

    # Custom code starts

    if get_create_mode_from_config() == u'sync':

        run_validation = True

        for plugin in plugins.PluginImplementations(IDataValidation):
            if not plugin.can_validate(context, data_dict):
                log.debug('Skipping validation for resource %s', resource_id)
                run_validation = False

        if run_validation:
            is_local_upload = (
                hasattr(upload, 'filename') and
                upload.filename is not None and
                isinstance(upload, uploader.ResourceUpload))
            _run_sync_validation(
                resource_id, local_upload=is_local_upload, new_resource=True)

    # Custom code ends

    model.repo.commit()

    #  Run package show again to get out actual last_resource
    updated_pkg_dict = t.get_action('package_show')(
        context, {'id': package_id})
    resource = updated_pkg_dict['resources'][-1]

    #  Add the default views to the new resource
    t.get_action('resource_create_default_resource_views')(
        {'model': context['model'],
         'user': context['user'],
         'ignore_auth': True
         },
        {'resource': resource,
         'package': updated_pkg_dict
         })

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.after_create(context, resource)

    return resource
Example #12
    upload = uploader.get_resource_uploader(data_dict)

    pkg_dict['resources'][n] = data_dict
    try:
        context['defer_commit'] = True
        context['use_cache'] = False
        updated_pkg_dict = _get_action('package_update')(context, pkg_dict)
        context.pop('defer_commit')
    except ValidationError as e:
        errors = e.error_dict['resources'][n]
        raise ValidationError(errors)

    # Delete the previous file (?)
    log.debug("Deleting previous file: %s", previous_s3_object_url)
    upload.delete(previous_s3_object_url)
    remote_filepath = upload.upload(id, uploader.get_max_resource_size())
    log.debug(remote_filepath)

    if remote_filepath:
        pkg_dict['resources'][n]['url_type'] = ''
        pkg_dict['resources'][n]['url'] = remote_filepath
        _get_action('package_update')(context, pkg_dict)

    model.repo.commit()

    resource = _get_action('resource_show')(context, {'id': id})

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.after_update(context, resource)

    return resource
Example #13
def resource_update(context, data_dict):
    '''Update a resource.

    To update a resource you must be authorized to update the dataset that the
    resource belongs to.

    For further parameters see
    :py:func:`~ckan.logic.action.create.resource_create`.

    :param id: the id of the resource to update
    :type id: string

    :returns: the updated resource
    :rtype: dictionary

    '''
    model = context['model']
    user = context['user']
    id = get_or_bust(data_dict, "id")

    if not data_dict.get('url'):
        data_dict['url'] = ''

    resource = model.Resource.get(id)
    if not resource:
        log.debug('Could not find resource %s', id)
        raise NotFound(_('Resource was not found.'))

    context["resource"] = resource
    old_resource_format = resource.format

    check_access('resource_update', context, data_dict)
    del context["resource"]

    package_id = resource.package.id
    pkg_dict = get_action('package_show')(dict(context, return_type='dict'),
                                           {'id': package_id})

    for n, p in enumerate(pkg_dict['resources']):
        if p['id'] == id:
            break
    else:
        log.error('Could not find resource %s after all', id)
        raise NotFound(_('Resource was not found.'))

    # Persist the datastore_active and checksum extras if already present and not provided
    if ('datastore_active' in resource.extras and
            'datastore_active' not in data_dict):
        data_dict['datastore_active'] = resource.extras['datastore_active']
    if ('checksum' in resource.extras and
            'checksum' not in data_dict):
        data_dict['checksum'] = resource.extras['checksum']

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.before_update(context, pkg_dict['resources'][n], data_dict)

    upload = uploader.get_resource_uploader(data_dict)

    if hasattr(upload, 'upload_file'):
        checksum = '%s-%s' % (_calculate_checksum(upload.upload_file), 'resource')

        resources = model.Session.query(model.Resource).\
            filter_by(package_id=pkg_dict['id'], state='active').all()
        for rsc in resources:
            if rsc.extras.get('checksum') == checksum:
                raise ValidationError(
                    {_('message'): [_('Resource already exists')]})

        data_dict['checksum'] = checksum
    elif data_dict.get('url'):
        # if url_type is not upload then it is Link
        if resource.url_type != 'upload':
            data_dict['checksum'] = ''
            _validate_link(data_dict.get('url'))
    else:
        raise ValidationError({_('message'): [_('Resource file is missing')]})

    if 'mimetype' not in data_dict:
        if hasattr(upload, 'mimetype'):
            data_dict['mimetype'] = upload.mimetype

    if 'size' not in data_dict and 'url_type' in data_dict:
        if hasattr(upload, 'filesize'):
            data_dict['size'] = upload.filesize

    pkg_dict['resources'][n] = data_dict

    try:
        context['defer_commit'] = True
        context['use_cache'] = False
        updated_pkg_dict = get_action('package_update')(context, pkg_dict)
        context.pop('defer_commit')
    except ValidationError as e:
        try:
            raise ValidationError(e.error_dict['resources'][-1])
        except (KeyError, IndexError):
            raise ValidationError(e.error_dict)

    upload.upload(id, uploader.get_max_resource_size())
    model.repo.commit()

    resource = get_action('resource_show')(context, {'id': id})

    if old_resource_format != resource['format']:
        get_action('resource_create_default_resource_views')(
            {'model': context['model'], 'user': context['user'],
             'ignore_auth': True},
            {'package': updated_pkg_dict,
             'resource': resource})

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.after_update(context, resource)

    try:
        update_package_stats(resource['package_id'])
    except Exception as e:
        log.error(e)
        log.exception(e)

    return resource
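
Like _calculate_checksum, the _validate_link helper used above is not shown. A hypothetical sketch that only checks the URL shape; the real helper may also probe the link:

from urllib.parse import urlparse

from ckan.logic import ValidationError

def _validate_link(url):
    # Reject URLs without an http(s) scheme and a host, raising the same
    # error type the callers above expect.
    parsed = urlparse(url or '')
    if parsed.scheme not in ('http', 'https') or not parsed.netloc:
        raise ValidationError({'message': ['Invalid URL: %s' % url]})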
Example #14
def _save_ueb_package_as_dataset(service_call_results,
                                 model_config_dataset_id):
    source = 'uebpackage.tasks._save_ueb_package_as_dataset():'
    ckan_default_dir = uebhelper.StringSettings.ckan_user_session_temp_dir  # '/tmp/ckan'

    # get the matching model configuration dataset object
    model_config_dataset_obj = base.model.Package.get(model_config_dataset_id)
    model_config_dataset_title = model_config_dataset_obj.title
    model_config_dataset_owner_org = model_config_dataset_obj.owner_org
    model_config_dataset_author = model_config_dataset_obj.author

    # create a directory for saving the file
    # this will be a dir in the form of: /tmp/ckan/{random_id}
    random_id = base.model.types.make_uuid()
    destination_dir = os.path.join(ckan_default_dir, random_id)
    os.makedirs(destination_dir)

    model_pkg_filename = uebhelper.StringSettings.ueb_input_model_package_default_filename  # 'ueb_model_pkg.zip'
    model_pkg_file = os.path.join(destination_dir, model_pkg_filename)

    bytes_to_read = 16 * 1024

    try:
        with open(model_pkg_file, 'wb') as file_obj:
            while True:
                data = service_call_results.read(bytes_to_read)
                if not data:
                    break
                file_obj.write(data)
    except Exception as e:
        log.error(source +
                  'Failed to save the ueb_package zip file to temporary '
                  'location for UEB model configuration dataset ID: %s \n '
                  'Exception: %s' % (model_config_dataset_id, e))
        raise e

    log.info(source +
             'ueb_package zip file was saved to temporary location for '
             'UEB model configuration dataset ID: %s' %
             model_config_dataset_id)

    # upload the file to CKAN file store
    # resource_metadata = _upload_file(model_pkg_file)
    # if resource_metadata:
    #     log.info(source + 'UEB model package zip file was uploaded for model configuration dataset ID:%s' % model_config_dataset_id)
    # else:
    #     log.error(source + 'Failed to upload UEB model package zip file '
    #                        'for model configuration dataset ID: %s' % model_config_dataset_id)
    #     return
    #
    # # retrieve some of the file meta data
    # resource_url = resource_metadata.get('_label')  # this will return datetime stamp/filename
    #
    # resource_url = '/storage/f/' + resource_url
    # if resource_url.startswith('/'):
    #     resource_url = base.config.get('ckan.site_url', '').rstrip('/') + resource_url
    # else:
    #     resource_url = base.config.get('ckan.site_url', '') + resource_url
    #
    # resource_created_date = resource_metadata.get('_creation_date')
    # resource_name = resource_metadata.get('filename_original')
    # resource_size = resource_metadata.get('_content_length')
    #
    # # add the uploaded ueb model pkg data file as a resource to the dataset
    # resource_create_action = tk.get_action('resource_create')
    # context = {'model': base.model, 'session': base.model.Session, 'save': 'save'}
    # user = uebhelper.get_site_user()
    # context['user'] = user.get('name')
    # context['ignore_auth'] = True
    # context['validate'] = False

    user = uebhelper.get_site_user()
    # create a package
    package_create_action = tk.get_action('package_create')

    # create unique package name using the current time stamp as a postfix to any package name
    unique_postfix = datetime.now().isoformat().replace(':', '-').replace(
        '.', '-').lower()
    pkg_title = model_config_dataset_title

    data_dict = {
        'name': 'model_package_' + unique_postfix,  # this needs to be unique as required by DB
        'type': 'model-package',  # dataset type as defined in custom dataset plugin
        'title': pkg_title,
        'owner_org': model_config_dataset_owner_org,
        'author': model_config_dataset_author,
        'notes': 'UEB model package',
        'pkg_model_name': 'UEB',
        'model_version': '1.0',
        'north_extent': '',
        'south_extent': '',
        'east_extent': '',
        'west_extent': '',
        'simulation_start_day': '',
        'simulation_end_day': '',
        'time_step': '',
        'package_type': u'Input',
        'package_run_status': 'Not yet submitted',
        'package_run_job_id': '',
        'dataset_type': 'model-package'
    }

    context = {
        'model': base.model,
        'session': base.model.Session,
        'ignore_auth': True,
        'user': user.get('name'),
        'save': 'save'
    }
    try:
        # this is needed since we are creating a package in a background operation
        uebhelper.register_translator()
        pkg_dict = package_create_action(context, data_dict)
        log.info(
            source +
            'A new dataset was created for UEB input model package with name: %s'
            % data_dict['title'])
    except Exception as e:
        log.error(
            source +
            'Failed to create a new dataset for ueb input model package for'
            ' the related model configuration dataset title: %s \n Exception: %s'
            % (pkg_title, e))
        raise e

    pkg_id = pkg_dict['id']

    if 'resources' not in pkg_dict:
        pkg_dict['resources'] = []

    file_name = munge.munge_filename(model_pkg_filename)
    resource = {'url': file_name, 'url_type': 'upload'}
    upload = uploader.ResourceUpload(resource)
    upload.filename = file_name
    upload.upload_file = open(model_pkg_file, 'rb')
    data_dict = {
        'format': 'zip',
        'name': file_name,
        'url': file_name,
        'url_type': 'upload'
    }
    pkg_dict['resources'].append(data_dict)

    try:
        context['defer_commit'] = True
        context['use_cache'] = False
        # update the package
        package_update_action = tk.get_action('package_update')
        package_update_action(context, pkg_dict)
        context.pop('defer_commit')
    except Exception as e:
        log.error(
            source +
            ' Failed to update the new dataset for adding the input model package zip file as'
            ' a resource.\n Exception: %s' % e)

        raise e

    # link this newly created model package dataset to the model configuration dataset
    package_relationship_create_action = tk.get_action(
        'package_relationship_create')
    data_dict = {
        'subject': pkg_id,
        'object': model_config_dataset_id,
        'type': 'links_to'
    }
    package_relationship_create_action(context, data_dict)

    # Get out resource_id resource from model as it will not appear in
    # package_show until after commit
    upload.upload(context['package'].resources[-1].id,
                  uploader.get_max_resource_size())
    base.model.repo.commit()

    # update the related model configuration dataset to show that the package is available

    data_dict = {'package_availability': 'Available'}
    update_msg = 'system auto updated ueb package dataset'
    background_task = True
    try:
        updated_package = uebhelper.update_package(model_config_dataset_id,
                                                   data_dict, update_msg,
                                                   background_task)
        log.info(source +
                 'UEB model configuration dataset was updated as a result of '
                 'receiving model input package for dataset:%s' %
                 updated_package['name'])
    except Exception as e:
        log.error(source +
                  'Failed to update UEB model configuration dataset after '
                  'receiving model input package for dataset ID:%s \n'
                  'Exception: %s' % (model_config_dataset_id, e))
        raise e
Example #15
    upload = uploader.ResourceUpload(data_dict)

    pkg_dict["resources"].append(data_dict)

    try:
        context["defer_commit"] = True
        context["use_cache"] = False
        _get_action("package_update")(context, pkg_dict)
        context.pop("defer_commit")
    except ValidationError as e:
        errors = e.error_dict["resources"][-1]
        raise ValidationError(errors)

    # Get the new resource's id from the model, as it will not appear in
    # package_show until after the commit
    upload.upload(context["package"].resources[-1].id, uploader.get_max_resource_size())
    model.repo.commit()

    # Run package_show again to get the actual last resource
    pkg_dict = _get_action("package_show")(context, {"id": package_id})
    resource = pkg_dict["resources"][-1]

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.after_create(context, resource)

    return resource


def resource_view_create(context, data_dict):
    """Creates a new resource view.
Example #16
    try:
        context['defer_commit'] = True
        context['use_cache'] = False
        pkg_dict = _get_action('package_update')(context, pkg_dict)
        context.pop('defer_commit')
    except ValidationError as e:
        # For debugging #1281
        if 'resources' not in e.error_dict:
            raise Exception('resource_update error: %r' %
                            e.error_dict)

        errors = e.error_dict['resources'][n]
        raise ValidationError(errors)

    upload.upload(id, uploader.get_max_resource_size())
    model.repo.commit()
    return _get_action('resource_show')(context, {'id': id})


def package_update(context, data_dict):
    '''Update a dataset (package).

    You must be authorized to edit the dataset and the groups that it belongs
    to.

    Plugins may change the parameters of this function depending on the value
    of the dataset's ``type`` attribute, see the ``IDatasetForm`` plugin
    interface.

    For further parameters see ``package_create()``.
Example #17
    def _save_shape_file_as_resource(self, lat, lon, shape_file_name,
                                     watershed_des, organization):
        source = 'delineate.delineatewatershed._save_shape_file_as_resource():'
        ajax_response = d_helper.AJAXResponse()

        if not self._validate_file_name(shape_file_name):
            ajax_response.success = False
            ajax_response.message = ('Invalid shape file name: %s.\n'
                                     'File name needs to have only '
                                     'alphanumeric characters and '
                                     'dash, hyphen or space characters.'
                                     % shape_file_name)
            return ajax_response.to_json()

        # TODO: make the saving of the file to temp directory a separate function
        ckan_default_dir = d_helper.StringSettings.ckan_user_session_temp_dir
        session_id = base.session['id']
        shape_files_source_dir = os.path.join(ckan_default_dir, session_id,
                                              'ShapeFiles')
        target_zip_dir = os.path.join(ckan_default_dir, session_id,
                                      'ShapeZippedFile')
        shape_zip_file = os.path.join(target_zip_dir, shape_file_name + '.zip')

        if not os.path.isdir(shape_files_source_dir):
            log.error(
                source +
                'CKAN error: Expected shape file source dir path (%s) is missing.'
                % shape_files_source_dir)

            ajax_response.success = False
            ajax_response.message = _(
                'Failed to save the watershed shape file.')
            return ajax_response.to_json()

        if not os.path.exists(shape_zip_file):
            # create the watershed zip file first
            if os.path.isdir(target_zip_dir):
                shutil.rmtree(target_zip_dir)

            os.makedirs(target_zip_dir)
            files_to_archive = shape_files_source_dir + '/' + 'Watershed.*'
            with zipfile.ZipFile(shape_zip_file, 'w') as zipper:
                for file_to_zip in glob.glob(files_to_archive):
                    zipper.write(file_to_zip,
                                 os.path.basename(file_to_zip),
                                 compress_type=zipfile.ZIP_DEFLATED)

        # TODO: make the creation of a new package a new function
        # create a package
        package_create_action = tk.get_action('package_create')

        # create unique package name using the current time stamp as a postfix to any package name
        unique_postfix = datetime.now().isoformat().replace(':', '-').replace(
            '.', '-').lower()
        pkg_title = shape_file_name  # + '_'
        pkg_name = shape_file_name.replace(' ', '-').lower()
        data_dict = {
            'name': pkg_name + '_' + unique_postfix,
            'type': 'geographic-feature-set',
            'title': pkg_title,
            # TODO: userObj is None always. Need to retrieve user full name
            'author': tk.c.userObj.name if tk.c.userObj else tk.c.author,
            'notes': 'This is a dataset that contains a watershed shape zip file'
                     ' for an outlet location at latitude:%s and longitude:%s. '
                     % (lat, lon) + watershed_des,
            'owner_org': organization,
            'variable_name': '',  # extra metadata field begins from here
            'variable_unit': '',
            'north_extent': '',
            'south_extent': '',
            'east_extent': '',
            'west_extent': '',
            'projection': 'WGS_1984',  # this is what our delineation service sets for the watershed
            'dataset_type': 'geographic-feature-set'
        }

        context = {
            'model': base.model,
            'session': base.model.Session,
            'user': tk.c.user or tk.c.author,
            'save': 'save'
        }
        try:
            pkg_dict = package_create_action(context, data_dict)
            log.info(source + 'A new dataset was created with name: %s' %
                     data_dict['title'])
        except Exception as e:
            log.error(
                source +
                'Failed to create a new dataset for saving watershed shape file as'
                ' a resource.\n Exception: %s' % e)

            ajax_response.success = False
            ajax_response.message = _(
                'Failed to create a new dataset for'
                ' saving watershed shape file as a resource.')
            return ajax_response.to_json()

        # TODO: make the add resource to a package a new function
        if 'resources' not in pkg_dict:
            pkg_dict['resources'] = []

        file_name = munge.munge_filename(shape_file_name + '.zip')
        resource = {'url': file_name, 'url_type': 'upload'}
        upload = uploader.ResourceUpload(resource)
        upload.filename = file_name
        upload.upload_file = open(shape_zip_file, 'rb')
        data_dict = {
            'format': 'zip',
            'name': file_name,
            'url': file_name,
            'url_type': 'upload'
        }
        pkg_dict['resources'].append(data_dict)

        try:
            context['defer_commit'] = True
            context['use_cache'] = False
            # update the package
            package_update_action = tk.get_action('package_update')
            package_update_action(context, pkg_dict)
            context.pop('defer_commit')
        except Exception as e:
            log.error(
                source +
                'Failed to update the new dataset for adding watershed shape file as'
                ' a resource.\n Exception: %s' % e)

            ajax_response.success = False
            ajax_response.message = _(
                'Failed to save watershed shape file as a resource.')
            return ajax_response.to_json()

        # Get out resource_id resource from model as it will not appear in
        # package_show until after commit
        upload.upload(context['package'].resources[-1].id,
                      uploader.get_max_resource_size())
        base.model.repo.commit()
        ajax_response.success = True
        ajax_response.message = _(
            'Watershed shape file was saved as a resource.')
        return ajax_response.to_json()
Example #18
    def _save_shape_file_as_resource(self, lat, lon, shape_file_name, watershed_des, organization):
        source = 'delineate.delineatewatershed._save_shape_file_as_resource():'
        ajax_response = d_helper.AJAXResponse()

        if not self._validate_file_name(shape_file_name):
            ajax_response.success = False
            ajax_response.message = ('Invalid shape file name: %s.\n'
                                     'File name needs to have only '
                                     'alphanumeric characters and '
                                     'dash, hyphen or space characters.'
                                     % shape_file_name)
            return ajax_response.to_json()

        # TODO: make the saving of the file to temp directory a separate function
        ckan_default_dir = d_helper.StringSettings.ckan_user_session_temp_dir
        session_id = base.session['id']
        shape_files_source_dir = os.path.join(ckan_default_dir, session_id, 'ShapeFiles')
        target_zip_dir = os.path.join(ckan_default_dir, session_id, 'ShapeZippedFile') 
        shape_zip_file = os.path.join(target_zip_dir, shape_file_name + '.zip')

        if not os.path.isdir(shape_files_source_dir):
            log.error(source + 'CKAN error: Expected shape file source dir path (%s) is missing.'
                      % shape_files_source_dir)

            ajax_response.success = False
            ajax_response.message = _('Failed to save the watershed shape file.')
            return ajax_response.to_json()

        if not os.path.exists(shape_zip_file):
            # create the watershed zip file first
            if os.path.isdir(target_zip_dir):
                shutil.rmtree(target_zip_dir)
            
            os.makedirs(target_zip_dir)
            files_to_archive = shape_files_source_dir + '/' + 'Watershed.*'
            with zipfile.ZipFile(shape_zip_file, 'w') as zipper:
                for file_to_zip in glob.glob(files_to_archive):
                    zipper.write(file_to_zip, os.path.basename(file_to_zip),
                                 compress_type=zipfile.ZIP_DEFLATED)

        # TODO: make the creation of a new package a new function
        # create a package
        package_create_action = tk.get_action('package_create')
        
        # create unique package name using the current time stamp as a postfix to any package name
        unique_postfix = datetime.now().isoformat().replace(':', '-').replace('.', '-').lower()
        pkg_title = shape_file_name  # + '_'
        pkg_name = shape_file_name.replace(' ', '-').lower()
        data_dict = {
                    'name': pkg_name + '_' + unique_postfix,
                    'type': 'geographic-feature-set',
                    'title': pkg_title,
                    'author': tk.c.userObj.name if tk.c.userObj else tk.c.author,   # TODO: userObj is None always. Need to retrieve user full name
                    'notes': 'This is a dataset that contains a watershed shape zip file for an outlet'
                             ' location at latitude:%s and longitude:%s. ' % (lat, lon) + watershed_des,
                    'owner_org': organization,
                    'variable_name': '',  # extra metadata field begins from here
                    'variable_unit': '',
                    'north_extent': '',
                    'south_extent': '',
                    'east_extent': '',
                    'west_extent': '',
                    'projection': 'WGS_1984',   # this is what our delineation service sets for the watershed
                    'dataset_type': 'geographic-feature-set'
                    }
        
        context = {'model': base.model, 'session': base.model.Session, 'user': tk.c.user or tk.c.author, 'save': 'save'}
        try:
            pkg_dict = package_create_action(context, data_dict)
            log.info(source + 'A new dataset was created with name: %s' % data_dict['title'])
        except Exception as e:
            log.error(source + 'Failed to create a new dataset for saving watershed shape file as'
                               ' a resource.\n Exception: %s' % e)

            ajax_response.success = False
            ajax_response.message = _('Failed to create a new dataset for'
                                      ' saving watershed shape file as a resource.')
            return ajax_response.to_json()

        # TODO: make the add resource to a package a new function
        if 'resources' not in pkg_dict:
            pkg_dict['resources'] = []

        file_name = munge.munge_filename(shape_file_name + '.zip')
        resource = {'url': file_name, 'url_type': 'upload'}
        upload = uploader.ResourceUpload(resource)
        upload.filename = file_name
        upload.upload_file = open(shape_zip_file, 'rb')
        data_dict = {'format': 'zip', 'name': file_name, 'url': file_name, 'url_type': 'upload'}
        pkg_dict['resources'].append(data_dict)

        try:
            context['defer_commit'] = True
            context['use_cache'] = False
            # update the package
            package_update_action = tk.get_action('package_update')
            package_update_action(context, pkg_dict)
            context.pop('defer_commit')
        except Exception as e:
            log.error(source + 'Failed to update the new dataset for adding watershed shape file as'
                               ' a resource.\n Exception: %s' % e)

            ajax_response.success = False
            ajax_response.message = _('Failed to save watershed shape file as a resource.')
            return ajax_response.to_json()

        # Get out resource_id resource from model as it will not appear in
        # package_show until after commit
        upload.upload(context['package'].resources[-1].id, uploader.get_max_resource_size())
        base.model.repo.commit()
        ajax_response.success = True
        ajax_response.message = _('Watershed shape file was saved as a resource.')
        return ajax_response.to_json()
Example #19
        pkg_dict['resources'] = []

    upload = uploader.get_resource_uploader(data_dict)
    pkg_dict['resources'].append(data_dict)
    try:
        context['defer_commit'] = True
        context['use_cache'] = False
        _get_action('package_update')(context, pkg_dict)
        context.pop('defer_commit')
    except ValidationError as e:
        errors = e.error_dict['resources'][-1]
        raise ValidationError(errors)

    # Get the new resource's id from the model, as it will not appear in
    # package_show until after the commit
    remote_filepath = upload.upload(context['package'].resources[-1].id,
                                    uploader.get_max_resource_size())
    if remote_filepath:
        log.debug(remote_filepath)
        pkg_dict['resources'][-1]['url_type'] = ''
        pkg_dict['resources'][-1]['url'] = remote_filepath
    _get_action('package_update')(context, pkg_dict)
    model.repo.commit()

    # Run package_show again to get the actual last resource
    updated_pkg_dict = _get_action('package_show')(context, {'id': package_id})
    resource = updated_pkg_dict['resources'][-1]

    for plugin in plugins.PluginImplementations(plugins.IResourceController):
        plugin.after_create(context, resource)

    return resource