Example 1
0
def file_upload(filename,
                name=None,
                user=None,
                title=None,
                abstract=None,
                license=None,
                category=None,
                keywords=None,
                regions=None,
                date=None,
                skip=True,
                overwrite=False,
                charset='UTF-8',
                metadata_uploaded_preserve=False,
                metadata_upload_form=False):
    """Saves a layer in GeoNode asking as little information as possible.
       Only filename is required, user and title are optional.

    :param filename: path to the base file of the layer being uploaded.
    :param name: layer name (slugified); derived from the title if omitted.
    :param user: user performing the upload (resolved via get_valid_user).
    :param title: human-readable title; defaults to a prettified basename.
    :param abstract: free-text abstract for the layer.
    :param license: license name/abbreviation/url/description to match.
    :param category: topic category identifier or description to match.
    :param keywords: keywords to attach to the layer (defaults to empty).
    :param regions: region names to attach to the layer (defaults to empty).
    :param date: layer date as a '%Y-%m-%d %H:%M:%S' string.
    :param skip: unused here; kept for interface compatibility.
    :param overwrite: replace an existing layer with the same name.
    :param charset: character set of the uploaded data.
    :param metadata_uploaded_preserve: keep the uploaded XML verbatim.
    :param metadata_upload_form: look the layer up by metadata identifier.
    :return: Uploaded layer
    :rtype: Layer
    """
    if keywords is None:
        keywords = []
    if regions is None:
        regions = []

    # Get a valid user
    theuser = get_valid_user(user)

    # Create a new upload session
    upload_session = UploadSession.objects.create(user=theuser)

    # Get all the files uploaded with the layer
    files = get_files(filename)

    # Set a default title that looks nice ...
    if title is None:
        basename = os.path.splitext(os.path.basename(filename))[0]
        title = basename.title().replace('_', ' ')

    # Create a name from the title if it is not passed.
    if name is None:
        name = slugify(title).replace('-', '_')
    else:
        name = slugify(name)  # assert that name is slugified

    # Resolve the license against any of its textual representations;
    # an ambiguous or unknown value falls back to no license.
    if license is not None:
        licenses = License.objects.filter(
            Q(name__iexact=license) |
            Q(abbreviation__iexact=license) |
            Q(url__iexact=license) |
            Q(description__iexact=license))
        if len(licenses) == 1:
            license = licenses[0]
        else:
            license = None

    # Resolve the topic category the same way.
    if category is not None:
        categories = TopicCategory.objects.filter(
            Q(identifier__iexact=category) |
            Q(gn_description__iexact=category))
        if len(categories) == 1:
            category = categories[0]
        else:
            category = None

    # Generate a name that is not taken if overwrite is False.
    valid_name = get_valid_layer_name(name, overwrite)

    # Add them to the upload session (new file fields are created).
    assigned_name = None
    for type_name, fn in files.items():
        with open(fn, 'rb') as f:
            upload_session.layerfile_set.create(
                name=type_name, file=File(
                    f, name='%s.%s' %
                    (assigned_name or valid_name, type_name)))
            # save the system assigned name for the remaining files
            if not assigned_name:
                the_file = upload_session.layerfile_set.all()[0].file.name
                assigned_name = os.path.splitext(os.path.basename(the_file))[0]

    # Get a bounding box
    bbox_x0, bbox_x1, bbox_y0, bbox_y1 = get_bbox(filename)

    # by default, if RESOURCE_PUBLISHING=True then layer.is_published
    # must be set to False
    is_approved = True
    is_published = True
    if settings.RESOURCE_PUBLISHING or settings.ADMIN_MODERATE_UPLOADS:
        is_approved = False
        is_published = False

    defaults = {
        'upload_session': upload_session,
        'title': title,
        'abstract': abstract,
        # NOTE(review): 'owner' is the raw `user` argument, not the resolved
        # `theuser` used for the upload session -- confirm this is intended.
        'owner': user,
        'charset': charset,
        'bbox_x0': bbox_x0,
        'bbox_x1': bbox_x1,
        'bbox_y0': bbox_y0,
        'bbox_y1': bbox_y1,
        'is_approved': is_approved,
        'is_published': is_published,
        'license': license,
        'category': category
    }

    # set metadata.  `identifier` must be defined even when no XML sidecar
    # was uploaded because it is consulted below when metadata_upload_form
    # is True (previously this path raised NameError).
    identifier = None
    if 'xml' in files:
        with open(files['xml']) as f:
            xml_file = f.read()
        defaults['metadata_uploaded'] = True
        defaults['metadata_uploaded_preserve'] = metadata_uploaded_preserve

        # get model properties from XML
        identifier, vals, regions, keywords = set_metadata(xml_file)

        if defaults['metadata_uploaded_preserve']:
            defaults['metadata_xml'] = xml_file
            defaults['uuid'] = identifier

        for key, value in vals.items():
            if key == 'spatial_representation_type':
                value = SpatialRepresentationType(identifier=value)
            elif key == 'topic_category':
                value, created = TopicCategory.objects.get_or_create(
                    identifier=value.lower(),
                    defaults={'description': '', 'gn_description': value})
                key = 'category'
                defaults[key] = value
            else:
                defaults[key] = value

    regions_resolved, regions_unresolved = resolve_regions(regions)
    keywords.extend(regions_unresolved)

    if getattr(settings, 'NLP_ENABLED', False):
        try:
            from geonode.contrib.nlp.utils import nlp_extract_metadata_dict
            nlp_metadata = nlp_extract_metadata_dict({
                'title': defaults.get('title', None),
                'abstract': defaults.get('abstract', None),
                'purpose': defaults.get('purpose', None)})
            if nlp_metadata:
                regions_resolved.extend(nlp_metadata.get('regions', []))
                keywords.extend(nlp_metadata.get('keywords', []))
        except Exception:
            # NLP enrichment is best effort.  Narrowed from BaseException so
            # KeyboardInterrupt/SystemExit still propagate; parenthesized
            # print keeps this line valid under both Python 2 and 3.
            print("NLP extraction failed.")

    # If it is a vector file, create the layer in postgis.
    if is_vector(filename):
        defaults['storeType'] = 'dataStore'

    # If it is a raster file, get the resolution.
    if is_raster(filename):
        defaults['storeType'] = 'coverageStore'

    # Create a Django object.
    created = False
    layer = None
    with transaction.atomic():
        if not metadata_upload_form:
            layer, created = Layer.objects.get_or_create(
                name=valid_name,
                defaults=defaults
            )
        elif identifier:
            layer, created = Layer.objects.get_or_create(
                uuid=identifier,
                defaults=defaults
            )
        else:
            layer = Layer.objects.get(alternate=title)
            created = False
            overwrite = True

    # Delete the old layers if overwrite is true
    # and the layer was not just created
    # process the layer again after that by
    # doing a layer.save()
    if not created and overwrite:
        if layer.upload_session:
            layer.upload_session.layerfile_set.all().delete()
        if upload_session:
            layer.upload_session = upload_session

        # update with new information
        db_layer = Layer.objects.filter(id=layer.id)

        defaults['upload_session'] = upload_session
        defaults['title'] = defaults.get('title', None) or layer.title
        defaults['abstract'] = defaults.get('abstract', None) or layer.abstract
        defaults['bbox_x0'] = defaults.get('bbox_x0', None) or layer.bbox_x0
        defaults['bbox_x1'] = defaults.get('bbox_x1', None) or layer.bbox_x1
        defaults['bbox_y0'] = defaults.get('bbox_y0', None) or layer.bbox_y0
        defaults['bbox_y1'] = defaults.get('bbox_y1', None) or layer.bbox_y1
        defaults['is_approved'] = defaults.get('is_approved', None) or layer.is_approved
        defaults['is_published'] = defaults.get('is_published', None) or layer.is_published
        defaults['license'] = defaults.get('license', None) or layer.license
        defaults['category'] = defaults.get('category', None) or layer.category

        db_layer.update(**defaults)
        layer.refresh_from_db()

        # Pass the parameter overwrite to tell whether the
        # geoserver_post_save_signal should upload the new file or not
        layer.overwrite = overwrite

        # Blank out the store if overwrite is true.
        # geoserver_post_save_signal should upload the new file if needed
        layer.store = ''
        layer.save()

        # set SLD
        # if 'sld' in files:
        #     sld = None
        #     with open(files['sld']) as f:
        #         sld = f.read()
        #     if sld:
        #         set_layer_style(layer, layer.alternate, sld, base_file=files['sld'])

    # Assign the keywords (needs to be done after saving)
    keywords = list(set(keywords))
    if keywords:
        if not layer.keywords:
            layer.keywords = keywords
        else:
            layer.keywords.add(*keywords)

    # Assign the regions (needs to be done after saving)
    regions_resolved = list(set(regions_resolved))
    if regions_resolved:
        if not layer.regions:
            layer.regions = regions_resolved
        else:
            layer.regions.clear()
            layer.regions.add(*regions_resolved)

    # Assign and save the charset using the Layer class' object (layer)
    if charset != 'UTF-8':
        layer.charset = charset
        layer.save()

    to_update = {}
    if defaults.get('title', title) is not None:
        to_update['title'] = defaults.get('title', title)

    if defaults.get('abstract', abstract) is not None:
        to_update['abstract'] = defaults.get('abstract', abstract)

    if defaults.get('date', date) is not None:
        to_update['date'] = defaults.get('date',
                                         datetime.strptime(date, '%Y-%m-%d %H:%M:%S') if date else None)

    if defaults.get('license', license) is not None:
        to_update['license'] = defaults.get('license', license)

    if defaults.get('category', category) is not None:
        to_update['category'] = defaults.get('category', category)

    # Update ResourceBase
    if to_update:
        ResourceBase.objects.filter(
            id=layer.resourcebase_ptr.id).update(
            **to_update)
        Layer.objects.filter(id=layer.id).update(**to_update)

        # Refresh from DB
        layer.refresh_from_db()

    return layer
Example 2
0
def file_upload(filename,
                name=None,
                user=None,
                title=None,
                abstract=None,
                license=None,
                category=None,
                keywords=None,
                regions=None,
                date=None,
                skip=True,
                overwrite=False,
                charset='UTF-8',
                is_approved=True,
                is_published=True,
                metadata_uploaded_preserve=False,
                metadata_upload_form=False):
    """Saves a layer in GeoNode asking as little information as possible.
       Only filename is required, user and title are optional.

    :param filename: path to the base file of the layer being uploaded.
    :param name: layer name; derived from the title if omitted.
    :param user: user performing the upload (resolved via get_valid_user).
    :param title: human-readable title; defaults to a prettified basename.
    :param abstract: free-text abstract for the layer.
    :param license: license name/abbreviation/url/description to match.
    :param category: topic category identifier or description to match.
    :param keywords: keywords to attach to the layer (defaults to empty).
    :param regions: region names to attach to the layer (defaults to empty).
    :param date: layer date as a '%Y-%m-%d %H:%M:%S' string.
    :param skip: unused here; kept for interface compatibility.
    :param overwrite: replace an existing layer with the same name.
    :param charset: character set of the uploaded data.
    :param is_approved: initial approval flag (may be forced to False by
        the RESOURCE_PUBLISHING / ADMIN_MODERATE_UPLOADS settings).
    :param is_published: initial publication flag (same caveat as above).
    :param metadata_uploaded_preserve: keep the uploaded XML verbatim.
    :param metadata_upload_form: look the layer up by metadata identifier.
    :return: Uploaded layer
    :rtype: Layer
    """
    if keywords is None:
        keywords = []
    if regions is None:
        regions = []

    # Get a valid user
    theuser = get_valid_user(user)

    # Create a new upload session
    upload_session = UploadSession.objects.create(user=theuser)

    # Get all the files uploaded with the layer
    files = get_files(filename)

    # Set a default title that looks nice ...
    if title is None:
        basename = os.path.splitext(os.path.basename(filename))[0]
        title = basename.title().replace('_', ' ')

    # Create a name from the title if it is not passed.
    if name is None:
        name = slugify(title).replace('-', '_')
    elif not overwrite:
        name = slugify(name)  # assert that name is slugified

    # Resolve the license against any of its textual representations;
    # an ambiguous or unknown value falls back to no license.
    if license is not None:
        licenses = License.objects.filter(
            Q(name__iexact=license) |
            Q(abbreviation__iexact=license) |
            Q(url__iexact=license) |
            Q(description__iexact=license))
        if len(licenses) == 1:
            license = licenses[0]
        else:
            license = None

    # Resolve the topic category the same way; category resolution is
    # best effort, so failures are swallowed (narrowed from BaseException
    # so KeyboardInterrupt/SystemExit still propagate).
    if category is not None:
        try:
            categories = TopicCategory.objects.filter(
                Q(identifier__iexact=category) |
                Q(gn_description__iexact=category))
            if len(categories) == 1:
                category = categories[0]
            else:
                category = None
        except Exception:
            pass

    # Generate a name that is not taken if overwrite is False.
    valid_name = get_valid_layer_name(name, overwrite)

    # Add them to the upload session (new file fields are created).
    assigned_name = None
    for type_name, fn in files.items():
        with open(fn, 'rb') as f:
            upload_session.layerfile_set.create(
                name=type_name, file=File(
                    f, name='%s.%s' %
                    (assigned_name or valid_name, type_name)))
            # save the system assigned name for the remaining files
            if not assigned_name:
                the_file = upload_session.layerfile_set.all()[0].file.name
                assigned_name = os.path.splitext(os.path.basename(the_file))[0]

    # Get a bounding box (the previously computed `srid_url` was never
    # used, so it has been dropped)
    bbox_x0, bbox_x1, bbox_y0, bbox_y1, srid = get_bbox(filename)

    # by default, if RESOURCE_PUBLISHING=True then layer.is_published
    # must be set to False
    if not overwrite:
        if settings.RESOURCE_PUBLISHING or settings.ADMIN_MODERATE_UPLOADS:
            is_approved = False
            is_published = False

    defaults = {
        'upload_session': upload_session,
        'title': title,
        'abstract': abstract,
        # NOTE(review): 'owner' is the raw `user` argument, not the resolved
        # `theuser` used for the upload session -- confirm this is intended.
        'owner': user,
        'charset': charset,
        'bbox_x0': bbox_x0,
        'bbox_x1': bbox_x1,
        'bbox_y0': bbox_y0,
        'bbox_y1': bbox_y1,
        'srid': srid,
        'is_approved': is_approved,
        'is_published': is_published,
        'license': license,
        'category': category
    }

    # set metadata.  `identifier` must be defined even when no XML sidecar
    # was uploaded because it is consulted below when metadata_upload_form
    # is True (previously this path raised NameError).
    identifier = None
    if 'xml' in files:
        with open(files['xml']) as f:
            xml_file = f.read()

        defaults['metadata_uploaded'] = True
        defaults['metadata_uploaded_preserve'] = metadata_uploaded_preserve

        # get model properties from XML
        identifier, vals, regions, keywords = set_metadata(xml_file)

        if defaults['metadata_uploaded_preserve']:
            defaults['metadata_xml'] = xml_file
            defaults['uuid'] = identifier

        for key, value in vals.items():
            if key == 'spatial_representation_type':
                value = SpatialRepresentationType(identifier=value)
            elif key == 'topic_category':
                value, created = TopicCategory.objects.get_or_create(
                    identifier=value.lower(),
                    defaults={'description': '', 'gn_description': value})
                key = 'category'
                defaults[key] = value
            else:
                defaults[key] = value

    regions_resolved, regions_unresolved = resolve_regions(regions)
    keywords.extend(regions_unresolved)

    if getattr(settings, 'NLP_ENABLED', False):
        try:
            from geonode.contrib.nlp.utils import nlp_extract_metadata_dict
            nlp_metadata = nlp_extract_metadata_dict({
                'title': defaults.get('title', None),
                'abstract': defaults.get('abstract', None),
                'purpose': defaults.get('purpose', None)})
            if nlp_metadata:
                regions_resolved.extend(nlp_metadata.get('regions', []))
                keywords.extend(nlp_metadata.get('keywords', []))
        except Exception:
            # NLP enrichment is best effort; narrowed from BaseException.
            logger.error("NLP extraction failed.")

    # If it is a vector file, create the layer in postgis.
    if is_vector(filename):
        defaults['storeType'] = 'dataStore'

    # If it is a raster file, get the resolution.
    if is_raster(filename):
        defaults['storeType'] = 'coverageStore'

    # Create a Django object.  (The former `try: ... except BaseException:
    # raise` wrapper was a no-op and has been removed.)
    created = False
    layer = None
    with transaction.atomic():
        if overwrite:
            try:
                layer = Layer.objects.get(name=valid_name)
            except Layer.DoesNotExist:
                layer = None
        if not layer:
            if not metadata_upload_form:
                layer, created = Layer.objects.get_or_create(
                    name=valid_name,
                    workspace=settings.DEFAULT_WORKSPACE,
                    defaults=defaults
                )
            elif identifier:
                layer, created = Layer.objects.get_or_create(
                    uuid=identifier,
                    defaults=defaults
                )
            # NOTE(review): with metadata_upload_form=True and no XML
            # identifier, `layer` stays None and the code below fails --
            # confirm callers never hit this combination.

    # Delete the old layers if overwrite is true
    # and the layer was not just created
    # process the layer again after that by
    # doing a layer.save()
    if not created and overwrite:
        # update with new information, falling back to the existing
        # layer's values where nothing new was supplied
        defaults['upload_session'] = upload_session
        defaults['title'] = defaults.get('title', None) or layer.title
        defaults['abstract'] = defaults.get('abstract', None) or layer.abstract
        defaults['bbox_x0'] = defaults.get('bbox_x0', None) or layer.bbox_x0
        defaults['bbox_x1'] = defaults.get('bbox_x1', None) or layer.bbox_x1
        defaults['bbox_y0'] = defaults.get('bbox_y0', None) or layer.bbox_y0
        defaults['bbox_y1'] = defaults.get('bbox_y1', None) or layer.bbox_y1
        defaults['is_approved'] = defaults.get(
            'is_approved', is_approved) or layer.is_approved
        defaults['is_published'] = defaults.get(
            'is_published', is_published) or layer.is_published
        defaults['license'] = defaults.get('license', None) or layer.license
        defaults['category'] = defaults.get('category', None) or layer.category

        try:
            Layer.objects.filter(id=layer.id).update(**defaults)
            layer.refresh_from_db()
        except Layer.DoesNotExist:
            import traceback
            tb = traceback.format_exc()
            logger.error(tb)
            raise

        # Pass the parameter overwrite to tell whether the
        # geoserver_post_save_signal should upload the new file or not
        layer.overwrite = overwrite

        # Blank out the store if overwrite is true.
        # geoserver_post_save_signal should upload the new file if needed
        layer.store = '' if overwrite else layer.store
        layer.save()

        if upload_session:
            upload_session.resource = layer
            upload_session.processed = True
            upload_session.save()

        # set SLD
        # if 'sld' in files:
        #     sld = None
        #     with open(files['sld']) as f:
        #         sld = f.read()
        #     if sld:
        #         set_layer_style(layer, layer.alternate, sld, base_file=files['sld'])

    # Assign the keywords (needs to be done after saving)
    keywords = list(set(keywords))
    if keywords:
        if not layer.keywords:
            layer.keywords = keywords
        else:
            layer.keywords.add(*keywords)

    # Assign the regions (needs to be done after saving)
    regions_resolved = list(set(regions_resolved))
    if regions_resolved:
        if not layer.regions:
            layer.regions = regions_resolved
        else:
            layer.regions.clear()
            layer.regions.add(*regions_resolved)

    # Assign and save the charset using the Layer class' object (layer)
    if charset != 'UTF-8':
        layer.charset = charset
        layer.save()

    to_update = {}
    if defaults.get('title', title) is not None:
        to_update['title'] = defaults.get('title', title)
    if defaults.get('abstract', abstract) is not None:
        to_update['abstract'] = defaults.get('abstract', abstract)
    if defaults.get('date', date) is not None:
        to_update['date'] = defaults.get('date',
                                         datetime.strptime(date, '%Y-%m-%d %H:%M:%S') if date else None)
    if defaults.get('license', license) is not None:
        to_update['license'] = defaults.get('license', license)
    if defaults.get('category', category) is not None:
        to_update['category'] = defaults.get('category', category)

    # Update ResourceBase
    if to_update:
        try:
            ResourceBase.objects.filter(
                id=layer.resourcebase_ptr.id).update(
                **to_update)
            Layer.objects.filter(id=layer.id).update(**to_update)

            # Refresh from DB
            layer.refresh_from_db()
        except Exception:
            # Best-effort metadata sync; narrowed from BaseException.
            import traceback
            tb = traceback.format_exc()
            logger.error(tb)

    return layer
Example 3
0
def file_upload(filename,
                name=None,
                user=None,
                title=None,
                abstract=None,
                keywords=None,
                category=None,
                regions=None,
                date=None,
                skip=True,
                overwrite=False,
                charset='UTF-8',
                metadata_uploaded_preserve=False):
    """Saves a layer in GeoNode asking as little information as possible.
       Only filename is required, user and title are optional.

    :param filename: path to the base file of the layer being uploaded.
    :param name: layer name; derived from the title if omitted.
    :param user: user performing the upload (resolved via get_valid_user).
    :param title: human-readable title; defaults to a prettified basename.
    :param abstract: free-text abstract for the layer.
    :param keywords: keywords to attach to the layer (defaults to empty).
    :param category: topic category identifier or description to match.
    :param regions: region names to attach to the layer (defaults to empty).
    :param date: layer date as a '%Y-%m-%d %H:%M:%S' string.
    :param skip: unused here; kept for interface compatibility.
    :param overwrite: replace an existing layer with the same name.
    :param charset: character set of the uploaded data.
    :param metadata_uploaded_preserve: keep the uploaded XML verbatim.
    :return: Uploaded layer
    :rtype: Layer
    """
    # Avoid mutable default arguments: the former `keywords=[]` /
    # `regions=[]` defaults were shared across calls and mutated below
    # via keywords.extend(...), leaking keywords between uploads.
    if keywords is None:
        keywords = []
    if regions is None:
        regions = []

    # Get a valid user
    theuser = get_valid_user(user)

    # Create a new upload session
    upload_session = UploadSession.objects.create(user=theuser)

    # Get all the files uploaded with the layer
    files = get_files(filename)

    # Set a default title that looks nice ...
    if title is None:
        basename = os.path.splitext(os.path.basename(filename))[0]
        title = basename.title().replace('_', ' ')

    # Create a name from the title if it is not passed.
    if name is None:
        name = slugify(title).replace('-', '_')

    # Resolve the topic category; an ambiguous or unknown value falls
    # back to no category.
    if category is not None:
        categories = TopicCategory.objects.filter(
            Q(identifier__iexact=category)
            | Q(gn_description__iexact=category))
        if len(categories) == 1:
            category = categories[0]
        else:
            category = None

    # Generate a name that is not taken if overwrite is False.
    valid_name = get_valid_layer_name(name, overwrite)

    # Add them to the upload session (new file fields are created).
    assigned_name = None
    for type_name, fn in files.items():
        with open(fn, 'rb') as f:
            upload_session.layerfile_set.create(
                name=type_name,
                file=File(f,
                          name='%s.%s' %
                          (assigned_name or valid_name, type_name)))
            # save the system assigned name for the remaining files
            if not assigned_name:
                the_file = upload_session.layerfile_set.all()[0].file.name
                assigned_name = os.path.splitext(os.path.basename(the_file))[0]

    # Get a bounding box
    bbox_x0, bbox_x1, bbox_y0, bbox_y1 = get_bbox(filename)

    # by default, if RESOURCE_PUBLISHING=True then layer.is_published
    # must be set to False
    is_published = True
    if settings.RESOURCE_PUBLISHING:
        is_published = False

    defaults = {
        'upload_session': upload_session,
        'title': title,
        'abstract': abstract,
        # NOTE(review): 'owner' is the raw `user` argument, not the resolved
        # `theuser` used for the upload session -- confirm this is intended.
        'owner': user,
        'charset': charset,
        'bbox_x0': bbox_x0,
        'bbox_x1': bbox_x1,
        'bbox_y0': bbox_y0,
        'bbox_y1': bbox_y1,
        'is_published': is_published,
        'category': category
    }

    # set metadata
    if 'xml' in files:
        with open(files['xml']) as f:
            xml_file = f.read()
        defaults['metadata_uploaded'] = True
        defaults['metadata_uploaded_preserve'] = metadata_uploaded_preserve

        # get model properties from XML
        identifier, vals, regions, keywords = set_metadata(xml_file)

        if defaults['metadata_uploaded_preserve']:
            defaults['metadata_xml'] = xml_file
            defaults['uuid'] = identifier

        for key, value in vals.items():
            if key == 'spatial_representation_type':
                value = SpatialRepresentationType(identifier=value)
            elif key == 'topic_category':
                value, created = TopicCategory.objects.get_or_create(
                    identifier=value.lower(),
                    defaults={
                        'description': '',
                        'gn_description': value
                    })
                key = 'category'
                defaults[key] = value
            else:
                defaults[key] = value

    regions_resolved, regions_unresolved = resolve_regions(regions)
    keywords.extend(regions_unresolved)

    if getattr(settings, 'NLP_ENABLED', False):
        try:
            from geonode.contrib.nlp.utils import nlp_extract_metadata_dict
            nlp_metadata = nlp_extract_metadata_dict({
                'title':
                defaults.get('title', None),
                'abstract':
                defaults.get('abstract', None),
                'purpose':
                defaults.get('purpose', None)
            })
            if nlp_metadata:
                regions_resolved.extend(nlp_metadata.get('regions', []))
                keywords.extend(nlp_metadata.get('keywords', []))
        except Exception:
            # NLP enrichment is best effort.  Narrowed from a bare except
            # so KeyboardInterrupt/SystemExit still propagate; parenthesized
            # print keeps this line valid under both Python 2 and 3.
            print("NLP extraction failed.")

    # If it is a vector file, create the layer in postgis.
    if is_vector(filename):
        defaults['storeType'] = 'dataStore'

    # If it is a raster file, get the resolution.
    if is_raster(filename):
        defaults['storeType'] = 'coverageStore'

    # Create a Django object.
    layer, created = Layer.objects.get_or_create(name=valid_name,
                                                 defaults=defaults)

    # Delete the old layers if overwrite is true
    # and the layer was not just created
    # process the layer again after that by
    # doing a layer.save()
    if not created and overwrite:
        layer.upload_session.layerfile_set.all().delete()
        layer.upload_session = upload_session
        # Pass the parameter overwrite to tell whether the
        # geoserver_post_save_signal should upload the new file or not
        layer.overwrite = overwrite
        layer.save()

    # Assign the keywords (needs to be done after saving)
    keywords = list(set(keywords))
    if keywords:
        layer.keywords.add(*keywords)

    # Assign the regions (needs to be done after saving)
    regions_resolved = list(set(regions_resolved))
    if regions_resolved:
        layer.regions.add(*regions_resolved)

    if date is not None:
        layer.date = datetime.strptime(date, '%Y-%m-%d %H:%M:%S')
        layer.save()

    return layer
Example 4
0
def file_upload(filename, name=None, user=None, title=None, abstract=None,
                keywords=None, category=None, regions=None, date=None,
                skip=True, overwrite=False, charset='UTF-8',
                metadata_uploaded_preserve=False):

    """Saves a layer in GeoNode asking as little information as possible.
       Only filename is required, user and title are optional.

    :param filename: path to the base file of the layer being uploaded.
    :param name: layer name; derived from the title if omitted.
    :param user: user performing the upload (resolved via get_valid_user).
    :param title: human-readable title; defaults to a prettified basename.
    :param abstract: free-text abstract for the layer.
    :param keywords: keywords to attach to the layer (defaults to empty).
    :param category: topic category identifier or description to match.
    :param regions: region names to attach to the layer (defaults to empty).
    :param date: layer date as a '%Y-%m-%d %H:%M:%S' string.
    :param skip: unused here; kept for interface compatibility.
    :param overwrite: replace an existing layer with the same name.
    :param charset: character set of the uploaded data.
    :param metadata_uploaded_preserve: keep the uploaded XML verbatim.
    :return: Uploaded layer
    :rtype: Layer
    """
    # Avoid mutable default arguments: the former `keywords=[]` /
    # `regions=[]` defaults were shared across calls and mutated below
    # via keywords.extend(...), leaking keywords between uploads.
    if keywords is None:
        keywords = []
    if regions is None:
        regions = []

    # Get a valid user
    theuser = get_valid_user(user)

    # Create a new upload session
    upload_session = UploadSession.objects.create(user=theuser)

    # Get all the files uploaded with the layer
    files = get_files(filename)

    # Set a default title that looks nice ...
    if title is None:
        basename = os.path.splitext(os.path.basename(filename))[0]
        title = basename.title().replace('_', ' ')

    # Create a name from the title if it is not passed.
    if name is None:
        name = slugify(title).replace('-', '_')

    # Resolve the topic category; an ambiguous or unknown value falls
    # back to no category.
    if category is not None:
        categories = TopicCategory.objects.filter(Q(identifier__iexact=category) | Q(gn_description__iexact=category))
        if len(categories) == 1:
            category = categories[0]
        else:
            category = None

    # Generate a name that is not taken if overwrite is False.
    valid_name = get_valid_layer_name(name, overwrite)

    # Add them to the upload session (new file fields are created).
    assigned_name = None
    for type_name, fn in files.items():
        with open(fn, 'rb') as f:
            upload_session.layerfile_set.create(name=type_name,
                                                file=File(f, name='%s.%s' % (assigned_name or valid_name, type_name)))
            # save the system assigned name for the remaining files
            if not assigned_name:
                the_file = upload_session.layerfile_set.all()[0].file.name
                assigned_name = os.path.splitext(os.path.basename(the_file))[0]

    # Get a bounding box
    bbox_x0, bbox_x1, bbox_y0, bbox_y1 = get_bbox(filename)

    # by default, if RESOURCE_PUBLISHING=True then layer.is_published
    # must be set to False
    is_published = True
    if settings.RESOURCE_PUBLISHING:
        is_published = False

    defaults = {
        'upload_session': upload_session,
        'title': title,
        'abstract': abstract,
        # NOTE(review): 'owner' is the raw `user` argument, not the resolved
        # `theuser` used for the upload session -- confirm this is intended.
        'owner': user,
        'charset': charset,
        'bbox_x0': bbox_x0,
        'bbox_x1': bbox_x1,
        'bbox_y0': bbox_y0,
        'bbox_y1': bbox_y1,
        'is_published': is_published,
        'category': category
    }

    # set metadata
    if 'xml' in files:
        with open(files['xml']) as f:
            xml_file = f.read()
        defaults['metadata_uploaded'] = True
        defaults['metadata_uploaded_preserve'] = metadata_uploaded_preserve

        # get model properties from XML
        identifier, vals, regions, keywords = set_metadata(xml_file)

        if defaults['metadata_uploaded_preserve']:
            defaults['metadata_xml'] = xml_file
            defaults['uuid'] = identifier

        for key, value in vals.items():
            if key == 'spatial_representation_type':
                value = SpatialRepresentationType(identifier=value)
            elif key == 'topic_category':
                value, created = TopicCategory.objects.get_or_create(
                    identifier=value.lower(),
                    defaults={'description': '', 'gn_description': value})
                key = 'category'
                defaults[key] = value
            else:
                defaults[key] = value

    regions_resolved, regions_unresolved = resolve_regions(regions)
    keywords.extend(regions_unresolved)

    if getattr(settings, 'NLP_ENABLED', False):
        try:
            from geonode.contrib.nlp.utils import nlp_extract_metadata_dict
            nlp_metadata = nlp_extract_metadata_dict({
                'title': defaults.get('title', None),
                'abstract': defaults.get('abstract', None),
                'purpose': defaults.get('purpose', None)})
            if nlp_metadata:
                regions_resolved.extend(nlp_metadata.get('regions', []))
                keywords.extend(nlp_metadata.get('keywords', []))
        except Exception:
            # NLP enrichment is best effort.  Narrowed from a bare except
            # so KeyboardInterrupt/SystemExit still propagate; parenthesized
            # print keeps this line valid under both Python 2 and 3.
            print("NLP extraction failed.")

    # If it is a vector file, create the layer in postgis.
    if is_vector(filename):
        defaults['storeType'] = 'dataStore'

    # If it is a raster file, get the resolution.
    if is_raster(filename):
        defaults['storeType'] = 'coverageStore'

    # Create a Django object.
    layer, created = Layer.objects.get_or_create(
        name=valid_name,
        defaults=defaults
    )

    # Delete the old layers if overwrite is true
    # and the layer was not just created
    # process the layer again after that by
    # doing a layer.save()
    if not created and overwrite:
        layer.upload_session.layerfile_set.all().delete()
        layer.upload_session = upload_session
        # Pass the parameter overwrite to tell whether the
        # geoserver_post_save_signal should upload the new file or not
        layer.overwrite = overwrite
        layer.save()

    # Assign the keywords (needs to be done after saving)
    keywords = list(set(keywords))
    if keywords:
        layer.keywords.add(*keywords)

    # Assign the regions (needs to be done after saving)
    regions_resolved = list(set(regions_resolved))
    if regions_resolved:
        layer.regions.add(*regions_resolved)

    if date is not None:
        layer.date = datetime.strptime(date, '%Y-%m-%d %H:%M:%S')
        layer.save()

    return layer
Example 5
0
def final_step(upload_session, user):
    """Finalize a gsimporter upload session.

    Reloads and commits the importer session, creates (or falls back to)
    an SLD style for the published GeoServer layer, then creates or
    updates the matching GeoNode ``Layer`` record — including ImageMosaic
    handling, XML metadata ingestion, keyword/region assignment,
    permissions and time info.

    :param upload_session: upload-session object carrying the importer
        session, the uploaded files and the user-selected options.
    :param user: Django user that becomes owner and point of contact.
    :return: the saved ``Layer`` instance.
    :raises LayerNotReady: when the expected layer is not found in GeoServer.
    :raises Exception: on an unknown importer task state.
    """
    from geonode.geoserver.helpers import get_sld_for
    import_session = upload_session.import_session
    _log('Reloading session %s to check validity', import_session.id)
    import_session = import_session.reload()
    upload_session.import_session = import_session

    # the importer chooses an available featuretype name late in the game need
    # to verify the resource.name otherwise things will fail.  This happens
    # when the same data is uploaded a second time and the default name is
    # chosen

    cat = gs_catalog
    cat._cache.clear()

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py

    task = import_session.tasks[0]

    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name

    _log('Getting from catalog [%s]', name)
    publishing = cat.get_layer(name)

    # Drive the importer according to the state it reports.
    if import_session.state == 'INCOMPLETE':
        if task.state != 'ERROR':
            raise Exception('unknown item state: %s' % task.state)
    elif import_session.state == 'READY':
        import_session.commit()
    elif import_session.state == 'PENDING':
        if task.state == 'READY':
            # if not task.data.format or task.data.format != 'Shapefile':
            import_session.commit()

    if not publishing:
        raise LayerNotReady(
            "Expected to find layer named '%s' in geoserver" %
            name)

    _log('Creating style for [%s]', name)
    # get_files will not find the sld if it doesn't match the base name
    # so we've worked around that in the view - if provided, it will be here
    if upload_session.import_sld_file:
        _log('using provided sld file')
        base_file = upload_session.base_file
        sld_file = base_file[0].sld_files[0]

        f = None
        if os.path.isfile(sld_file):
            try:
                f = open(sld_file, 'r')
            except BaseException:
                pass
        elif upload_session.tempdir and os.path.exists(upload_session.tempdir):
            tempdir = upload_session.tempdir
            if os.path.isfile(os.path.join(tempdir, sld_file)):
                try:
                    f = open(os.path.join(tempdir, sld_file), 'r')
                except BaseException:
                    pass

        if f:
            sld = f.read()
            f.close()
        else:
            # Fall back to a style generated from the published resource.
            sld = get_sld_for(cat, publishing)
    else:
        sld = get_sld_for(cat, publishing)

    style = None
    if sld is not None:
        try:
            cat.create_style(
                name,
                sld,
                raw=True,
                workspace=settings.DEFAULT_WORKSPACE)
        except geoserver.catalog.ConflictingDataError as e:
            msg = 'There was already a style named %s in GeoServer, try using another name: "%s"' % (
                name, str(e))
            # Retry under a suffixed name when the plain name is taken.
            try:
                cat.create_style(
                    name + '_layer',
                    sld,
                    raw=True,
                    workspace=settings.DEFAULT_WORKSPACE)
            except geoserver.catalog.ConflictingDataError as e:
                msg = 'There was already a style named %s in GeoServer, cannot overwrite: "%s"' % (
                    name, str(e))
                logger.error(msg)
                # NOTE(review): mutates the caught exception's args without
                # re-raising it — presumably only for later inspection; confirm.
                e.args = (msg,)

        if style is None:
            try:
                style = cat.get_style(
                    name, workspace=settings.DEFAULT_WORKSPACE) or cat.get_style(name)
            except BaseException:
                logger.warn('Could not retreive the Layer default Style name')
                # what are we doing with this var?
                msg = 'No style could be created for the layer, falling back to POINT default one'
                try:
                    style = cat.get_style(name + '_layer', workspace=settings.DEFAULT_WORKSPACE) or \
                        cat.get_style(name + '_layer')
                except BaseException:
                    # Last resort: GeoServer's built-in 'point' style.
                    style = cat.get_style('point')
                    logger.warn(msg)
                    # NOTE(review): relies on `e` still being bound from an
                    # earlier handler (Python 2 except-variable scoping);
                    # confirm this is intentional.
                    e.args = (msg,)

        if style:
            publishing.default_style = style
            _log('default style set to %s', name)
            cat.save(publishing)

    _log('Creating Django record for [%s]', name)
    target = task.target
    alternate = task.get_target_layer_name()
    layer_uuid = str(uuid.uuid1())

    title = upload_session.layer_title
    abstract = upload_session.layer_abstract

    # @todo hacking - any cached layers might cause problems (maybe
    # delete hook on layer should fix this?)
    cat._cache.clear()

    # Is it a regular file or an ImageMosaic?
    # if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
    if upload_session.mosaic:
        import pytz
        import datetime
        from geonode.layers.models import TIME_REGEX_FORMAT

        # llbbox = publishing.resource.latlon_bbox
        start = None
        end = None
        if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
            has_time = True
            start = datetime.datetime.strptime(upload_session.mosaic_time_value,
                                               TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
            # Store the mosaic granule time as UTC.
            start = pytz.utc.localize(start, is_dst=False)
            end = start
        else:
            has_time = False

        if not upload_session.append_to_mosaic_opts:
            saved_layer, created = Layer.objects.get_or_create(
                name=task.layer.name,
                defaults=dict(store=target.name,
                              storeType=target.store_type,
                              alternate=alternate,
                              workspace=target.workspace_name,
                              title=title,
                              uuid=layer_uuid,
                              abstract=abstract or '',
                              owner=user,),
                temporal_extent_start=start,
                temporal_extent_end=end,
                is_mosaic=True,
                has_time=has_time,
                has_elevation=False,
                time_regex=upload_session.mosaic_time_regex
            )
        else:
            # saved_layer = Layer.objects.filter(name=upload_session.append_to_mosaic_name)
            # created = False
            saved_layer, created = Layer.objects.get_or_create(
                name=upload_session.append_to_mosaic_name)
            try:
                # Widen the existing mosaic's temporal extent to include the
                # newly appended granule.
                if saved_layer.temporal_extent_start and end:
                    if pytz.utc.localize(
                            saved_layer.temporal_extent_start,
                            is_dst=False) < end:
                        saved_layer.temporal_extent_end = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_end=end)
                    else:
                        saved_layer.temporal_extent_start = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_start=end)
            except Exception as e:
                _log(
                    'There was an error updating the mosaic temporal extent: ' +
                    str(e))
    else:
        _has_time = (True if upload_session.time and upload_session.time_info and
                     upload_session.time_transforms else False)
        saved_layer, created = Layer.objects.get_or_create(
            name=task.layer.name,
            defaults=dict(store=target.name,
                          storeType=target.store_type,
                          alternate=alternate,
                          workspace=target.workspace_name,
                          title=title,
                          uuid=layer_uuid,
                          abstract=abstract or '',
                          owner=user,),
            has_time=_has_time
        )

    # Should we throw a clearer error here?
    assert saved_layer is not None

    # @todo if layer was not created, need to ensure upload target is
    # same as existing target

    _log('layer was created : %s', created)

    if created:
        saved_layer.set_default_permissions()
        saved_layer.handle_moderated_uploads()

    # Create the points of contact records for the layer
    _log('Creating points of contact records for [%s]', name)
    saved_layer.poc = user
    saved_layer.metadata_author = user

    # look for xml
    defaults = {}
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        saved_layer.metadata_uploaded = True
        # get model properties from XML
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            zf = zipfile.ZipFile(archive, 'r')
            zf.extract(xml_file[0], os.path.dirname(archive))
            # Assign the absolute path to this file
            xml_file[0] = os.path.dirname(archive) + '/' + xml_file[0]
        identifier, vals, regions, keywords = set_metadata(
            open(xml_file[0]).read())

        saved_layer.metadata_xml = xml_file[0]
        # Region names that cannot be resolved are kept as plain keywords.
        regions_resolved, regions_unresolved = resolve_regions(regions)
        keywords.extend(regions_unresolved)

        if getattr(settings, 'NLP_ENABLED', False):
            try:
                from geonode.contrib.nlp.utils import nlp_extract_metadata_dict
                nlp_metadata = nlp_extract_metadata_dict({
                    'title': defaults.get('title', None),
                    'abstract': defaults.get('abstract', None),
                    'purpose': defaults.get('purpose', None)})
                if nlp_metadata:
                    regions_resolved.extend(nlp_metadata.get('regions', []))
                    keywords.extend(nlp_metadata.get('keywords', []))
            except BaseException:
                print "NLP extraction failed."

        # Assign the regions (needs to be done after saving)
        regions_resolved = list(set(regions_resolved))
        if regions_resolved:
            if len(regions_resolved) > 0:
                if not saved_layer.regions:
                    saved_layer.regions = regions_resolved
                else:
                    saved_layer.regions.clear()
                    saved_layer.regions.add(*regions_resolved)

        # Assign the keywords (needs to be done after saving)
        keywords = list(set(keywords))
        if keywords:
            if len(keywords) > 0:
                if not saved_layer.keywords:
                    saved_layer.keywords = keywords
                else:
                    saved_layer.keywords.add(*keywords)

        # set model properties
        for key, value in vals.items():
            if key == 'spatial_representation_type':
                value = SpatialRepresentationType(identifier=value)
            elif key == 'topic_category':
                value, created = TopicCategory.objects.get_or_create(
                    identifier=value.lower(),
                    defaults={'description': '', 'gn_description': value})
                key = 'category'
                defaults[key] = value
            else:
                defaults[key] = value

        # update with new information
        db_layer = Layer.objects.filter(id=saved_layer.id)
        db_layer.update(**defaults)
        saved_layer.refresh_from_db()

        # Pass the parameter overwrite to tell whether the
        # geoserver_post_save_signal should upload the new file or not
        saved_layer.overwrite = True
        saved_layer.save()

    # look for SLD
    sld_file = upload_session.base_file[0].sld_files
    if sld_file:
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            zf = zipfile.ZipFile(archive, 'r')
            zf.extract(sld_file[0], os.path.dirname(archive))
            # Assign the absolute path to this file
            sld_file[0] = os.path.dirname(archive) + '/' + sld_file[0]
        sld = open(sld_file[0]).read()
        set_layer_style(
            saved_layer,
            saved_layer.alternate,
            sld,
            base_file=sld_file[0])

    # Set default permissions on the newly created layer
    # FIXME: Do this as part of the post_save hook

    permissions = upload_session.permissions
    if created and permissions is not None:
        _log('Setting default permissions for [%s]', name)
        saved_layer.set_permissions(permissions)

    if upload_session.tempdir and os.path.exists(upload_session.tempdir):
        shutil.rmtree(upload_session.tempdir)

    upload = Upload.objects.get(import_id=import_session.id)
    upload.layer = saved_layer
    upload.complete = True
    upload.save()

    if upload_session.time_info:
        set_time_info(saved_layer, **upload_session.time_info)

    signals.upload_complete.send(sender=final_step, layer=saved_layer)

    saved_layer.save()
    return saved_layer
# Esempio n. 6 (example-collection separator — commented out so the file parses)
# 0
def final_step(upload_session, user):
    """Finalize a gsimporter upload session (variant that also records files).

    Same flow as the sibling ``final_step``: reloads and commits the
    importer session, creates an SLD style for the published GeoServer
    layer, and creates or updates the matching GeoNode ``Layer`` record.
    This variant additionally creates a GeoNode ``UploadSession`` and
    attaches all uploaded files to it, and reloads the GeoServer catalog
    before returning.

    :param upload_session: upload-session object carrying the importer
        session, the uploaded files and the user-selected options.
    :param user: Django user that becomes owner and point of contact.
    :return: the saved ``Layer`` instance.
    :raises LayerNotReady: when the expected layer is not found in GeoServer.
    :raises Exception: on an unknown importer task state.
    """
    from geonode.geoserver.helpers import get_sld_for
    import_session = upload_session.import_session
    _log('Reloading session %s to check validity', import_session.id)
    import_session = import_session.reload()
    upload_session.import_session = import_session

    # the importer chooses an available featuretype name late in the game need
    # to verify the resource.name otherwise things will fail.  This happens
    # when the same data is uploaded a second time and the default name is
    # chosen

    cat = gs_catalog
    cat._cache.clear()

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py

    task = import_session.tasks[0]

    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name

    _log('Getting from catalog [%s]', name)
    publishing = cat.get_layer(name)

    # Drive the importer according to the state it reports.
    if import_session.state == 'INCOMPLETE':
        if task.state != 'ERROR':
            raise Exception('unknown item state: %s' % task.state)
    elif import_session.state == 'READY':
        import_session.commit()
    elif import_session.state == 'PENDING':
        if task.state == 'READY':
            # if not task.data.format or task.data.format != 'Shapefile':
            import_session.commit()

    if not publishing:
        raise LayerNotReady(
            "Expected to find layer named '%s' in geoserver" %
            name)

    _log('Creating style for [%s]', name)
    # get_files will not find the sld if it doesn't match the base name
    # so we've worked around that in the view - if provided, it will be here
    if upload_session.import_sld_file:
        _log('using provided sld file')
        base_file = upload_session.base_file
        sld_file = base_file[0].sld_files[0]

        f = None
        if os.path.isfile(sld_file):
            try:
                f = open(sld_file, 'r')
            except BaseException:
                pass
        elif upload_session.tempdir and os.path.exists(upload_session.tempdir):
            tempdir = upload_session.tempdir
            if os.path.isfile(os.path.join(tempdir, sld_file)):
                try:
                    f = open(os.path.join(tempdir, sld_file), 'r')
                except BaseException:
                    pass

        if f:
            sld = f.read()
            f.close()
        else:
            # Fall back to a style generated from the published resource.
            sld = get_sld_for(cat, publishing)
    else:
        sld = get_sld_for(cat, publishing)

    style = None
    if sld is not None:
        try:
            cat.create_style(
                name,
                sld,
                raw=True,
                workspace=settings.DEFAULT_WORKSPACE)
        except geoserver.catalog.ConflictingDataError as e:
            msg = 'There was already a style named %s in GeoServer, try using another name: "%s"' % (
                name, str(e))
            # Retry under a suffixed name when the plain name is taken.
            try:
                cat.create_style(
                    name + '_layer',
                    sld,
                    raw=True,
                    workspace=settings.DEFAULT_WORKSPACE)
            except geoserver.catalog.ConflictingDataError as e:
                msg = 'There was already a style named %s in GeoServer, cannot overwrite: "%s"' % (
                    name, str(e))
                logger.error(msg)
                # NOTE(review): mutates the caught exception's args without
                # re-raising it — presumably only for later inspection; confirm.
                e.args = (msg,)

        if style is None:
            try:
                style = cat.get_style(
                    name, workspace=settings.DEFAULT_WORKSPACE) or cat.get_style(name)
            except BaseException:
                logger.warn('Could not retreive the Layer default Style name')
                # what are we doing with this var?
                msg = 'No style could be created for the layer, falling back to POINT default one'
                try:
                    style = cat.get_style(name + '_layer', workspace=settings.DEFAULT_WORKSPACE) or \
                        cat.get_style(name + '_layer')
                except BaseException:
                    # Last resort: GeoServer's built-in 'point' style.
                    style = cat.get_style('point')
                    logger.warn(msg)
                    # NOTE(review): relies on `e` still being bound from an
                    # earlier handler (Python 2 except-variable scoping);
                    # confirm this is intentional.
                    e.args = (msg,)

        if style:
            publishing.default_style = style
            _log('default style set to %s', name)
            cat.save(publishing)

    _log('Creating Django record for [%s]', name)
    target = task.target
    alternate = task.get_target_layer_name()
    layer_uuid = str(uuid.uuid1())

    title = upload_session.layer_title
    abstract = upload_session.layer_abstract

    # @todo hacking - any cached layers might cause problems (maybe
    # delete hook on layer should fix this?)
    cat._cache.clear()

    # Is it a regular file or an ImageMosaic?
    # if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
    if upload_session.mosaic:
        import pytz
        import datetime
        from geonode.layers.models import TIME_REGEX_FORMAT

        # llbbox = publishing.resource.latlon_bbox
        start = None
        end = None
        if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
            has_time = True
            start = datetime.datetime.strptime(upload_session.mosaic_time_value,
                                               TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
            # Store the mosaic granule time as UTC.
            start = pytz.utc.localize(start, is_dst=False)
            end = start
        else:
            has_time = False

        if not upload_session.append_to_mosaic_opts:
            saved_layer, created = Layer.objects.get_or_create(
                name=task.layer.name,
                defaults=dict(store=target.name,
                              storeType=target.store_type,
                              alternate=alternate,
                              workspace=target.workspace_name,
                              title=title,
                              uuid=layer_uuid,
                              abstract=abstract or '',
                              owner=user,),
                temporal_extent_start=start,
                temporal_extent_end=end,
                is_mosaic=True,
                has_time=has_time,
                has_elevation=False,
                time_regex=upload_session.mosaic_time_regex
            )
        else:
            # saved_layer = Layer.objects.filter(name=upload_session.append_to_mosaic_name)
            # created = False
            saved_layer, created = Layer.objects.get_or_create(
                name=upload_session.append_to_mosaic_name)
            try:
                # Widen the existing mosaic's temporal extent to include the
                # newly appended granule.
                if saved_layer.temporal_extent_start and end:
                    if pytz.utc.localize(
                            saved_layer.temporal_extent_start,
                            is_dst=False) < end:
                        saved_layer.temporal_extent_end = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_end=end)
                    else:
                        saved_layer.temporal_extent_start = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_start=end)
            except Exception as e:
                _log(
                    'There was an error updating the mosaic temporal extent: ' +
                    str(e))
    else:
        _has_time = (True if upload_session.time and upload_session.time_info and
                     upload_session.time_transforms else False)
        saved_layer, created = Layer.objects.get_or_create(
            name=task.layer.name,
            defaults=dict(store=target.name,
                          storeType=target.store_type,
                          alternate=alternate,
                          workspace=target.workspace_name,
                          title=title,
                          uuid=layer_uuid,
                          abstract=abstract or '',
                          owner=user,),
            has_time=_has_time
        )

    # Should we throw a clearer error here?
    assert saved_layer is not None

    # Create a new upload session
    geonode_upload_session = UploadSession.objects.create(resource=saved_layer, user=user)

    # Add them to the upload session (new file fields are created).
    assigned_name = None

    def _store_file(saved_layer,
                    geonode_upload_session,
                    base_file,
                    assigned_name,
                    base=False):
        """Attach one uploaded file to the GeoNode upload session.

        Returns the (possibly system-assigned) name to reuse for the
        remaining sibling files.
        """
        with open(base_file, 'rb') as f:
            file_name, type_name = os.path.splitext(os.path.basename(base_file))
            geonode_upload_session.layerfile_set.create(
                name=file_name,
                base=base,
                file=File(
                    f, name='%s%s' %
                    (assigned_name or saved_layer.name, type_name)))
            # save the system assigned name for the remaining files
            if not assigned_name:
                the_file = geonode_upload_session.layerfile_set.all()[0].file.name
                assigned_name = os.path.splitext(os.path.basename(the_file))[0]

            return assigned_name

    if upload_session.base_file:
        uploaded_files = upload_session.base_file[0]
        base_file = uploaded_files.base_file
        aux_files = uploaded_files.auxillary_files
        sld_files = uploaded_files.sld_files
        xml_files = uploaded_files.xml_files

        # Store the base file first so its assigned name can be reused for
        # every auxiliary/SLD/XML file below.
        assigned_name = _store_file(saved_layer,
                                    geonode_upload_session,
                                    base_file,
                                    assigned_name,
                                    base=True)

        for _f in aux_files:
            _store_file(saved_layer,
                        geonode_upload_session,
                        _f,
                        assigned_name)

        for _f in sld_files:
            _store_file(saved_layer,
                        geonode_upload_session,
                        _f,
                        assigned_name)

        for _f in xml_files:
            _store_file(saved_layer,
                        geonode_upload_session,
                        _f,
                        assigned_name)

    # @todo if layer was not created, need to ensure upload target is
    # same as existing target

    _log('layer was created : %s', created)

    if created:
        saved_layer.set_default_permissions()
        saved_layer.handle_moderated_uploads()

    # Create the points of contact records for the layer
    _log('Creating points of contact records for [%s]', name)
    saved_layer.poc = user
    saved_layer.metadata_author = user

    # look for xml
    defaults = {}
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        saved_layer.metadata_uploaded = True
        # get model properties from XML
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            zf = zipfile.ZipFile(archive, 'r')
            zf.extract(xml_file[0], os.path.dirname(archive))
            # Assign the absolute path to this file
            xml_file[0] = os.path.dirname(archive) + '/' + xml_file[0]
        identifier, vals, regions, keywords = set_metadata(
            open(xml_file[0]).read())

        saved_layer.metadata_xml = xml_file[0]
        # Region names that cannot be resolved are kept as plain keywords.
        regions_resolved, regions_unresolved = resolve_regions(regions)
        keywords.extend(regions_unresolved)

        if getattr(settings, 'NLP_ENABLED', False):
            try:
                from geonode.contrib.nlp.utils import nlp_extract_metadata_dict
                nlp_metadata = nlp_extract_metadata_dict({
                    'title': defaults.get('title', None),
                    'abstract': defaults.get('abstract', None),
                    'purpose': defaults.get('purpose', None)})
                if nlp_metadata:
                    regions_resolved.extend(nlp_metadata.get('regions', []))
                    keywords.extend(nlp_metadata.get('keywords', []))
            except BaseException:
                print "NLP extraction failed."

        # Assign the regions (needs to be done after saving)
        regions_resolved = list(set(regions_resolved))
        if regions_resolved:
            if len(regions_resolved) > 0:
                if not saved_layer.regions:
                    saved_layer.regions = regions_resolved
                else:
                    saved_layer.regions.clear()
                    saved_layer.regions.add(*regions_resolved)

        # Assign the keywords (needs to be done after saving)
        keywords = list(set(keywords))
        if keywords:
            if len(keywords) > 0:
                if not saved_layer.keywords:
                    saved_layer.keywords = keywords
                else:
                    saved_layer.keywords.add(*keywords)

        # set model properties
        for key, value in vals.items():
            if key == 'spatial_representation_type':
                value = SpatialRepresentationType(identifier=value)
            elif key == 'topic_category':
                value, created = TopicCategory.objects.get_or_create(
                    identifier=value.lower(),
                    defaults={'description': '', 'gn_description': value})
                key = 'category'

                defaults[key] = value
            else:
                defaults[key] = value

        # update with new information
        db_layer = Layer.objects.filter(id=saved_layer.id)
        db_layer.update(**defaults)
        saved_layer.refresh_from_db()

        # Pass the parameter overwrite to tell whether the
        # geoserver_post_save_signal should upload the new file or not
        saved_layer.overwrite = True
        saved_layer.save()

    # look for SLD
    sld_file = upload_session.base_file[0].sld_files
    if sld_file:
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            zf = zipfile.ZipFile(archive, 'r')
            zf.extract(sld_file[0], os.path.dirname(archive))
            # Assign the absolute path to this file
            sld_file[0] = os.path.dirname(archive) + '/' + sld_file[0]
        sld = open(sld_file[0]).read()
        set_layer_style(
            saved_layer,
            saved_layer.alternate,
            sld,
            base_file=sld_file[0])

    # Set default permissions on the newly created layer
    # FIXME: Do this as part of the post_save hook

    permissions = upload_session.permissions
    if created and permissions is not None:
        _log('Setting default permissions for [%s]', name)
        saved_layer.set_permissions(permissions)

    if upload_session.tempdir and os.path.exists(upload_session.tempdir):
        shutil.rmtree(upload_session.tempdir)

    upload = Upload.objects.get(import_id=import_session.id)
    upload.layer = saved_layer
    upload.complete = True
    upload.save()

    if upload_session.time_info:
        set_time_info(saved_layer, **upload_session.time_info)

    if geonode_upload_session:
        geonode_upload_session.processed = True
        saved_layer.upload_session = geonode_upload_session

    signals.upload_complete.send(sender=final_step, layer=saved_layer)

    geonode_upload_session.save()
    saved_layer.save()

    cat._cache.clear()
    cat.reload()

    return saved_layer
# Esempio n. 7 (example-collection separator — commented out so the file parses)
# 0
def file_upload(
    filename,
    name=None,
    user=None,
    title=None,
    abstract=None,
    keywords=[],
    category=None,
    regions=[],
    skip=True,
    overwrite=False,
    charset="UTF-8",
):
    """Saves a layer in GeoNode asking as little information as possible.
       Only filename is required, user and title are optional.
    """
    # Get a valid user
    theuser = get_valid_user(user)

    # Create a new upload session
    upload_session = UploadSession.objects.create(user=theuser)

    # Get all the files uploaded with the layer
    files = get_files(filename)

    # Set a default title that looks nice ...
    if title is None:
        basename = os.path.splitext(os.path.basename(filename))[0]
        title = basename.title().replace("_", " ")

    # Create a name from the title if it is not passed.
    if name is None:
        name = slugify(title).replace("-", "_")

    if category is not None:
        categories = TopicCategory.objects.filter(Q(identifier__iexact=category) | Q(gn_description__iexact=category))
        if len(categories) == 1:
            category = categories[0]
        else:
            category = None

    # Generate a name that is not taken if overwrite is False.
    valid_name = get_valid_layer_name(name, overwrite)

    # Add them to the upload session (new file fields are created).
    assigned_name = None
    for type_name, fn in files.items():
        with open(fn, "rb") as f:
            upload_session.layerfile_set.create(
                name=type_name, file=File(f, name="%s.%s" % (assigned_name or valid_name, type_name))
            )
            # save the system assigned name for the remaining files
            if not assigned_name:
                the_file = upload_session.layerfile_set.all()[0].file.name
                assigned_name = os.path.splitext(os.path.basename(the_file))[0]

    # Get a bounding box
    bbox_x0, bbox_x1, bbox_y0, bbox_y1 = get_bbox(filename)

    # by default, if RESOURCE_PUBLISHING=True then layer.is_published
    # must be set to False
    is_published = True
    if settings.RESOURCE_PUBLISHING:
        is_published = False

    defaults = {
        "upload_session": upload_session,
        "title": title,
        "abstract": abstract,
        "owner": user,
        "charset": charset,
        "bbox_x0": bbox_x0,
        "bbox_x1": bbox_x1,
        "bbox_y0": bbox_y0,
        "bbox_y1": bbox_y1,
        "is_published": is_published,
        "category": category,
    }

    # set metadata
    if "xml" in files:
        xml_file = open(files["xml"])
        defaults["metadata_uploaded"] = True
        # get model properties from XML
        vals, regions, keywords = set_metadata(xml_file.read())

        for key, value in vals.items():
            if key == "spatial_representation_type":
                value = SpatialRepresentationType(identifier=value)
            elif key == "topic_category":
                value, created = TopicCategory.objects.get_or_create(
                    identifier=value.lower(), defaults={"description": "", "gn_description": value}
                )
                key = "category"
                defaults[key] = value
            else:
                defaults[key] = value

    regions_resolved, regions_unresolved = resolve_regions(regions)
    keywords.extend(regions_unresolved)

    if getattr(settings, "NLP_ENABLED", False):
        try:
            from geonode.contrib.nlp.utils import nlp_extract_metadata_dict

            nlp_metadata = nlp_extract_metadata_dict(
                {
                    "title": defaults.get("title", None),
                    "abstract": defaults.get("abstract", None),
                    "purpose": defaults.get("purpose", None),
                }
            )
            if nlp_metadata:
                regions_resolved.extend(nlp_metadata.get("regions", []))
                keywords.extend(nlp_metadata.get("keywords", []))
        except:
            print "NLP extraction failed."

    # If it is a vector file, create the layer in postgis.
    if is_vector(filename):
        defaults["storeType"] = "dataStore"

    # If it is a raster file, get the resolution.
    if is_raster(filename):
        defaults["storeType"] = "coverageStore"

    # Create a Django object.
    layer, created = Layer.objects.get_or_create(name=valid_name, defaults=defaults)

    # Delete the old layers if overwrite is true
    # and the layer was not just created
    # process the layer again after that by
    # doing a layer.save()
    if not created and overwrite:
        layer.upload_session.layerfile_set.all().delete()
        layer.upload_session = upload_session
        # Pass the parameter overwrite to tell whether the
        # geoserver_post_save_signal should upload the new file or not
        layer.overwrite = overwrite
        layer.save()

    # Assign the keywords (needs to be done after saving)
    keywords = list(set(keywords))
    if keywords:
        if len(keywords) > 0:
            layer.keywords.add(*keywords)

    # Assign the regions (needs to be done after saving)
    regions_resolved = list(set(regions_resolved))
    if regions_resolved:
        if len(regions_resolved) > 0:
            layer.regions.add(*regions_resolved)

    return layer
# Esempio n. 8 (example-collection separator — commented out so the file parses)
# 0
def file_upload(filename, name=None, user=None, title=None, abstract=None,
                keywords=[], category=None, regions=[],
                skip=True, overwrite=False, charset='UTF-8', main_id=None, form_metadata=None): #^^
    """Saves a layer in GeoNode asking as little information as possible.
       Only filename is required, user and title are optional.
    """
    # Get a valid user
    theuser = get_valid_user(user)

    # Create a new upload session
    upload_session = UploadSession.objects.create(user=theuser)

    # Get all the files uploaded with the layer
    files = get_files(filename)

    # Set a default title that looks nice ...
    if title is None:
        basename = os.path.splitext(os.path.basename(filename))[0]
        title = basename.title().replace('_', ' ')

    # Create a name from the title if it is not passed.
    if name is None:
        name = slugify(title).replace('-', '_')

    if category is not None:
        categories = TopicCategory.objects.filter(Q(identifier__iexact=category) | Q(gn_description__iexact=category))
        if len(categories) == 1:
            category = categories[0]
        else:
            category = None

    # Generate a name that is not taken if overwrite is False.
    valid_name = get_valid_layer_name(name, overwrite)

    # Add them to the upload session (new file fields are created).
    assigned_name = None
    for type_name, fn in files.items():
        with open(fn, 'rb') as f:
            if main_id != None: #^^
                layerfile = LayerFile( #^^
                    upload_session=upload_session, #^^
                    name=type_name, #^^
                    file=File(f, name='%s.%s' % (assigned_name or valid_name, type_name)) #^^
                ) #^^
                layerfile.main_id = main_id #^^
                layerfile.save() #^^
            else: #^^
                upload_session.layerfile_set.create(name=type_name,
                                                    file=File(f, name='%s.%s' % (assigned_name or valid_name, type_name)))
            # save the system assigned name for the remaining files
            if not assigned_name:
                the_file = upload_session.layerfile_set.all()[0].file.name
                assigned_name = os.path.splitext(os.path.basename(the_file))[0]

    # Get a bounding box
    bbox_x0, bbox_x1, bbox_y0, bbox_y1 = get_bbox(filename)

    # by default, if RESOURCE_PUBLISHING=True then layer.is_published
    # must be set to False
    is_published = True
    if settings.RESOURCE_PUBLISHING:
        is_published = False

    defaults = {
        'upload_session': upload_session,
        'title': title,
        'abstract': abstract,
        'owner': user,
        'charset': charset,
        'bbox_x0': bbox_x0,
        'bbox_x1': bbox_x1,
        'bbox_y0': bbox_y0,
        'bbox_y1': bbox_y1,
        'is_published': is_published,
        'category': category
    }
    
    #^^ start processing xml metadata file before form metadata fields, moved up here
    xml_metadata_created = None #^^
    # set metadata
    if 'xml' in files:
        xml_file = open(files['xml'])
        defaults['metadata_uploaded'] = True
        # get model properties from XML
        vals, regions, keywords = set_metadata(xml_file.read())

        for key, value in vals.items():
            if key == 'spatial_representation_type':
                value = SpatialRepresentationType(identifier=value)
            elif key == 'topic_category':
                value, created = TopicCategory.objects.get_or_create(
                    identifier=value.lower(),
                    defaults={'description': '', 'gn_description': value})
                key = 'category'
                defaults[key] = value
            else:
                if key == 'date': #^^
                    print 'skipping date xml metadata' #^^
                    xml_metadata_created = value #^^
                else: #^^
                    defaults[key] = value
    #^^ end
    
    #^^ start processing layer metadata
    metadata = None
    
    if form_metadata != None:
        print 'debug form_metadata'
        print form_metadata
        
        metadata = json.loads(form_metadata)
        
        # check if owner exists
        if (metadata['owner'].isdigit()):
            try:
                owner = get_user_model().objects.get(id=metadata['owner'])
                defaults['owner'] = owner
            except get_user_model().DoesNotExist:
                pass
        
        defaults['title'] = metadata['title']
        
        # parse date format from datetimepicker YYYY-MM-DD HH:MM:SS
        if (len(metadata['date'])):
            try:
                parse(metadata['date'])
                defaults['date'] = metadata['date']
            except ValueError:
                pass
        
        defaults['date_type'] = metadata['date_type']
        defaults['edition'] = metadata['edition']
        defaults['abstract'] = metadata['abstract']
        defaults['purpose'] = metadata['purpose']
        defaults['maintenance_frequency'] = metadata['maintenance_frequency']
        
        try:
            defaults['restriction_code_type'] = RestrictionCodeType(id=metadata['restriction_code_type'])
        except:
            pass
        
        defaults['constraints_other'] = metadata['constraints_other']
        
        try:
            defaults['license'] = License(id=metadata['license'])
        except:
            pass
        
        defaults['language'] = metadata['language']
        
        try:
            defaults['spatial_representation_type'] = SpatialRepresentationType(id=metadata['spatial_representation_type'])
        except:
            pass
        
        if (len(metadata['temporal_extent_start'])):
            try:
                parse(metadata['temporal_extent_start'])
                defaults['temporal_extent_start'] = metadata['temporal_extent_start']
            except ValueError:
                pass
        
        if (len(metadata['temporal_extent_end'])):
            try:
                parse(metadata['temporal_extent_end'])
                defaults['temporal_extent_end'] = metadata['temporal_extent_end']
            except ValueError:
                pass
        
        defaults['supplemental_information'] = metadata['supplemental_information']
        defaults['distribution_url'] = metadata['distribution_url']
        defaults['distribution_description'] = metadata['distribution_description']
        defaults['data_quality_statement'] = metadata['data_quality_statement']
        
        if (metadata['featured'] != False):
            defaults['featured'] = True
        
        if (metadata['is_published'] != False):
            defaults['is_published'] = True
        
        # make sure thumbnail URL is valid link to an image
        if (len(metadata['thumbnail_url'])):
            val = URLValidator()
            try:
                thumbnail_url = metadata['thumbnail_url']
                val(thumbnail_url)
                
                if (thumbnail_url.lower().startswith(('http://', 'https://')) and thumbnail_url.lower().endswith(('.jpg', '.jpeg', '.png'))):
                    print 'debug thumbnail_url'
                    print thumbnail_url
                    defaults['thumbnail_url'] = thumbnail_url
            except ValidationError:
                pass
        
        if (len(metadata['keywords'])):
            keywords = [keyword.strip() for keyword in metadata['keywords'].split(',')]
        
        if (len(metadata['regions'])):
            regions_id = metadata['regions'].split(',')
            for region_id in regions_id:
                try:
                    region = Region.objects.get(id=region_id)
                    # store region code to be resolved below in resolve_regions()
                    regions.append(region.code)
                except Region.DoesNotExist:
                    pass
        
        try:
            defaults['category'] = TopicCategory(id=metadata['category_choice_field'])
        except:
            pass
    #^^ end processing layer metadata

    #^^ moved before processing form metadata fields
    """
    # set metadata
    if 'xml' in files:
        xml_file = open(files['xml'])
        defaults['metadata_uploaded'] = True
        # get model properties from XML
        vals, regions, keywords = set_metadata(xml_file.read())

        for key, value in vals.items():
            if key == 'spatial_representation_type':
                value = SpatialRepresentationType(identifier=value)
            elif key == 'topic_category':
                value, created = TopicCategory.objects.get_or_create(
                    identifier=value.lower(),
                    defaults={'description': '', 'gn_description': value})
                key = 'category'
                defaults[key] = value
            else:
                defaults[key] = value
    """
    #^^
    
    regions_resolved, regions_unresolved = resolve_regions(regions)
    keywords.extend(regions_unresolved)

    if getattr(settings, 'NLP_ENABLED', False):
        try:
            from geonode.contrib.nlp.utils import nlp_extract_metadata_dict
            nlp_metadata = nlp_extract_metadata_dict({
                'title': defaults.get('title', None),
                'abstract': defaults.get('abstract', None),
                'purpose': defaults.get('purpose', None)})
            if nlp_metadata:
                regions_resolved.extend(nlp_metadata.get('regions', []))
                keywords.extend(nlp_metadata.get('keywords', []))
        except:
            print "NLP extraction failed."

    # If it is a vector file, create the layer in postgis.
    if is_vector(filename):
        defaults['storeType'] = 'dataStore'

    # If it is a raster file, get the resolution.
    if is_raster(filename):
        defaults['storeType'] = 'coverageStore'

    # Create a Django object.
    layer, created = Layer.objects.get_or_create(
        name=valid_name,
        defaults=defaults
    )

    # Delete the old layers if overwrite is true
    # and the layer was not just created
    # process the layer again after that by
    # doing a layer.save()
    if not created and overwrite:
        layer.upload_session.layerfile_set.all().delete()
        layer.upload_session = upload_session
        # Pass the parameter overwrite to tell whether the
        # geoserver_post_save_signal should upload the new file or not
        layer.overwrite = overwrite
        layer.save()

    # Assign the keywords (needs to be done after saving)
    keywords = list(set(keywords))
    if keywords:
        if len(keywords) > 0:
            layer.keywords.add(*keywords)

    #^^ start saving metadata point of contact, layer needs to exist first
    if form_metadata != None and metadata['poc'].isdigit():
        try:
            contact = get_user_model().objects.get(id=metadata['poc'])
            layer.poc = contact
            layer.save()
        except get_user_model().DoesNotExist:
            pass
    #^^ end
    
    #^^ start saving metadata author, layer needs to exist first
    if form_metadata != None and metadata['metadata_author'].isdigit():
        try:
            author = get_user_model().objects.get(id=metadata['metadata_author'])
            layer.metadata_author = author
            layer.save()
        except get_user_model().DoesNotExist:
            pass
    #^^ end
    
    #^^ start saving icraf_dr_main's Layer reference key
    if main_id != None:
        try:
            main = Main.objects.get(id=main_id)
            main.layer = layer
            if xml_metadata_created:
                main.date_created = xml_metadata_created
            main.save()
        except:
            pass
    #^^ end
    
    # Assign the regions (needs to be done after saving)
    regions_resolved = list(set(regions_resolved))
    if regions_resolved:
        if len(regions_resolved) > 0:
            layer.regions.add(*regions_resolved)
    
    #^^ start send email notification to 'Pengelola Basis Data' group managers
    if 'notification' in settings.INSTALLED_APPS:
        try:
            group_approver = GroupProfile.objects.get(title='Pengelola Basis Data')
            if group_approver:
                group_managers = group_approver.get_managers()
                if group_managers:
                    print 'debug group_approver.title'
                    print group_approver.title
                    print group_managers
                    notif_ctx = {
                        'resource': layer,
                    }
                    print 'sending notification'
                    notification.send(group_managers, 'layer_created', notif_ctx)
        except:
            pass
    #^^ end
    
    return layer