Code example #1
File: utils.py  Project: e7dal/hydroshare
def get_logical_file_type(res,
                          user,
                          file_id,
                          hs_file_type=None,
                          folder_path=None,
                          fail_feedback=True):
    """ Return the logical file type associated with a new file """
    if hs_file_type is None:
        res_file = utils.get_resource_file_by_id(res, file_id)
        ext_to_type = {
            ".tif": "GeoRaster",
            ".tiff": "GeoRaster",
            ".vrt": "GeoRaster",
            ".nc": "NetCDF",
            ".shp": "GeoFeature",
            ".json": "RefTimeseries",
            ".sqlite": "TimeSeries"
        }
        file_name = str(res_file)
        root, ext = os.path.splitext(file_name)
        ext = ext.lower()
        if ext in ext_to_type:
            # Check for special case of RefTimeseries having 2 extensions
            if ext == ".json":
                if not file_name.lower().endswith(".refts.json"):
                    if fail_feedback:
                        raise ValueError(
                            "Unsupported aggregation extension. Supported aggregation "
                            "extensions are: {}".format(
                                list(ext_to_type.keys())))
            hs_file_type = ext_to_type[ext]
        else:
            if fail_feedback:
                raise ValueError(
                    "Unsupported aggregation extension. Supported aggregation "
                    "extensions are: {}".format(list(ext_to_type.keys())))
            return None

    file_type_map = {
        "SingleFile": GenericLogicalFile,
        "FileSet": FileSetLogicalFile,
        "GeoRaster": GeoRasterLogicalFile,
        "NetCDF": NetCDFLogicalFile,
        'GeoFeature': GeoFeatureLogicalFile,
        'RefTimeseries': RefTimeseriesLogicalFile,
        'TimeSeries': TimeSeriesLogicalFile
    }
    if hs_file_type not in file_type_map:
        if fail_feedback:
            raise ValueError(
                "Unsupported aggregation type. Supported aggregation "
                "types are: {}".format(list(file_type_map.keys())))
        return None
    logical_file_type_class = file_type_map[hs_file_type]
    return logical_file_type_class
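
A minimal usage sketch for this dispatcher, assuming a composite resource res, a requesting user, and the id f_id of a newly added file (all hypothetical names):

    # Resolve the aggregation class for the new file; with fail_feedback=False an
    # unsupported extension returns None instead of raising a ValueError.
    logical_type = get_logical_file_type(res, user, f_id, fail_feedback=False)
    if logical_type is not None:
        # Newer logical file classes (see code examples #2 and #7 below) expose a
        # keyword-style set_file_type classmethod.
        logical_type.set_file_type(res, user, file_id=f_id)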
Code example #2
File: generic.py  Project: Lihao-CAU/hydroshare
    def set_file_type(cls,
                      resource,
                      user,
                      file_id=None,
                      folder_path=None,
                      extra_data={}):
        """
        Makes any physical file part of a generic aggregation type. The physical file must
        not already be a part of any aggregation.
        :param resource:
        :param user:
        :param file_id: id of the resource file to set logical file type
        :param folder_path: ignored here; a value for file_id is required
        :param extra_data: a dict that, if not empty, will be passed on to extra_data of
        corresponding logical file of the resource file
        :return:
        """

        log = logging.getLogger()
        if file_id is None:
            raise ValueError(
                "Must specify id of the file to be set as an aggregation type")

        res_file = utils.get_resource_file_by_id(resource, file_id)
        # a resource file that is not part of any aggregation, or is part of a fileset
        # aggregation, can be used to create a single file aggregation
        if res_file.has_logical_file and not res_file.logical_file.is_fileset:
            raise ValidationError(
                "Selected file '{}' is already part of an aggregation".format(
                    res_file.file_name))

        logical_file = GenericLogicalFile.create(resource)
        dataset_name, _ = os.path.splitext(res_file.file_name)
        logical_file.dataset_name = dataset_name
        if extra_data:
            logical_file.extra_data = extra_data
        logical_file.save()
        res_file.logical_file_content_object = logical_file
        res_file.save()
        logical_file.create_aggregation_xml_documents()
        log.info("Generic aggregation was created for file:{}.".format(
            res_file.storage_path))
        post_add_generic_aggregation.send(sender=AbstractLogicalFile,
                                          resource=resource,
                                          file=logical_file)
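
A usage sketch for the method above, with res, user, and f_id as hypothetical placeholders; extra_data is optional and, when given, is stored on the new aggregation:

    # Turn a single file that is not yet part of an aggregation into a
    # generic (single file) aggregation; the extra_data dict is illustrative.
    GenericLogicalFile.set_file_type(res, user, file_id=f_id,
                                     extra_data={'source': 'manual upload'})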
Code example #3
File: utils.py  Project: hydroshare/hydroshare
def get_logical_file_type(res, user, file_id, hs_file_type=None, folder_path=None,
                          fail_feedback=True):
    """ Return the logical file type associated with a new file """
    if hs_file_type is None:
        res_file = utils.get_resource_file_by_id(res, file_id)
        ext_to_type = {".tif": "GeoRaster", ".tiff": "GeoRaster", ".vrt": "GeoRaster",
                       ".nc": "NetCDF", ".shp": "GeoFeature", ".json": "RefTimeseries",
                       ".sqlite": "TimeSeries"}
        file_name = str(res_file)
        root, ext = os.path.splitext(file_name)
        ext = ext.lower()
        if ext in ext_to_type:
            # Check for special case of RefTimeseries having 2 extensions
            if ext == ".json":
                if not file_name.lower().endswith(".refts.json"):
                    if fail_feedback:
                        raise ValueError("Unsupported aggregation extension. Supported aggregation "
                                         "extensions are: {}".format(ext_to_type.keys()))
            hs_file_type = ext_to_type[ext]
        else:
            if fail_feedback:
                raise ValueError("Unsupported aggregation extension. Supported aggregation "
                                 "extensions are: {}".format(ext_to_type.keys()))
            return None

    file_type_map = {"SingleFile": GenericLogicalFile,
                     "FileSet": FileSetLogicalFile,
                     "GeoRaster": GeoRasterLogicalFile,
                     "NetCDF": NetCDFLogicalFile,
                     'GeoFeature': GeoFeatureLogicalFile,
                     'RefTimeseries': RefTimeseriesLogicalFile,
                     'TimeSeries': TimeSeriesLogicalFile}
    if hs_file_type not in file_type_map:
        if fail_feedback:
            raise ValueError("Unsupported aggregation type. Supported aggregation types are: {"
                             "}".format(ext_to_type.keys()))
        return None
    logical_file_type_class = file_type_map[hs_file_type]
    return logical_file_type_class
Code example #4
File: generic.py  Project: hydroshare/hydroshare
    def set_file_type(cls, resource, user, file_id=None, folder_path=None, extra_data={}):
        """
        Makes any physical file part of a generic aggregation type. The physical file must
        not already be a part of any aggregation.
        :param resource:
        :param user:
        :param file_id: id of the resource file to set logical file type
        :param folder_path: ignored here; a value for file_id is required
        :param extra_data: a dict that, if not empty, will be passed on to extra_data of
        corresponding logical file of the resource file
        :return:
        """

        log = logging.getLogger()
        if file_id is None:
            raise ValueError("Must specify id of the file to be set as an aggregation type")

        res_file = utils.get_resource_file_by_id(resource, file_id)
        # a resource file that is not part of any aggregation, or is part of a fileset
        # aggregation, can be used to create a single file aggregation
        if res_file.has_logical_file and not res_file.logical_file.is_fileset:
            raise ValidationError("Selected file '{}' is already part of an aggregation".format(
                res_file.file_name))

        logical_file = GenericLogicalFile.create(resource)
        dataset_name, _ = os.path.splitext(res_file.file_name)
        logical_file.dataset_name = dataset_name
        if extra_data:
            logical_file.extra_data = extra_data
        logical_file.save()
        res_file.logical_file_content_object = logical_file
        res_file.save()
        logical_file.create_aggregation_xml_documents()
        log.info("Generic aggregation was created for file:{}.".format(res_file.storage_path))
        post_add_generic_aggregation.send(
            sender=AbstractLogicalFile,
            resource=resource,
            file=logical_file
        )
Code example #5
    def set_file_type(cls, resource, file_id, user):
        """
            Sets a json resource file to RefTimeseriesFile type
            :param resource: an instance of resource type CompositeResource
            :param file_id: id of the resource file to be set as RefTimeSeriesFile type
            :param user: user who is setting the file type
            :return:
            """

        log = logging.getLogger()

        # get the selected resource file object
        res_file = utils.get_resource_file_by_id(resource, file_id)

        if res_file is None:
            raise ValidationError("File not found.")

        if res_file.extension != '.refts':
            raise ValidationError("Not a Ref Time Series file.")

        files_to_add_to_resource = []
        if res_file.has_generic_logical_file:
            try:
                json_file_content = _validate_json_file(res_file)
            except Exception as ex:
                raise ValidationError(ex.message)

            # get the file from irods to temp dir
            temp_file = utils.get_file_from_irods(res_file)
            temp_dir = os.path.dirname(temp_file)
            files_to_add_to_resource.append(temp_file)
            file_folder = res_file.file_folder
            with transaction.atomic():
                # first delete the json file that we retrieved from irods
                # for setting it to reftimeseries file type
                delete_resource_file(resource.short_id, res_file.id, user)

                # create a reftimeseries logical file object to be associated with
                # resource files
                logical_file = cls.create()

                logical_file.metadata.json_file_content = json_file_content
                logical_file.metadata.save()

                try:
                    # add the json file back to the resource
                    uploaded_file = UploadedFile(
                        file=open(temp_file, 'rb'),
                        name=os.path.basename(temp_file))
                    # the added resource file will be part of a new generic logical file by default
                    new_res_file = utils.add_file_to_resource(
                        resource, uploaded_file, folder=file_folder)

                    # delete the generic logical file object
                    if new_res_file.logical_file is not None:
                        # deleting the file level metadata object will delete the associated
                        # logical file object
                        new_res_file.logical_file.metadata.delete()

                    # make the resource file we added as part of the logical file
                    logical_file.add_resource_file(new_res_file)
                    logical_file.metadata.save()
                    logical_file.dataset_name = logical_file.metadata.get_title_from_json(
                    )
                    logical_file.save()
                    # extract metadata
                    _extract_metadata(resource, logical_file)
                    log.info(
                        "RefTimeseries file type - json file was added to the resource."
                    )
                except Exception as ex:
                    msg = "RefTimeseries file type. Error when setting file type. Error:{}"
                    msg = msg.format(ex.message)
                    log.exception(msg)
                    raise ValidationError(msg)
                finally:
                    # remove temp dir
                    if os.path.isdir(temp_dir):
                        shutil.rmtree(temp_dir)

                log.info("RefTimeseries file type was created.")

        else:
            err_msg = "Selected file is not part of a GenericLogical file."
            log.error(err_msg)
            raise ValidationError(err_msg)
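
This older variant (presumably a classmethod of RefTimeseriesLogicalFile) takes positional arguments; a call sketch with res, json_file_id, and user as hypothetical placeholders:

    # Older API: (resource, file_id, user). The selected file must have the
    # .refts extension and already carry a generic logical file.
    RefTimeseriesLogicalFile.set_file_type(res, json_file_id, user)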
Code example #6
File: netcdf.py  Project: zhangmingda/hydroshare
    def set_file_type(cls, resource, file_id, user):
        """
            Sets a netcdf (.nc) resource file to NetCDFFile type
            :param resource: an instance of resource type CompositeResource
            :param file_id: id of the resource file to be set as NetCDFFile type
            :param user: user who is setting the file type
            :return:
            """

        # had to import it here to avoid import loop
        from hs_core.views.utils import create_folder

        log = logging.getLogger()

        # get the file from irods
        res_file = utils.get_resource_file_by_id(resource, file_id)

        if res_file is None:
            raise ValidationError("File not found.")

        if res_file.extension != '.nc':
            raise ValidationError("Not a NetCDF file.")

        # base file name (no path included)
        file_name = res_file.file_name
        # file name without the extension
        nc_file_name = file_name.split(".")[0]

        resource_metadata = []
        file_type_metadata = []
        files_to_add_to_resource = []
        if res_file.has_generic_logical_file:
            # get the file from irods to temp dir
            temp_file = utils.get_file_from_irods(res_file)
            temp_dir = os.path.dirname(temp_file)
            files_to_add_to_resource.append(temp_file)
            # file validation and metadata extraction
            nc_dataset = nc_utils.get_nc_dataset(temp_file)
            if isinstance(nc_dataset, netCDF4.Dataset):
                # Extract the metadata from netcdf file
                res_dublin_core_meta, res_type_specific_meta = nc_meta.get_nc_meta_dict(
                    temp_file)
                # populate resource_metadata and file_type_metadata lists with extracted metadata
                add_metadata_to_list(resource_metadata, res_dublin_core_meta,
                                     res_type_specific_meta,
                                     file_type_metadata, resource)

                # create the ncdump text file
                dump_file = create_header_info_txt_file(
                    temp_file, nc_file_name)
                files_to_add_to_resource.append(dump_file)
                file_folder = res_file.file_folder
                with transaction.atomic():
                    # first delete the netcdf file that we retrieved from irods
                    # for setting it to netcdf file type
                    delete_resource_file(resource.short_id, res_file.id, user)

                    # create a netcdf logical file object to be associated with
                    # resource files
                    logical_file = cls.create()

                    # by default set the dataset_name attribute of the logical file to the
                    # name of the file selected to set file type unless the extracted metadata
                    # has a value for title
                    dataset_title = res_dublin_core_meta.get('title', None)
                    if dataset_title is not None:
                        logical_file.dataset_name = dataset_title
                    else:
                        logical_file.dataset_name = nc_file_name
                    logical_file.save()

                    try:
                        # create a folder for the netcdf file type using the base file
                        # name as the name for the new folder
                        new_folder_path = cls.compute_file_type_folder(
                            resource, file_folder, nc_file_name)
                        fed_file_full_path = ''
                        if resource.resource_federation_path:
                            fed_file_full_path = os.path.join(
                                resource.root_path, new_folder_path)

                        create_folder(resource.short_id, new_folder_path)
                        log.info("Folder created:{}".format(new_folder_path))

                        new_folder_name = new_folder_path.split('/')[-1]
                        if file_folder is None:
                            upload_folder = new_folder_name
                        else:
                            upload_folder = os.path.join(
                                file_folder, new_folder_name)
                        # add all new files to the resource
                        for f in files_to_add_to_resource:
                            uploaded_file = UploadedFile(
                                file=open(f, 'rb'), name=os.path.basename(f))
                            new_res_file = utils.add_file_to_resource(
                                resource,
                                uploaded_file,
                                folder=upload_folder,
                                fed_res_file_name_or_path=fed_file_full_path)
                            # make each resource file we added as part of the logical file
                            logical_file.add_resource_file(new_res_file)

                        log.info(
                            "NetCDF file type - new files were added to the resource."
                        )
                    except Exception as ex:
                        msg = "NetCDF file type. Error when setting file type. Error:{}"
                        msg = msg.format(ex.message)
                        log.exception(msg)
                        # TODO: in case of any error put the original file back and
                        # delete the folder that was created
                        raise ValidationError(msg)
                    finally:
                        # remove temp dir
                        if os.path.isdir(temp_dir):
                            shutil.rmtree(temp_dir)

                    log.info("NetCDF file type was created.")

                    # use the extracted metadata to populate resource metadata
                    for element in resource_metadata:
                        # here k is the name of the element
                        # v is a dict of all element attributes/field names and field values
                        k, v = element.items()[0]
                        if k == 'title':
                            # update title element
                            title_element = resource.metadata.title
                            resource.metadata.update_element(
                                'title', title_element.id, **v)
                        else:
                            resource.metadata.create_element(k, **v)

                    log.info("Resource - metadata was saved to DB")

                    # use the extracted metadata to populate file metadata
                    for element in file_type_metadata:
                        # here k is the name of the element
                        # v is a dict of all element attributes/field names and field values
                        k, v = element.items()[0]
                        if k == 'subject':
                            logical_file.metadata.keywords = v
                            logical_file.metadata.save()
                        else:
                            logical_file.metadata.create_element(k, **v)
                    log.info("NetCDF file type - metadata was saved to DB")
            else:
                err_msg = "Not a valid NetCDF file. File type file validation failed."
                log.error(err_msg)
                # remove temp dir
                if os.path.isdir(temp_dir):
                    shutil.rmtree(temp_dir)
                raise ValidationError(err_msg)
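
The resource_metadata and file_type_metadata lists consumed by the loops above hold one single-key dict per metadata element, keyed by element name; a hypothetical sketch of that shape (field names and values invented for illustration):

    # Each entry maps an element name to a dict of its field names/values,
    # which is what the k, v = element.items()[0] unpacking relies on.
    resource_metadata = [
        {'title': {'value': 'Example NetCDF dataset'}},
        {'description': {'abstract': 'Extracted from the .nc global attributes'}},
    ]
    for element in resource_metadata:
        k, v = list(element.items())[0]  # list() needed on Python 3; the code above is Python 2 style
        print(k, v)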
Code example #7
    def set_file_type(cls, resource, user, file_id=None, folder_path=None):
        """ Creates a RefTimeseriesLogicalFile (aggregation) from a json resource file (.refts.json)
        """

        log = logging.getLogger()
        if file_id is None:
            raise ValueError("Must specify id of the file to be set as an aggregation type")

        # get the selected resource file object
        res_file = utils.get_resource_file_by_id(resource, file_id)

        if res_file is None:
            raise ValidationError("File not found.")

        if not res_file.file_name.lower().endswith('.refts.json'):
            raise ValidationError("Selected file '{}' is not a Ref Time Series file.".format(
                res_file.file_name))

        if res_file.has_logical_file and not res_file.logical_file.is_fileset:
            raise ValidationError("Selected file '{}' is already part of an aggregation".format(
                res_file.file_name))

        try:
            json_file_content = _validate_json_file(res_file)
        except Exception as ex:
            log.exception("failed json validation")
            raise ValidationError(ex.message)

        # get the file from irods to temp dir
        temp_file = utils.get_file_from_irods(res_file)
        temp_dir = os.path.dirname(temp_file)

        with transaction.atomic():
            # create a reftimeseries logical file object to be associated with
            # resource files
            logical_file = cls.create(resource)
            # create logical file record in DB
            logical_file.save()
            logical_file.metadata.json_file_content = json_file_content
            logical_file.metadata.save()

            try:
                # make the json file part of the aggregation
                logical_file.add_resource_file(res_file)
                logical_file.dataset_name = logical_file.metadata.get_title_from_json()
                logical_file.save()
                # extract metadata
                _extract_metadata(resource, logical_file)
                log.info("RefTimeseries aggregation type - json file was added to the resource.")
                logical_file._finalize(user, resource, folder_created=False,
                                       res_files_to_delete=[])

                log.info("RefTimeseries aggregation type was created.")
            except Exception as ex:
                msg = "RefTimeseries aggregation type. Error when setting aggregation " \
                      "type. Error:{}"
                msg = msg.format(ex.message)
                log.exception(msg)
                raise ValidationError(msg)
            finally:
                # remove temp dir
                if os.path.isdir(temp_dir):
                    shutil.rmtree(temp_dir)
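
A call sketch for this newer keyword-style API, with res, user, and f_id as hypothetical placeholders:

    # Newer API: the selected .refts.json file itself becomes part of the
    # aggregation, so no delete/re-upload or folder creation is needed.
    RefTimeseriesLogicalFile.set_file_type(res, user, file_id=f_id)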
Code example #8
    def set_file_type(cls, resource, file_id, user):
        """
            Sets a tif or zip raster resource file to GeoRasterFile type
            :param resource: an instance of resource type CompositeResource
            :param file_id: id of the resource file to be set as GeoRasterFile type
            :param user: user who is setting the file type
            :return:
            """

        # had to import it here to avoid import loop
        from hs_core.views.utils import create_folder, remove_folder

        log = logging.getLogger()

        # get the file from irods
        res_file = utils.get_resource_file_by_id(resource, file_id)

        # base file name (no path included)
        file_name = utils.get_resource_file_name_and_extension(res_file)[1]
        # file name without the extension
        file_name = file_name[:-len(res_file.extension)]
        file_folder = res_file.file_folder
        upload_folder = ''
        if res_file is not None and res_file.has_generic_logical_file:
            # get the file from irods to temp dir
            temp_file = utils.get_file_from_irods(res_file)
            # validate the file
            error_info, files_to_add_to_resource = raster_file_validation(
                raster_file=temp_file)
            if not error_info:
                log.info("Geo raster file type file validation successful.")
                # extract metadata
                temp_dir = os.path.dirname(temp_file)
                temp_vrt_file_path = [
                    os.path.join(temp_dir, f) for f in os.listdir(temp_dir)
                    if '.vrt' == os.path.splitext(f)[1]
                ].pop()
                metadata = extract_metadata(temp_vrt_file_path)
                log.info(
                    "Geo raster file type metadata extraction was successful.")
                with transaction.atomic():
                    # create a geo raster logical file object to be associated with resource files
                    logical_file = cls.create()
                    # by default set the dataset_name attribute of the logical file to the
                    # name of the file selected to set file type
                    logical_file.dataset_name = file_name
                    logical_file.save()

                    try:
                        # create a folder for the raster file type using the base file name as the
                        # name for the new folder
                        new_folder_path = cls.compute_file_type_folder(
                            resource, file_folder, file_name)

                        log.info("Folder created:{}".format(new_folder_path))
                        create_folder(resource.short_id, new_folder_path)

                        new_folder_name = new_folder_path.split('/')[-1]
                        if file_folder is None:
                            upload_folder = new_folder_name
                        else:
                            upload_folder = os.path.join(
                                file_folder, new_folder_name)

                        # add all new files to the resource
                        for f in files_to_add_to_resource:
                            uploaded_file = UploadedFile(
                                file=open(f, 'rb'), name=os.path.basename(f))
                            # the added resource file will be part of a new generic logical file
                            # by default
                            new_res_file = utils.add_file_to_resource(
                                resource, uploaded_file, folder=upload_folder)

                            # delete the generic logical file object
                            if new_res_file.logical_file is not None:
                                # deleting the file level metadata object will delete the associated
                                # logical file object
                                new_res_file.logical_file.metadata.delete()

                            # make each resource file we added as part of the logical file
                            logical_file.add_resource_file(new_res_file)

                        log.info(
                            "Geo raster file type - new files were added to the resource."
                        )

                        # use the extracted metadata to populate file metadata
                        for element in metadata:
                            # here k is the name of the element
                            # v is a dict of all element attributes/field names and field values
                            k, v = element.items()[0]
                            logical_file.metadata.create_element(k, **v)
                        log.info(
                            "Geo raster file type - metadata was saved to DB")
                        # set resource to private if logical file is missing required metadata
                        resource.update_public_and_discoverable()
                        # delete the original resource file
                        delete_resource_file(resource.short_id, res_file.id,
                                             user)
                        log.info("Deleted original resource file.")
                    except Exception as ex:
                        msg = "Geo raster file type. Error when setting file type. Error:{}"
                        msg = msg.format(ex.message)
                        log.exception(msg)
                        if upload_folder:
                            # delete any new files uploaded as part of setting file type
                            folder_to_remove = os.path.join(
                                'data', 'contents', upload_folder)
                            remove_folder(user, resource.short_id,
                                          folder_to_remove)
                            log.info("Deleted newly created file type folder")
                        raise ValidationError(msg)
                    finally:
                        # remove temp dir
                        if os.path.isdir(temp_dir):
                            shutil.rmtree(temp_dir)
            else:
                err_msg = "Geo raster file type file validation failed.{}".format(
                    ' '.join(error_info))
                log.info(err_msg)
                raise ValidationError(err_msg)
        else:
            if res_file is None:
                err_msg = "Failed to set Geo raster file type. " \
                          "Resource doesn't have the specified file."
                log.error(err_msg)
                raise ValidationError(err_msg)
            else:
                err_msg = "Failed to set Geo raster file type." \
                          "The specified file doesn't have a generic logical file type."
                log.error(err_msg)
                raise ValidationError(err_msg)
Code example #9
    def set_file_type(cls, resource, user, file_id=None, folder_path=None):
        """ Creates a RefTimeseriesLogicalFile (aggregation) from a json resource file (.refts.json)
        """

        log = logging.getLogger()
        if file_id is None:
            raise ValueError("Must specify id of the file to be set as an aggregation type")

        # get the selected resource file object
        res_file = utils.get_resource_file_by_id(resource, file_id)

        if res_file is None:
            raise ValidationError("File not found.")

        if not res_file.file_name.lower().endswith('.refts.json'):
            raise ValidationError("Selected file '{}' is not a Ref Time Series file.".format(
                res_file.file_name))

        if res_file.has_logical_file and not res_file.logical_file.is_fileset:
            raise ValidationError("Selected file '{}' is already part of an aggregation".format(
                res_file.file_name))

        try:
            json_file_content = _validate_json_file(res_file)
        except Exception as ex:
            log.exception("failed json validation")
            raise ValidationError(ex.message)

        # get the file from irods to temp dir
        temp_file = utils.get_file_from_irods(res_file)
        temp_dir = os.path.dirname(temp_file)

        with transaction.atomic():
            # create a reftimeseries logical file object to be associated with
            # resource files
            logical_file = cls.create(resource)
            # create logical file record in DB
            logical_file.save()
            logical_file.metadata.json_file_content = json_file_content
            logical_file.metadata.save()

            try:
                # make the json file part of the aggregation
                logical_file.add_resource_file(res_file)
                logical_file.dataset_name = logical_file.metadata.get_title_from_json()
                logical_file.save()
                # extract metadata
                _extract_metadata(resource, logical_file)
                log.info("RefTimeseries aggregation type - json file was added to the resource.")
                logical_file._finalize(user, resource, folder_created=False,
                                       res_files_to_delete=[])

                log.info("RefTimeseries aggregation type was created.")
                post_add_reftimeseries_aggregation.send(
                    sender=AbstractLogicalFile,
                    resource=resource,
                    file=logical_file
                )
            except Exception as ex:
                msg = "RefTimeseries aggregation type. Error when setting aggregation " \
                      "type. Error:{}"
                msg = msg.format(ex.message)
                log.exception(msg)
                raise ValidationError(msg)
            finally:
                # remove temp dir
                if os.path.isdir(temp_dir):
                    shutil.rmtree(temp_dir)
Code example #10
File: geofeature.py  Project: LukeKuenneke/hydroshare
    def set_file_type(cls, resource, file_id, user):
        """
        Sets a .shp or .zip resource file to GeoFeatureFile type
        :param resource: an instance of resource type CompositeResource
        :param file_id: id of the resource file to be set as GeoFeatureFile type
        :param user: user who is setting the file type
        :return:
        """

        # had to import it here to avoid import loop
        from hs_core.views.utils import create_folder, remove_folder

        log = logging.getLogger()

        # get the file from irods
        res_file = utils.get_resource_file_by_id(resource, file_id)

        if res_file is None or not res_file.exists:
            raise ValidationError("File not found.")

        if res_file.extension.lower() not in ('.zip', '.shp'):
            raise ValidationError("Not a valid geographic feature file.")

        if not res_file.has_generic_logical_file:
            raise ValidationError(
                "Selected file must be part of a generic file type.")

        try:
            meta_dict, shape_files, shp_res_files = extract_metadata_and_files(
                resource, res_file)
        except ValidationError as ex:
            log.exception(ex.message)
            raise ex

        # hold on to temp dir for final clean up
        temp_dir = os.path.dirname(shape_files[0])
        file_name = res_file.file_name
        # file name without the extension
        base_file_name = file_name[:-len(res_file.extension)]
        xml_file = ''
        for f in shape_files:
            if f.lower().endswith('.shp.xml'):
                xml_file = f
                break

        file_folder = res_file.file_folder
        file_type_success = False
        upload_folder = ''
        msg = "GeoFeature file type. Error when setting file type. Error:{}"
        with transaction.atomic():
            # create a GeoFeature logical file object to be associated with
            # resource files
            logical_file = cls.create()

            # by default set the dataset_name attribute of the logical file to the
            # name of the file selected to set file type
            logical_file.dataset_name = base_file_name
            logical_file.save()
            try:
                # create a folder for the geofeature file type using the base file
                # name as the name for the new folder
                new_folder_path = cls.compute_file_type_folder(
                    resource, file_folder, base_file_name)
                create_folder(resource.short_id, new_folder_path)
                log.info("Folder created:{}".format(new_folder_path))

                new_folder_name = new_folder_path.split('/')[-1]
                if file_folder is None:
                    upload_folder = new_folder_name
                else:
                    upload_folder = os.path.join(file_folder, new_folder_name)
                # add all new files to the resource
                files_to_add_to_resource = shape_files
                for fl in files_to_add_to_resource:
                    uploaded_file = UploadedFile(file=open(fl, 'rb'),
                                                 name=os.path.basename(fl))
                    new_res_file = utils.add_file_to_resource(
                        resource, uploaded_file, folder=upload_folder)

                    # make each resource file we added part of the logical file
                    logical_file.add_resource_file(new_res_file)

                log.info(
                    "GeoFeature file type - files were added to the file type."
                )
                add_metadata(resource, meta_dict, xml_file, logical_file)
                log.info(
                    "GeoFeature file type and resource level metadata updated."
                )
                # delete the original resource files used as part of setting file type
                for fl in shp_res_files:
                    delete_resource_file(resource.short_id, fl.id, user)
                log.info("Deleted original resource files.")
                file_type_success = True
            except Exception as ex:
                msg = msg.format(ex.message)
                log.exception(msg)
            finally:
                # remove temp dir
                if os.path.isdir(temp_dir):
                    shutil.rmtree(temp_dir)

        if not file_type_success and upload_folder:
            # delete any new files uploaded as part of setting file type
            folder_to_remove = os.path.join('data', 'contents', upload_folder)
            remove_folder(user, resource.short_id, folder_to_remove)
            log.info("Deleted newly created file type folder")
            raise ValidationError(msg)
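
A call sketch for the GeoFeature case above, again with hypothetical names res, shp_file_id, and user:

    # Older positional API; the selected .shp or .zip file must currently be
    # part of a generic file type (see the checks at the top of the method).
    GeoFeatureLogicalFile.set_file_type(res, shp_file_id, user)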