コード例 #1
0
    def handle(self, *args, **options):
        """Convert every GenericResource in the system to a CompositeResource.

        Logs and prints a resource count before and after the conversion, and
        flags each converted resource's bag as dirty so its xml files are
        regenerated on the next bag download.
        """
        log = logging.getLogger(__name__)
        target_type = 'CompositeResource'
        converted = 0

        def announce(text):
            # every status message goes to both the log and stdout
            log.info(text)
            print(">> {}".format(text))

        announce("THERE ARE CURRENTLY {} GENERIC RESOURCES PRIOR TO CONVERSION.".format(
            GenericResource.objects.all().count()))

        for resource in GenericResource.objects.all():
            # flip the stored type so Django resolves the composite model
            resource.resource_type = target_type
            resource.content_model = target_type.lower()
            resource.save()

            # re-fetch the saved row as a concrete CompositeResource instance
            composite = resource.get_content_model()

            # point the metadata 'type' element url at the composite term
            type_element = composite.metadata.type
            type_element.url = '{0}/terms/{1}'.format(current_site_url(), target_type)
            type_element.save()

            # set resource to dirty so that resource level xml files (resource map and
            # metadata xml files) will be generated as part of next bag download
            set_dirty_bag_flag(composite)
            converted += 1

        announce("{} GENERIC RESOURCES WERE CONVERTED TO COMPOSITE RESOURCE.".format(
            converted))
        announce("THERE ARE CURRENTLY {} GENERIC RESOURCES AFTER CONVERSION.".format(
            GenericResource.objects.all().count()))
コード例 #2
0
    def handle(self, *args, **options):
        """Convert every NetcdfResource to a CompositeResource.

        Resources missing from irods (or with missing files) are skipped.
        When the resource holds the expected .nc and *header_info.txt files,
        a NetCDF aggregation is created on the composite resource and the
        netcdf-specific metadata is migrated onto that aggregation.
        """
        logger = logging.getLogger(__name__)
        resource_counter = 0
        to_resource_type = 'CompositeResource'
        msg = "THERE ARE CURRENTLY {} MULTIDIMENSIONAL RESOURCES PRIOR TO CONVERSION.".format(
            NetcdfResource.objects.all().count())
        logger.info(msg)
        print(">> {}".format(msg))

        for nc_res in NetcdfResource.objects.all():
            # check resource exists on irods
            istorage = nc_res.get_irods_storage()
            if not istorage.exists(nc_res.root_path):
                err_msg = "NetCDF resource not found in irods (ID: {})".format(
                    nc_res.short_id)
                logger.error(err_msg)
                print("Error:>> {}".format(err_msg))
                # skip this netcdf resource for migration
                continue

            # get the nc file name which needs to be used to create a new folder
            nc_file = None
            txt_file = None
            if nc_res.files.count() == 2:
                for res_file in nc_res.files.all():
                    if res_file.extension.lower() == '.nc':
                        nc_file = res_file
                    elif res_file.file_name.lower().endswith(
                            'header_info.txt'):
                        txt_file = res_file

            # only build an aggregation when both expected files are present
            create_nc_aggregation = nc_file is not None and txt_file is not None
            if create_nc_aggregation:
                # check resource files exist on irods
                file_missing = False
                for res_file in nc_res.files.all():
                    file_path = res_file.public_path
                    if not istorage.exists(file_path):
                        err_msg = "File path not found in irods:{}".format(
                            file_path)
                        logger.error(err_msg)
                        err_msg = "Failed to convert netcdf resource (ID: {}). Resource file is " \
                                  "missing on irods".format(nc_res.short_id)
                        print("Error:>> {}".format(err_msg))
                        file_missing = True
                        break
                if file_missing:
                    # skip this corrupt netcdf resource for migration
                    continue

            # change the resource_type
            nc_metadata_obj = nc_res.metadata
            nc_res.resource_type = to_resource_type
            nc_res.content_model = to_resource_type.lower()
            nc_res.save()
            # get the converted resource object - CompositeResource
            comp_res = nc_res.get_content_model()

            # set CoreMetaData object for the composite resource
            core_meta_obj = CoreMetaData.objects.create()
            comp_res.content_object = core_meta_obj
            # migrate netcdf resource core metadata elements to composite resource
            migrate_core_meta_elements(nc_metadata_obj, comp_res)

            # update url attribute of the metadata 'type' element
            type_element = comp_res.metadata.type
            type_element.url = '{0}/terms/{1}'.format(current_site_url(),
                                                      to_resource_type)
            type_element.save()
            if create_nc_aggregation:
                # create a NetCDF aggregation
                nc_aggr = None
                try:
                    nc_aggr = NetCDFLogicalFile.create(resource=comp_res)
                except Exception as ex:
                    err_msg = 'Failed to create NetCDF aggregation for resource (ID: {})'
                    err_msg = err_msg.format(nc_res.short_id)
                    # NOTE: ex.message was removed in Python 3; str(ex) works on both 2 and 3
                    err_msg = err_msg + '\n' + str(ex)
                    logger.error(err_msg)
                    print("Error:>> {}".format(err_msg))

                if nc_aggr is not None:
                    # set aggregation dataset title
                    nc_aggr.dataset_name = comp_res.metadata.title.value
                    nc_aggr.save()
                    # make the res files part of the aggregation
                    for res_file in comp_res.files.all():
                        nc_aggr.add_resource_file(res_file)

                    # migrate netcdf specific metadata to aggregation
                    for variable in nc_metadata_obj.variables.all():
                        variable.content_object = nc_aggr.metadata
                        variable.save()

                    # create aggregation level coverage elements
                    for coverage in comp_res.metadata.coverages.all():
                        aggr_coverage = Coverage()
                        aggr_coverage.type = coverage.type
                        aggr_coverage._value = coverage._value
                        aggr_coverage.content_object = nc_aggr.metadata
                        aggr_coverage.save()

                    org_coverage = nc_metadata_obj.originalCoverage
                    if org_coverage:
                        org_coverage.content_object = nc_aggr.metadata
                        org_coverage.save()

                    # create aggregation level keywords
                    keywords = [
                        sub.value for sub in comp_res.metadata.subjects.all()
                    ]
                    nc_aggr.metadata.keywords = keywords
                    # set aggregation metadata dirty status to that of the netcdf resource metadata
                    # dirty status - this would trigger netcdf file update for the new aggregation
                    # if metadata is dirty
                    nc_aggr.metadata.is_dirty = nc_metadata_obj.is_dirty
                    nc_aggr.metadata.save()
                    # create aggregation level xml files
                    nc_aggr.create_aggregation_xml_documents()
                    msg = 'One Multidimensional aggregation was created in resource (ID: {})'
                    msg = msg.format(comp_res.short_id)
                    logger.info(msg)

            # set resource to dirty so that resource level xml files (resource map and
            # metadata xml files) will be re-generated as part of next bag download
            comp_res.save()
            try:
                set_dirty_bag_flag(comp_res)
            except Exception as ex:
                err_msg = 'Failed to set bag flag dirty for the converted resource (ID: {})'
                err_msg = err_msg.format(nc_res.short_id)
                # NOTE: ex.message was removed in Python 3; str(ex) works on both 2 and 3
                err_msg = err_msg + '\n' + str(ex)
                logger.error(err_msg)
                print("Error:>> {}".format(err_msg))

            resource_counter += 1
            # delete the instance of NetCdfMetaData that was part of the original netcdf resource
            nc_metadata_obj.delete()
            msg = 'Multidimensional resource (ID: {}) was converted to Composite Resource type'
            msg = msg.format(comp_res.short_id)
            logger.info(msg)

        msg = "{} MULTIDIMENSIONAL RESOURCES WERE CONVERTED TO COMPOSITE RESOURCE.".format(
            resource_counter)
        logger.info(msg)
        print(">> {}".format(msg))
        msg = "THERE ARE CURRENTLY {} MULTIDIMENSIONAL RESOURCES AFTER CONVERSION.".format(
            NetcdfResource.objects.all().count())
        logger.info(msg)
        if NetcdfResource.objects.all().count() > 0:
            msg = "NOT ALL MULTIDIMENSIONAL RESOURCES WERE CONVERTED TO COMPOSITE RESOURCE TYPE"
            logger.error(msg)
        print(">> {}".format(msg))
コード例 #3
0
    def handle(self, *args, **options):
        """Convert every GeographicFeatureResource to a CompositeResource.

        Resources missing from irods (or with missing files) are skipped.
        When the resource has the required content files and geometry
        information, a GeoFeature aggregation is created on the composite
        resource and the geofeature-specific metadata is migrated onto it.
        """
        logger = logging.getLogger(__name__)
        resource_counter = 0
        to_resource_type = 'CompositeResource'
        msg = "THERE ARE CURRENTLY {} GEOFEATURE RESOURCES PRIOR TO CONVERSION.".format(
            GeographicFeatureResource.objects.all().count())
        logger.info(msg)
        print(">> {}".format(msg))

        for gf_res in GeographicFeatureResource.objects.all():
            # check resource exists on irods
            istorage = gf_res.get_irods_storage()
            if not istorage.exists(gf_res.root_path):
                err_msg = "Geofeature resource not found in irods (ID: {})".format(
                    gf_res.short_id)
                logger.error(err_msg)
                print("Error:>> {}".format(err_msg))
                # skip this geofeature resource for migration
                continue

            # only build an aggregation when the required files and geometry
            # metadata are both present
            create_gf_aggregation = False
            if gf_res.has_required_content_files() and \
                    gf_res.metadata.geometryinformation is not None:
                create_gf_aggregation = True

            if create_gf_aggregation:
                # check resource files exist on irods
                file_missing = False
                for res_file in gf_res.files.all():
                    file_path = res_file.public_path
                    if not istorage.exists(file_path):
                        err_msg = "File path not found in irods:{}".format(
                            file_path)
                        logger.error(err_msg)
                        err_msg = "Failed to convert geofeature resource (ID: {}). " \
                                  "Resource file is missing on irods"
                        err_msg = err_msg.format(gf_res.short_id)
                        print("Error:>> {}".format(err_msg))
                        file_missing = True
                        break
                if file_missing:
                    # skip this corrupt geofeature resource for migration
                    continue

            # change the resource_type
            gf_metadata_obj = gf_res.metadata
            gf_res.resource_type = to_resource_type
            gf_res.content_model = to_resource_type.lower()
            gf_res.save()
            # get the converted resource object - CompositeResource
            comp_res = gf_res.get_content_model()

            # set CoreMetaData object for the composite resource
            core_meta_obj = CoreMetaData.objects.create()
            comp_res.content_object = core_meta_obj

            # migrate geofeature resource core metadata elements to composite resource
            migrate_core_meta_elements(gf_metadata_obj, comp_res)

            # update url attribute of the metadata 'type' element
            type_element = comp_res.metadata.type
            type_element.url = '{0}/terms/{1}'.format(current_site_url(),
                                                      to_resource_type)
            type_element.save()
            if create_gf_aggregation:
                # create a Geofeature aggregation
                gf_aggr = None
                try:
                    gf_aggr = GeoFeatureLogicalFile.create(resource=comp_res)
                except Exception as ex:
                    err_msg = 'Failed to create Geofeature aggregation for resource (ID: {})'
                    err_msg = err_msg.format(gf_res.short_id)
                    # NOTE: ex.message was removed in Python 3; str(ex) works on both 2 and 3
                    err_msg = err_msg + '\n' + str(ex)
                    logger.error(err_msg)
                    print("Error:>> {}".format(err_msg))

                if gf_aggr is not None:
                    # set aggregation dataset title
                    gf_aggr.dataset_name = comp_res.metadata.title.value
                    gf_aggr.save()
                    # make the res files part of the aggregation
                    for res_file in comp_res.files.all():
                        gf_aggr.add_resource_file(res_file)

                    # migrate geofeature specific metadata to aggregation
                    for fieldinfo in gf_metadata_obj.fieldinformations.all():
                        fieldinfo.content_object = gf_aggr.metadata
                        fieldinfo.save()

                    # create aggregation level coverage elements
                    for coverage in comp_res.metadata.coverages.all():
                        aggr_coverage = Coverage()
                        aggr_coverage.type = coverage.type
                        aggr_coverage._value = coverage._value
                        aggr_coverage.content_object = gf_aggr.metadata
                        aggr_coverage.save()

                    org_coverage = gf_metadata_obj.originalcoverage
                    if org_coverage:
                        org_coverage.content_object = gf_aggr.metadata
                        org_coverage.save()

                    geom_info = gf_metadata_obj.geometryinformation
                    if geom_info:
                        geom_info.content_object = gf_aggr.metadata
                        geom_info.save()

                    # create aggregation level keywords
                    keywords = [
                        sub.value for sub in comp_res.metadata.subjects.all()
                    ]
                    gf_aggr.metadata.keywords = keywords

                    # create aggregation level xml files
                    gf_aggr.create_aggregation_xml_documents()
                    msg = 'One Geofeature aggregation was created in resource (ID: {})'
                    msg = msg.format(comp_res.short_id)
                    logger.info(msg)

            comp_res.save()
            # set resource to dirty so that resource level xml files (resource map and
            # metadata xml files) will be re-generated as part of next bag download
            try:
                set_dirty_bag_flag(comp_res)
            except Exception as ex:
                err_msg = 'Failed to set bag flag dirty for the converted resource (ID: {})'
                err_msg = err_msg.format(gf_res.short_id)
                # NOTE: ex.message was removed in Python 3; str(ex) works on both 2 and 3
                err_msg = err_msg + '\n' + str(ex)
                logger.error(err_msg)
                print("Error:>> {}".format(err_msg))

            resource_counter += 1
            # delete the instance of GeographicFeatureMetaData that was part of the original
            # geofeature resource
            gf_metadata_obj.delete()
            msg = 'Geofeature resource (ID: {}) was converted to Composite Resource type'
            msg = msg.format(comp_res.short_id)
            logger.info(msg)

        msg = "{} GEOFEATURE RESOURCES WERE CONVERTED TO COMPOSITE RESOURCE.".format(
            resource_counter)
        logger.info(msg)
        print(">> {}".format(msg))
        msg = "THERE ARE CURRENTLY {} GEOFEATURE RESOURCES AFTER CONVERSION.".format(
            GeographicFeatureResource.objects.all().count())
        logger.info(msg)
        if GeographicFeatureResource.objects.all().count() > 0:
            msg = "NOT ALL GEOFEATURE RESOURCES WERE CONVERTED TO COMPOSITE RESOURCE TYPE"
            logger.error(msg)
        print(">> {}".format(msg))
コード例 #4
0
 def normalize_metadata(metadata_str):
     """Prepares metadata string to match resource id and hydroshare url of original"""
     # NOTE(review): `res` is a free variable resolved from the enclosing
     # scope — presumably the resource being compared; confirm at call site.
     site_normalized = metadata_str.replace(
         current_site_url(), "http://www.hydroshare.org")
     return site_normalized.replace(
         res.short_id, "97523bdb7b174901b3fc2d89813458f1")
コード例 #5
0
    def handle(self, *args, **options):
        """Convert every RasterResource to a CompositeResource.

        Resources missing from irods (or with missing files) are skipped.
        When the resource has cell information and a .vrt file, a Raster
        aggregation is created on the composite resource and the raster
        specific metadata is migrated onto it.
        """
        logger = logging.getLogger(__name__)
        resource_counter = 0
        to_resource_type = 'CompositeResource'
        msg = "THERE ARE CURRENTLY {} RASTER RESOURCES PRIOR TO CONVERSION.".format(
            RasterResource.objects.all().count())
        logger.info(msg)
        print(">> {}".format(msg))

        for rast_res in RasterResource.objects.all():
            # check resource exists on irods
            istorage = rast_res.get_irods_storage()
            # reset per resource; set True only when a .vrt file is found below
            create_raster_aggregation = False
            if not istorage.exists(rast_res.root_path):
                err_msg = "Raster resource not found in irods (ID: {})".format(rast_res.short_id)
                logger.error(err_msg)
                print("Error:>> {}".format(err_msg))
                # skip this raster resource
                continue

            if rast_res.metadata.cellInformation is not None:
                # get the vrt file name which needs to be used to create a new folder for
                # raster aggregation
                vrt_file = None
                for res_file in rast_res.files.all():
                    if res_file.extension.lower() == '.vrt':
                        vrt_file = res_file
                        break

                create_raster_aggregation = vrt_file is not None
                if create_raster_aggregation:
                    # check resource files exist on irods
                    file_missing = False
                    for res_file in rast_res.files.all():
                        file_path = res_file.public_path
                        if not istorage.exists(file_path):
                            err_msg = "File path not found in irods:{}".format(file_path)
                            logger.error(err_msg)
                            err_msg = "Failed to convert raster resource (ID: {}). " \
                                      "Resource file is missing on irods".format(rast_res.short_id)
                            print("Error:>> {}".format(err_msg))
                            file_missing = True
                            break
                    if file_missing:
                        # skip this corrupt raster resource for migration
                        continue

            # change the resource_type
            ras_metadata_obj = rast_res.metadata
            rast_res.resource_type = to_resource_type
            rast_res.content_model = to_resource_type.lower()
            rast_res.save()
            # get the converted resource object - CompositeResource
            comp_res = rast_res.get_content_model()

            # set CoreMetaData object for the composite resource
            core_meta_obj = CoreMetaData.objects.create()
            comp_res.content_object = core_meta_obj
            # migrate raster resource core metadata elements to composite resource
            migrate_core_meta_elements(ras_metadata_obj, comp_res)

            # update url attribute of the metadata 'type' element
            type_element = comp_res.metadata.type
            type_element.url = '{0}/terms/{1}'.format(current_site_url(), to_resource_type)
            type_element.save()
            if create_raster_aggregation:
                # create a Raster aggregation
                ras_aggr = None
                try:
                    ras_aggr = GeoRasterLogicalFile.create(resource=comp_res)
                except Exception as ex:
                    err_msg = 'Failed to create raster aggregation for resource (ID: {})'
                    err_msg = err_msg.format(rast_res.short_id)
                    # NOTE: ex.message was removed in Python 3; str(ex) works on both 2 and 3
                    err_msg = err_msg + '\n' + str(ex)
                    logger.error(err_msg)
                    print("Error:>> {}".format(err_msg))

                if ras_aggr is not None:
                    # set aggregation dataset title
                    ras_aggr.dataset_name = comp_res.metadata.title.value
                    ras_aggr.save()
                    # make the res files part of the aggregation
                    for res_file in comp_res.files.all():
                        ras_aggr.add_resource_file(res_file)

                    # migrate raster specific metadata to aggregation
                    for bandinfo in ras_metadata_obj.bandInformations:
                        bandinfo.content_object = ras_aggr.metadata
                        bandinfo.save()

                    # create aggregation level spatial coverage element
                    # note - the resource level spatial coverage which is a core metadata
                    # element gets populated as part of raster resource creation
                    spatial_coverage = comp_res.metadata.spatial_coverage
                    if spatial_coverage:
                        aggr_coverage = Coverage()
                        aggr_coverage.type = spatial_coverage.type
                        aggr_coverage._value = spatial_coverage._value
                        aggr_coverage.content_object = ras_aggr.metadata
                        aggr_coverage.save()

                    org_coverage = ras_metadata_obj.originalCoverage
                    if org_coverage:
                        org_coverage.content_object = ras_aggr.metadata
                        org_coverage.save()

                    cell_info = ras_metadata_obj.cellInformation
                    if cell_info:
                        cell_info.content_object = ras_aggr.metadata
                        cell_info.save()

                    # create aggregation level xml files
                    ras_aggr.create_aggregation_xml_documents()
                    msg = 'One Raster aggregation was created in resource (ID: {})'
                    msg = msg.format(comp_res.short_id)
                    logger.info(msg)
            # set resource to dirty so that resource level xml files (resource map and
            # metadata xml files) will be re-generated as part of next bag download
            comp_res.save()
            try:
                set_dirty_bag_flag(comp_res)
            except Exception as ex:
                err_msg = 'Failed to set bag flag dirty for the converted resource (ID: {})'
                err_msg = err_msg.format(rast_res.short_id)
                # NOTE: ex.message was removed in Python 3; str(ex) works on both 2 and 3
                err_msg = err_msg + '\n' + str(ex)
                logger.error(err_msg)
                print("Error:>> {}".format(err_msg))

            resource_counter += 1
            # delete the instance of RasterMetaData that was part of the original raster resource
            ras_metadata_obj.delete()
            msg = 'Raster resource (ID: {}) was converted to Composite Resource type'
            msg = msg.format(comp_res.short_id)
            logger.info(msg)

        msg = "{} RASTER RESOURCES WERE CONVERTED TO COMPOSITE RESOURCE.".format(
            resource_counter)
        logger.info(msg)
        print(">> {}".format(msg))
        msg = "THERE ARE CURRENTLY {} RASTER RESOURCES AFTER CONVERSION.".format(
            RasterResource.objects.all().count())
        logger.info(msg)
        if RasterResource.objects.all().count() > 0:
            msg = "NOT ALL RASTER RESOURCES WERE CONVERTED TO COMPOSITE RESOURCE TYPE"
            logger.error(msg)
        print(">> {}".format(msg))
コード例 #6
0
    def handle(self, *args, **options):
        """Convert every TimeSeriesResource to a CompositeResource.

        Resources missing from irods (or with missing files) are skipped.
        When the resource holds a .sqlite file, a Timeseries aggregation is
        created on the composite resource and the timeseries-specific
        metadata is migrated onto that aggregation.
        """
        logger = logging.getLogger(__name__)
        resource_counter = 0
        to_resource_type = 'CompositeResource'
        msg = "THERE ARE CURRENTLY {} TIMESERIES RESOURCES PRIOR TO CONVERSION.".format(
            TimeSeriesResource.objects.all().count())
        logger.info(msg)
        print(">> {}".format(msg))

        for ts_res in TimeSeriesResource.objects.all():
            # check resource exists on irods
            istorage = ts_res.get_irods_storage()
            if not istorage.exists(ts_res.root_path):
                err_msg = "Timeseries resource not found in irods (ID: {})".format(
                    ts_res.short_id)
                logger.error(err_msg)
                print("Error:>> {}".format(err_msg))
                # skip this timeseries resource for migration
                continue

            # a valid timeseries resource has 1 (.sqlite) or 2 (.sqlite + .csv) files
            sqlite_file = None
            res_file_count = ts_res.files.count()
            if res_file_count == 1 or res_file_count == 2:
                for res_file in ts_res.files.all():
                    if res_file.extension.lower() == '.sqlite':
                        sqlite_file = res_file

            create_ts_aggregation = sqlite_file is not None
            if create_ts_aggregation:
                # check resource files exist on irods
                file_missing = False
                for res_file in ts_res.files.all():
                    file_path = res_file.public_path
                    if not istorage.exists(file_path):
                        err_msg = "File path not found in irods:{}".format(
                            file_path)
                        logger.error(err_msg)
                        err_msg = "Failed to convert timeseries resource (ID: {}). " \
                                  "Resource file is missing on irods".format(ts_res.short_id)
                        print("Error:>> {}".format(err_msg))
                        file_missing = True
                        break
                if file_missing:
                    # skip this corrupt timeseries resource for migration
                    continue

            # change the resource_type
            ts_metadata_obj = ts_res.metadata
            ts_res.resource_type = to_resource_type
            ts_res.content_model = to_resource_type.lower()
            ts_res.save()
            # get the converted resource object - CompositeResource
            comp_res = ts_res.get_content_model()

            # set CoreMetaData object for the composite resource
            core_meta_obj = CoreMetaData.objects.create()
            comp_res.content_object = core_meta_obj

            # migrate timeseries resource core metadata elements to composite resource
            migrate_core_meta_elements(ts_metadata_obj, comp_res)

            # update url attribute of the metadata 'type' element
            type_element = comp_res.metadata.type
            type_element.url = '{0}/terms/{1}'.format(current_site_url(),
                                                      to_resource_type)
            type_element.save()
            if create_ts_aggregation:
                # create a Timeseries aggregation
                ts_aggr = None
                try:
                    ts_aggr = TimeSeriesLogicalFile.create(resource=comp_res)
                except Exception as ex:
                    err_msg = 'Failed to create Timeseries aggregation for resource (ID: {})'
                    err_msg = err_msg.format(ts_res.short_id)
                    # NOTE: ex.message was removed in Python 3; str(ex) works on both 2 and 3
                    err_msg = err_msg + '\n' + str(ex)
                    logger.error(err_msg)
                    print("Error:>> {}".format(err_msg))

                if ts_aggr is not None:
                    # set aggregation dataset title
                    ts_aggr.dataset_name = comp_res.metadata.title.value
                    ts_aggr.save()
                    # make the res files part of the aggregation
                    for res_file in comp_res.files.all():
                        ts_aggr.add_resource_file(res_file)

                    # migrate timeseries specific metadata to aggregation
                    for site in ts_metadata_obj.sites:
                        site.content_object = ts_aggr.metadata
                        site.save()
                    for variable in ts_metadata_obj.variables:
                        variable.content_object = ts_aggr.metadata
                        variable.save()
                    for method in ts_metadata_obj.methods:
                        method.content_object = ts_aggr.metadata
                        method.save()
                    for proc_level in ts_metadata_obj.processing_levels:
                        proc_level.content_object = ts_aggr.metadata
                        proc_level.save()
                    for ts_result in ts_metadata_obj.time_series_results:
                        ts_result.content_object = ts_aggr.metadata
                        ts_result.save()

                    # create aggregation level coverage elements
                    for coverage in comp_res.metadata.coverages.all():
                        aggr_coverage = Coverage()
                        aggr_coverage.type = coverage.type
                        aggr_coverage._value = coverage._value
                        aggr_coverage.content_object = ts_aggr.metadata
                        aggr_coverage.save()

                    utc_offset = ts_metadata_obj.utc_offset
                    if utc_offset:
                        utc_offset.content_object = ts_aggr.metadata
                        utc_offset.save()

                    ts_aggr.metadata.value_counts = ts_metadata_obj.value_counts
                    ts_aggr.metadata.save()

                    # create aggregation level keywords
                    keywords = [
                        sub.value for sub in comp_res.metadata.subjects.all()
                    ]
                    ts_aggr.metadata.keywords = keywords
                    # set aggregation metadata dirty status to that of the timeseries resource
                    # metadata dirty status - this would trigger netcdf file update for the
                    # new aggregation if metadata is dirty
                    ts_aggr.metadata.is_dirty = ts_metadata_obj.is_dirty
                    ts_aggr.metadata.save()
                    # create aggregation level xml files
                    ts_aggr.create_aggregation_xml_documents()
                    msg = 'One Timeseries aggregation was created in resource (ID: {})'
                    msg = msg.format(comp_res.short_id)
                    logger.info(msg)

            comp_res.save()
            # set resource to dirty so that resource level xml files (resource map and
            # metadata xml files) will be re-generated as part of next bag download
            try:
                set_dirty_bag_flag(comp_res)
            except Exception as ex:
                err_msg = 'Failed to set bag flag dirty for the converted resource (ID: {})'
                err_msg = err_msg.format(ts_res.short_id)
                # NOTE: ex.message was removed in Python 3; str(ex) works on both 2 and 3
                err_msg = err_msg + '\n' + str(ex)
                logger.error(err_msg)
                print("Error:>> {}".format(err_msg))

            resource_counter += 1
            # delete the instance of TimeSeriesMetaData that was part of the original
            # timeseries resource
            ts_metadata_obj.delete()
            msg = 'Timeseries resource (ID: {}) was converted to Composite Resource type'
            msg = msg.format(comp_res.short_id)
            logger.info(msg)

        msg = "{} TIMESERIES RESOURCES WERE CONVERTED TO COMPOSITE RESOURCE.".format(
            resource_counter)
        logger.info(msg)
        print(">> {}".format(msg))
        msg = "THERE ARE CURRENTLY {} TIMESERIES RESOURCES AFTER CONVERSION.".format(
            TimeSeriesResource.objects.all().count())
        logger.info(msg)
        if TimeSeriesResource.objects.all().count() > 0:
            msg = "NOT ALL TIMESERIES RESOURCES WERE CONVERTED TO COMPOSITE RESOURCE TYPE"
            logger.error(msg)
        print(">> {}".format(msg))