Example #1
0
    def _generic_load_ccems(self, stride=None):
        '''Load all files for one CCE mooring station platform.

        Platform-specific settings (base URL, file names, parameters to load,
        nominal depth, and optional start/end datetimes) are looked up as
        attributes on ``self`` named ``<plt_name>_base``, ``<plt_name>_files``,
        etc., then each file is loaded with a Mooring_Loader.
        '''
        # Generalize attribute value lookup
        # NOTE(review): `name` is not defined in this method -- presumably a
        # module-level variable (or one set by the caller) holding the calling
        # load method's name, e.g. 'load_ms1'; confirm against the full file.
        plt_name = name.split('_')[1]
        platformName = plt_name[-3:].upper()
        base = getattr(self, plt_name + '_base')
        files = getattr(self, plt_name + '_files')
        parms = getattr(self, plt_name + '_parms')
        nominal_depth = getattr(self, plt_name + '_nominal_depth')
        start_datetime = getattr(self, plt_name + '_start_datetime', None)
        end_datetime = getattr(self, plt_name + '_end_datetime', None)

        stride = stride or self.stride
        for (aName, f) in zip([a + getStrideText(stride) for a in files], files):
            url = os.path.join(base, f)

            # Monkeypatch featureType depending on file name (or parms...).
            # Patch once, before the loader is constructed, with the full
            # condition: the original code patched twice, and the first
            # (adcp-only) patch was immediately overwritten by this
            # adcp-or-aquadopp condition before process_data() ran.
            if 'adcp' in f.lower() or 'aquadopp' in f.lower():
                Mooring_Loader.getFeatureType = lambda self: 'timeseriesprofile'
            else:
                Mooring_Loader.getFeatureType = lambda self: 'timeseries'

            loader = Mooring_Loader(url = url, 
                                    campaignName = self.campaignName,
                                    campaignDescription = self.campaignDescription,
                                    dbAlias = self.dbAlias,
                                    activityName = aName,
                                    activitytypeName = 'Mooring Deployment',
                                    platformName = platformName,
                                    platformColor = self.colors[plt_name],
                                    platformTypeName = 'mooring',
                                    stride = stride,
                                    startDatetime = start_datetime,
                                    endDatetime = end_datetime,
                                    dataStartDatetime = None)

            loader.include_names = parms
            loader.auxCoords = {}

            for p in parms:
                # The timeseries variables 'Hdg_1215', 'Ptch_1216', 'Roll_1217' should have a coordinate of
                # a singleton depth variable, but EPIC files have this as a sensor_depth variable attribute.
                # Need special handling in the loader for these data.
                # Use if/elif/else: the original's second bare `if` caused the
                # else branch to redundantly reassign the ADCP velocity vars.
                if p in ('u_1205', 'v_1206', 'w_1204', 'AGC_1202'):
                    loader.auxCoords[p] = {'time': 'time', 'latitude': 'lat', 'longitude': 'lon', 'depth': 'depth'}
                elif p in ('Hdg_1215', 'Ptch_1216', 'Roll_1217'):
                    # No 'depth' coordinate here -- the loader handles these specially
                    loader.auxCoords[p] = {'time': 'time', 'latitude': 'lat', 'longitude': 'lon'}
                else:
                    loader.auxCoords[p] = {'time': 'time', 'latitude': 'lat',
                                           'longitude': 'lon', 'depth': 'depth'}
            loader.process_data()
Example #2
0
    def loadCCESIN(self, stride=None):
        '''
        Mooring CCESIN specific load functions
        '''
        platformName = 'CCESIN'
        stride = stride or self.stride
        # Optional deployment window; absent attributes mean "no limit"
        start_dt = getattr(self, 'ccesin_start_datetime', None)
        end_dt = getattr(self, 'ccesin_end_datetime', None)

        activity_names = [fn + getStrideText(stride) for fn in self.ccesin_files]
        for activity_name, fname in zip(activity_names, self.ccesin_files):
            loader = Mooring_Loader(url=os.path.join(self.ccesin_base, fname),
                                    campaignName=self.campaignName,
                                    campaignDescription=self.campaignDescription,
                                    dbAlias=self.dbAlias,
                                    activityName=activity_name,
                                    activitytypeName='Mooring Deployment',
                                    platformName=platformName,
                                    platformColor=self.colors[platformName.lower()],
                                    platformTypeName='mooring',
                                    stride=stride,
                                    startDatetime=start_dt,
                                    endDatetime=end_dt,
                                    command_line_args=self.args)

            loader.include_names = self.ccesin_parms
            lowered = fname.lower()
            if 'adcp' in lowered or 'aquadopp' in lowered:
                Mooring_Loader.getFeatureType = lambda self: 'timeseriesprofile'
                # The timeseries variables 'Hdg_1215', 'Ptch_1216', 'Roll_1217' should have a
                # coordinate of a singleton depth variable, but EPIC files carry this as a
                # sensor_depth variable attribute.  The loader handles these data specially.
                coords = {}
                for p in ('u_1205', 'v_1206', 'w_1204', 'AGC_1202'):
                    coords[p] = {'time': 'time', 'latitude': 'lat', 'longitude': 'lon', 'depth': 'depth'}
                for p in ('Hdg_1215', 'Ptch_1216', 'Roll_1217'):
                    coords[p] = {'time': 'time', 'latitude': 'lat', 'longitude': 'lon'}
                loader.auxCoords = coords
            else:
                Mooring_Loader.getFeatureType = lambda self: 'timeseries'
                loader.auxCoords = {}

            # Skip files that contain no valid data rather than aborting the load
            try:
                loader.process_data()
            except NoValidData as e:
                self.logger.info(str(e))
                continue

            # For timeseriesProfile data we need to pass the nominal depth of the platform
            # so that the model is put at the correct depth in the Spatial -> 3D view.
            try:
                self.addPlatformResources('https://stoqs.mbari.org/x3d/cce_bin_assem/cce_bin_assem_src_scene.x3d',
                                          platformName, nominaldepth=self.ccesin_nominaldepth)
            except AttributeError:
                # No ccesin_nominaldepth configured; attach the model without it
                self.addPlatformResources('https://stoqs.mbari.org/x3d/cce_bin_assem/cce_bin_assem_src_scene.x3d',
                                          platformName)
Example #3
0
    def loadCCESIN(self, stride=None):
        '''
        Mooring CCESIN specific load functions

        Loads each CCESIN file with a Mooring_Loader, monkeypatching the
        featureType for ADCP/Aquadopp files, then attaches the X3D platform
        model resource.
        '''
        platformName = 'CCESIN'
        stride = stride or self.stride
        for (aName, f) in zip([ a + getStrideText(stride) for a in self.ccesin_files], self.ccesin_files):
            url = os.path.join(self.ccesin_base, f)
            # Optional deployment window; absent attributes mean "no limit"
            ccesin_start_datetime = getattr(self, 'ccesin_start_datetime', None)
            ccesin_end_datetime = getattr(self, 'ccesin_end_datetime', None)

            loader = Mooring_Loader(url = url, 
                                    campaignName = self.campaignName,
                                    campaignDescription = self.campaignDescription,
                                    dbAlias = self.dbAlias,
                                    activityName = aName,
                                    activitytypeName = 'Mooring Deployment',
                                    platformName = platformName,
                                    platformColor = self.colors[platformName.lower()],
                                    platformTypeName = 'mooring',
                                    stride = stride,
                                    startDatetime = ccesin_start_datetime,
                                    endDatetime = ccesin_end_datetime,
                                    command_line_args = self.args)

            loader.include_names = self.ccesin_parms
            loader.auxCoords = {}
            if 'adcp' in f.lower() or 'aquadopp' in f.lower():
                Mooring_Loader.getFeatureType = lambda self: 'timeseriesprofile'
                # The timeseries variables 'Hdg_1215', 'Ptch_1216', 'Roll_1217' should have a coordinate of
                # a singleton depth variable, but EPIC files have this as a sensor_depth variable attribute.
                # Need special handling in the loader for these data.
                for p in ['u_1205', 'v_1206', 'w_1204', 'AGC_1202']:
                    loader.auxCoords[p] = {'time': 'time', 'latitude': 'lat', 'longitude': 'lon', 'depth': 'depth'}
                for p in ['Hdg_1215', 'Ptch_1216', 'Roll_1217']:
                    loader.auxCoords[p] = {'time': 'time', 'latitude': 'lat', 'longitude': 'lon'}
            else:
                Mooring_Loader.getFeatureType = lambda self: 'timeseries'

            # Guard against files with no valid data so a single bad file does
            # not abort the whole campaign load (consistent with the other
            # mooring load methods in this file)
            try:
                loader.process_data()
            except NoValidData as e:
                self.logger.info(str(e))
                continue

            # For timeseriesProfile data we need to pass the nominal depth of the platform
            # so that the model is put at the correct depth in the Spatial -> 3D view.
            try:
                self.addPlatformResources('https://stoqs.mbari.org/x3d/cce_bin_assem/cce_bin_assem_src_scene.x3d',
                                          platformName, nominaldepth=self.ccesin_nominaldepth)
            except AttributeError:
                # No ccesin_nominaldepth configured; attach the model without it
                self.addPlatformResources('https://stoqs.mbari.org/x3d/cce_bin_assem/cce_bin_assem_src_scene.x3d',
                                          platformName)
Example #4
0
    def loadCCEBIN(self, stride=None):
        '''
        Mooring CCEBIN specific load functions

        Loads each configured CCEBIN file with a Mooring_Loader; when run with
        --append, resumes loading from the last time value already stored in
        the database for the Activity.
        '''
        platformName = 'CCEBIN'
        stride = stride or self.stride
        for (aName, f) in zip([ a + getStrideText(stride) for a in self.ccebin_files], self.ccebin_files):
            url = os.path.join(self.ccebin_base, f)

            dataStartDatetime = None
            if self.args.append:
                # Return datetime of last timevalue - if data are loaded from multiple 
                # activities return the earliest last datetime value
                dataStartDatetime = InstantPoint.objects.using(self.dbAlias).filter(
                                                activity__name=aName).aggregate(
                                                Max('timevalue'))['timevalue__max']
                if dataStartDatetime:
                    # Subtract an hour to fill in missing_values at end from previous load
                    dataStartDatetime = dataStartDatetime - timedelta(seconds=3600)

            loader = Mooring_Loader(url = url, 
                                    campaignName = self.campaignName,
                                    campaignDescription = self.campaignDescription,
                                    dbAlias = self.dbAlias,
                                    activityName = aName,
                                    activitytypeName = 'Mooring Deployment',
                                    platformName = platformName,
                                    platformColor = self.colors[platformName.lower()],
                                    platformTypeName = 'mooring',
                                    stride = stride,
                                    startDatetime = self.ccebin_start_datetime,
                                    endDatetime = self.ccebin_end_datetime,
                                    dataStartDatetime = dataStartDatetime)

            loader.include_names = self.ccebin_parms
            loader.auxCoords = {}
            if 'adcp' in f.lower():
                # ADCP files hold profiles; monkey-patch the featureType and give all
                # ADCP variables the full set of EPIC coordinate names
                Mooring_Loader.getFeatureType = lambda self: 'timeseriesprofile'
                for p in ['u_1205', 'v_1206', 'w_1204', 'AGC_1202', 'Hdg_1215', 'Ptch_1216', 'Roll_1217']:
                    loader.auxCoords[p] = {'time': 'time', 'latitude': 'lat', 'longitude': 'lon', 'depth': 'depth'}
            else:
                Mooring_Loader.getFeatureType = lambda self: 'timeseries'

            loader.process_data()

            # For timeseriesProfile data we need to pass the nominaldepth of the platform
            # so that the model is put at the correct depth in the Spatial -> 3D view.
            try:
                self.addPlatformResources('http://stoqs.mbari.org/x3d/cce_bin_assem/cce_bin_assem_src_scene.x3d',
                                          platformName, nominaldepth=self.ccebin_nominaldepth)
            except AttributeError:
                # No ccebin_nominaldepth attribute configured; attach the model without it
                self.addPlatformResources('http://stoqs.mbari.org/x3d/cce_bin_assem/cce_bin_assem_src_scene.x3d',
                                          platformName)
Example #5
0
    def _generic_load_ccems(self, stride=None):
        '''Load every file configured for one CCE mooring station platform.

        Settings are read from attributes on ``self`` named
        ``<plt_name>_base``, ``<plt_name>_files``, ``<plt_name>_parms``, etc.
        '''
        # Generalize attribute value lookup
        plt_name = name.split('_')[1]
        platformName = plt_name[-3:].upper()

        def plt_attr(suffix):
            # Per-platform settings live in attributes like '<plt_name>_base'
            return getattr(self, plt_name + suffix)

        base = plt_attr('_base')
        files = plt_attr('_files')
        parms = plt_attr('_parms')
        nominal_depth = plt_attr('_nominal_depth')
        start_datetime = plt_attr('_start_datetime')
        end_datetime = plt_attr('_end_datetime')

        stride = stride or self.stride
        stride_text = getStrideText(stride)
        for activity_name, fname in zip([f + stride_text for f in files], files):
            url = os.path.join(base, fname)

            # Monkeypatch featureType depending on file name (or parms...)
            if 'adcp' in fname.lower():
                Mooring_Loader.getFeatureType = lambda self: 'timeseriesprofile'
            else:
                Mooring_Loader.getFeatureType = lambda self: 'timeseries'

            loader = Mooring_Loader(url=url,
                                    campaignName=self.campaignName,
                                    campaignDescription=self.campaignDescription,
                                    dbAlias=self.dbAlias,
                                    activityName=activity_name,
                                    activitytypeName='Mooring Deployment',
                                    platformName=platformName,
                                    platformColor=self.colors[plt_name],
                                    platformTypeName='mooring',
                                    stride=stride,
                                    startDatetime=start_datetime,
                                    endDatetime=end_datetime,
                                    dataStartDatetime=None)

            loader.include_names = parms
            # Every parameter uses the same EPIC coordinate variable names
            loader.auxCoords = {p: {'time': 'time', 'latitude': 'lat',
                                    'longitude': 'lon', 'depth': 'depth'}
                                for p in parms}
            loader.process_data()
Example #6
0
    def _generic_load_ccems(self, stride=None):
        '''Load all files for one CCE mooring station platform.

        Per-platform settings are read from attributes on ``self`` named
        ``<plt_name>_base``, ``<plt_name>_files``, ``<plt_name>_parms``, etc.,
        and each file is loaded with a Mooring_Loader.
        '''
        # Generalize attribute value lookup
        # NOTE(review): `name` is not defined in this method -- presumably a
        # module-level variable (or one set by the caller) holding the calling
        # load method's name; confirm against the full file.
        plt_name = name.split('_')[1]
        platformName = plt_name[-3:].upper()
        base = getattr(self, plt_name + '_base')
        files = getattr(self, plt_name + '_files')
        parms = getattr(self, plt_name + '_parms')
        # nominal_depth is looked up but not used further in this method
        nominal_depth = getattr(self, plt_name + '_nominal_depth')
        start_datetime = getattr(self, plt_name + '_start_datetime')
        end_datetime = getattr(self, plt_name + '_end_datetime')

        stride = stride or self.stride
        for (aName, f) in zip([ a + getStrideText(stride) for a in files], files):
            url = os.path.join(base, f)

            # Monkeypatch featureType depending on file name (or parms...)
            if 'adcp' in f.lower():
                Mooring_Loader.getFeatureType = lambda self: 'timeseriesprofile'
            else:
                Mooring_Loader.getFeatureType = lambda self: 'timeseries'

            loader = Mooring_Loader(url = url, 
                                    campaignName = self.campaignName,
                                    campaignDescription = self.campaignDescription,
                                    dbAlias = self.dbAlias,
                                    activityName = aName,
                                    activitytypeName = 'Mooring Deployment',
                                    platformName = platformName,
                                    platformColor = self.colors[plt_name],
                                    platformTypeName = 'mooring',
                                    stride = stride,
                                    startDatetime = start_datetime,
                                    endDatetime = end_datetime,
                                    dataStartDatetime = None)

            loader.include_names = parms
            loader.auxCoords = {}
            # Every parameter gets the same EPIC coordinate variable names
            for p in parms:
                loader.auxCoords[p] = {'time': 'time', 'latitude': 'lat',
                                       'longitude': 'lon', 'depth': 'depth'}
            loader.process_data()
Example #7
0
    def loadStationData(self, stride=1):
        '''Crawl the OceanSITES Mooring data TDS for OPeNDAP links and load into STOQS.

        First collects all OPeNDAP endpoints from the configured dataSets and
        assigns platform colors by platform type name, then loads each url
        with a Mooring_Loader, applying per-dataset fixes for non-standard
        metadata.
        '''
        urls = []
        strides = {}
        for dataSet in self.dataSets:
            c = Crawl(dataSet[0], select=dataSet[1], debug=self.args.verbose)
            dsUrls = [s.get("url") for d in c.datasets for s in d.services if s.get("service").lower() == "opendap"]
            for dsu in dsUrls:
                strides[dsu] = dataSet[2]
            urls += dsUrls

        # First pass through urls matching OceanSITES pattern to collect platform names to get colors
        # Use OceanSITES naming convention for platform "OS_<platformName>_xxx_R|D_<type>.nc"
        pNames = set()
        platformTypeNames = set()
        for url in urls:
            platformTypeNames.add(url.split('/')[-2])
            if url.find('MOVE1_') != -1:
                # Special hack for MOVE PlatformCode
                newUrl = url.replace('MOVE1_', 'MOVE1-')
                pNames.add(newUrl.split('/')[-1].split('.')[0].split('_')[1])
            else:
                pNames.add(url.split('/')[-1].split('.')[0].split('_')[1])

        # Assign colors by platformTypeName
        pColors = {}
        for ptName, color in zip(sorted(platformTypeNames), self.getColor(len(platformTypeNames))):
            pColors[ptName] = color

        # Now loop again, this time loading the data
        for url in urls:
            logger.info("Executing runMooringLoader with url = %s", url)
            if self.args.optimal_stride and strides[url]:
                stride = strides[url]
            elif self.args.test:
                stride = strides[url] * 2

            fixedUrl = url
            if url.find('OS_IMOS-EAC_EAC') != -1:
                # Special fix to get platform name
                fixedUrl = url.replace('OS_IMOS-EAC_EAC', 'OS_IMOS-EAC-EAC')

            if stride > 1:
                aName = fixedUrl.split('/')[-1].split('.')[0] + '(stride=%d)' % stride
            else:
                aName = fixedUrl.split('/')[-1].split('.')[0]

            pName = aName.split('_')[1]
            ptName = url.split('/')[-2]

            logger.debug("Instantiating Mooring_Loader for url = %s", url)
            ml = Mooring_Loader(
                url = url,
                campaignName = self.campaignName,
                campaignDescription = self.campaignDescription,
                dbAlias = self.dbAlias,
                activityName = aName,
                activitytypeName = 'Mooring Deployment',
                platformName = pName,
                platformColor = pColors[ptName],
                platformTypeName = ptName,
                stride = stride,
                startDatetime = self.startDatetime,
                dataStartDatetime = None,
                endDatetime = self.endDatetime)

            # Special fixes for non standard metadata and if files don't contain the standard TEMP and PSAL parameters
            if url.find('MBARI-') != -1:
                ml.include_names = ['TEMP', 'PSAL']
                ml.auxCoords = {}
                for v in ml.include_names:
                    ml.auxCoords[v] = {'time': 'TIME', 'latitude': 'LATITUDE', 'longitude': 'LONGITUDE', 'depth': 'DEPTH'}
            elif url.find('OS_PAPA_2009PA003_D_CTD_10min') != -1:
                ml.include_names = ['TEMP']
            elif url.find('OS_PAPA_2009PA003_D_PSAL_1hr') != -1:
                ml.include_names = ['PSAL']
            elif url.find('OS_SOTS_SAZ-15-2012_D_microcat-4422m') != -1:
                ml.include_names = ['TEMP', 'PSAL']
                # DEPTH_CN_PR_PS_TE coordinate missing standard_name attribute
                ml.auxCoords = {}
                for v in ml.include_names:
                    ml.auxCoords[v] = {'time': 'TIME', 'latitude': 'LATITUDE', 'longitude': 'LONGITUDE', 'depth': 'DEPTH_CN_PR_PS_TE'}
                # Only global attribute is 'cdm_data_type: Time-series'; monkey-patch the method
                Mooring_Loader.getFeatureType = lambda self: 'timeseries'
            elif url.find('D_MICROCAT-PART') != -1:
                ml.include_names = ['TEMP', 'PSAL']
                ml.auxCoords = {}
                for v in ml.include_names:
                    ml.auxCoords[v] = {'time': 'TIME', 'latitude': 'LATITUDE', 'longitude': 'LONGITUDE', 'depth': 'DEPTH'}
            elif url.find('D_RDI-WORKHORSE-ADCP-') != -1:
                ml.include_names = ['UCUR', 'VCUR', 'WCUR']
                ml.auxCoords = {}
                for v in ml.include_names:
                    ml.auxCoords[v] = {'time': 'TIME', 'latitude': 'LATITUDE', 'longitude': 'LONGITUDE', 'depth': 'HEIGHT_ABOVE_SENSOR'}
                # Metadata in file states 'timeseries', but it's really something different; monkey-patch the getFeatureType() method
                Mooring_Loader.getFeatureType = lambda self: 'trajectoryprofile'
            elif url.find('TVSM_dy.nc') != -1:
                ml.include_names = ['TEMP', 'PSAL']
                ml.auxCoords = {}
                for v in ('UCUR', 'VCUR', 'CSPD', 'CDIR'):
                    ml.auxCoords[v] = {'time': 'TIME', 'latitude': 'LATITUDE', 'longitude': 'LONGITUDE', 'depth': 'DEPCUR'}
                for v in ('TEMP',):
                    ml.auxCoords[v] = {'time': 'TIME', 'latitude': 'LATITUDE', 'longitude': 'LONGITUDE', 'depth': 'DEPTH'}
                for v in ('PSAL',):
                    ml.auxCoords[v] = {'time': 'TIME', 'latitude': 'LATITUDE', 'longitude': 'LONGITUDE', 'depth': 'DEPPSAL'}
                # These PIRATA daily files are timeSeriesProfile which has no featureType attribute
                Mooring_Loader.getFeatureType = lambda self: 'timeseriesprofile'
            elif url.find('CCE') != -1:
                ml.include_names = ['TEMP', 'PSAL']
                ml.auxCoords = {}
                for v in ml.include_names:
                    ml.auxCoords[v] = {'time': 'TIME', 'latitude': 'LATITUDE', 'longitude': 'LONGITUDE', 'depth': 'DEPTH'}
            elif url.find('NOG') != -1:
                ml.include_names = ['TEMP', 'PSAL']
                Mooring_Loader.getFeatureType = lambda self: 'timeseries'
            elif url.find('Stratus') != -1:
                # Variable attribute coordinates: TIME, DEPTH, LATITUDE, LONGITUDE; it should not contain commas
                ml.include_names = ['TEMP', 'PSAL']
                ml.auxCoords = {}
                for v in ml.include_names:
                    ml.auxCoords[v] = {'time': 'TIME', 'latitude': 'LATITUDE', 'longitude': 'LONGITUDE', 'depth': 'DEPTH'}
            else:
                ml.include_names = ['TEMP', 'PSAL']

            try:
                (nMP, path, parmCountHash) = ml.process_data()
                logger.debug("Loaded Activity with name = %s", aName)
            # Fixed Python 2 'except NoValidData, e:' syntax, which is a
            # SyntaxError in Python 3
            except NoValidData as e:
                logger.warning(e)
Example #8
0
    def loadStationData(self, stride=1):
        '''Crawl the OceanSITES Mooring data TDS for OPeNDAP links and load into STOQS.

        First collects all OPeNDAP endpoints from the configured dataSets and
        assigns platform colors by platform type name, then loads each url
        with a Mooring_Loader, applying per-dataset fixes for non-standard
        metadata.
        '''
        urls = []
        strides = {}
        for dataSet in self.dataSets:
            c = Crawl(dataSet[0], select=dataSet[1], debug=self.args.verbose)
            dsUrls = [
                s.get("url") for d in c.datasets for s in d.services
                if s.get("service").lower() == "opendap"
            ]
            for dsu in dsUrls:
                strides[dsu] = dataSet[2]
            urls += dsUrls

        # First pass through urls matching OceanSITES pattern to collect platform names to get colors
        # Use OceanSITES naming convention for platform "OS_<platformName>_xxx_R|D_<type>.nc"
        pNames = set()
        platformTypeNames = set()
        for url in urls:
            platformTypeNames.add(url.split('/')[-2])
            if url.find('MOVE1_') != -1:
                # Special hack for MOVE PlatformCode
                newUrl = url.replace('MOVE1_', 'MOVE1-')
                pNames.add(newUrl.split('/')[-1].split('.')[0].split('_')[1])
            else:
                pNames.add(url.split('/')[-1].split('.')[0].split('_')[1])

        # Assign colors by platformTypeName
        pColors = {}
        for ptName, color in zip(sorted(platformTypeNames),
                                 self.getColor(len(platformTypeNames))):
            pColors[ptName] = color

        def _aux(depth_var):
            # Standard OceanSITES coordinate mapping, parameterized on the
            # depth variable name -- collapses the repeated dict literals below
            return {'time': 'TIME', 'latitude': 'LATITUDE',
                    'longitude': 'LONGITUDE', 'depth': depth_var}

        # Now loop again, this time loading the data
        for url in urls:
            logger.info("Executing runMooringLoader with url = %s", url)
            if self.args.optimal_stride and strides[url]:
                stride = strides[url]
            elif self.args.test:
                stride = strides[url] * 2

            fixedUrl = url
            if url.find('OS_IMOS-EAC_EAC') != -1:
                # Special fix to get platform name
                fixedUrl = url.replace('OS_IMOS-EAC_EAC', 'OS_IMOS-EAC-EAC')

            if stride > 1:
                aName = fixedUrl.split('/')[-1].split(
                    '.')[0] + '(stride=%d)' % stride
            else:
                aName = fixedUrl.split('/')[-1].split('.')[0]

            pName = aName.split('_')[1]
            ptName = url.split('/')[-2]

            logger.debug("Instantiating Mooring_Loader for url = %s", url)
            try:
                ml = Mooring_Loader(
                    url=url,
                    campaignName=self.campaignName,
                    campaignDescription=self.campaignDescription,
                    dbAlias=self.dbAlias,
                    activityName=aName,
                    activitytypeName='Mooring Deployment',
                    platformName=pName,
                    platformColor=pColors[ptName],
                    platformTypeName=ptName,
                    stride=stride,
                    startDatetime=self.startDatetime,
                    dataStartDatetime=None,
                    endDatetime=self.endDatetime)
            except UnicodeDecodeError as e:
                # logger.warn() is deprecated -- use warning()
                logger.warning(str(e))
                logger.warning(f'Cannot read data from {url}')
                continue

            # Special fixes for non standard metadata and if files don't contain the standard TEMP and PSAL parameters
            if url.find('MBARI-') != -1:
                ml.include_names = ['TEMP', 'PSAL']
                ml.auxCoords = {v: _aux('DEPTH') for v in ml.include_names}
            elif url.find('OS_PAPA_2009PA003_D_CTD_10min') != -1:
                ml.include_names = ['TEMP']
            elif url.find('OS_PAPA_2009PA003_D_PSAL_1hr') != -1:
                ml.include_names = ['PSAL']
            elif url.find('OS_SOTS_SAZ-15-2012_D_microcat-4422m') != -1:
                ml.include_names = ['TEMP', 'PSAL']
                # DEPTH_CN_PR_PS_TE coordinate missing standard_name attribute
                ml.auxCoords = {v: _aux('DEPTH_CN_PR_PS_TE') for v in ml.include_names}
                # Only global attribute is 'cdm_data_type: Time-series'; monkey-patch the method
                Mooring_Loader.getFeatureType = lambda self: 'timeseries'
            elif url.find('D_MICROCAT-PART') != -1:
                ml.include_names = ['TEMP', 'PSAL']
                ml.auxCoords = {v: _aux('DEPTH') for v in ml.include_names}
            elif url.find('D_RDI-WORKHORSE-ADCP-') != -1:
                ml.include_names = ['UCUR', 'VCUR', 'WCUR']
                ml.auxCoords = {v: _aux('HEIGHT_ABOVE_SENSOR') for v in ml.include_names}
                # Metadata in file states 'timeseries', but it's really something different; monkey-patch the getFeatureType() method
                Mooring_Loader.getFeatureType = lambda self: 'trajectoryprofile'
            elif url.find('TVSM_dy.nc') != -1:
                ml.include_names = ['TEMP', 'PSAL']
                # Current variables use DEPCUR; TEMP and PSAL have their own depth variables
                ml.auxCoords = {v: _aux('DEPCUR') for v in ('UCUR', 'VCUR', 'CSPD', 'CDIR')}
                ml.auxCoords['TEMP'] = _aux('DEPTH')
                ml.auxCoords['PSAL'] = _aux('DEPPSAL')
                # These PIRATA daily files are timeSeriesProfile which has no featureType attribute
                Mooring_Loader.getFeatureType = lambda self: 'timeseriesprofile'
            elif url.find('CCE') != -1:
                ml.include_names = ['TEMP', 'PSAL']
                ml.auxCoords = {v: _aux('DEPTH') for v in ml.include_names}
            elif url.find('NOG') != -1:
                ml.include_names = ['TEMP', 'PSAL']
                Mooring_Loader.getFeatureType = lambda self: 'timeseries'
            elif url.find('Stratus') != -1:
                # Variable attribute coordinates: TIME, DEPTH, LATITUDE, LONGITUDE; it should not contain commas
                ml.include_names = ['TEMP', 'PSAL']
                ml.auxCoords = {v: _aux('DEPTH') for v in ml.include_names}
            else:
                ml.include_names = ['TEMP', 'PSAL']

            try:
                (nMP, path, parmCountHash) = ml.process_data()
                logger.debug("Loaded Activity with name = %s", aName)
            except NoValidData as e:
                logger.warning(e)
Example #9
0
    def _generic_load_ccems(self, stride=None):
        '''Load mooring data files for one CCE/MS platform.

        Platform settings are looked up by naming convention from attributes
        of self: <plt_name>_base, <plt_name>_files, <plt_name>_parms,
        <plt_name>_nominal_depth, and optional <plt_name>_start_datetime /
        <plt_name>_end_datetime.

        NOTE(review): `name` is not defined in this method's scope; it is
        presumably bound by an enclosing closure that generates this method
        per platform -- confirm before refactoring further.
        '''
        # Generalize attribute value lookup
        plt_name = name.split('_')[1]
        platformName = plt_name[-3:].upper()
        base = getattr(self, plt_name + '_base')
        files = getattr(self, plt_name + '_files')
        parms = getattr(self, plt_name + '_parms')
        # Value unused below, but the getattr() asserts the attribute exists
        nominal_depth = getattr(self, plt_name + '_nominal_depth')
        start_datetime = getattr(self, plt_name + '_start_datetime', None)
        end_datetime = getattr(self, plt_name + '_end_datetime', None)

        stride = stride or self.stride
        for (aName, f) in zip([ a + getStrideText(stride) for a in files], files):
            # Use a per-file stride so that overriding it for one file does not
            # leak into subsequent files.  (Previously `stride` itself was
            # reassigned, silently forcing stride=1 on every file loaded after
            # the first '_ProcessedWaves' file.)
            file_stride = stride
            if '_ProcessedWaves' in f:
                if file_stride != 1:
                    # Most files are high time frequency ADCP data, so accept the generic stride
                    # Override for files like MBCCE_MS0_AWAC_20160408_ProcessedWaves.nc, which has 2 hour sampling
                    file_stride = 1
                    aName = f
                    self.logger.info(f"Overriding stride -> = {file_stride} for file {f}")
                else:
                    self.logger.info(f"Skipping {f} with stride = 1 as the entire 2 hour data set is loaded")

            url = os.path.join(base, f)

            # Monkeypatch featureType depending on file name (or parms...)
            if 'adcp' in f.lower():
                Mooring_Loader.getFeatureType = lambda self: 'timeseriesprofile'
            else:
                Mooring_Loader.getFeatureType = lambda self: 'timeseries'

            loader = Mooring_Loader(url = url, 
                                    campaignName = self.campaignName,
                                    campaignDescription = self.campaignDescription,
                                    dbAlias = self.dbAlias,
                                    activityName = aName,
                                    activitytypeName = 'Mooring Deployment',
                                    platformName = platformName,
                                    platformColor = self.colors[plt_name],
                                    platformTypeName = 'mooring',
                                    stride = file_stride,
                                    startDatetime = start_datetime,
                                    endDatetime = end_datetime,
                                    dataStartDatetime = None)

            loader.include_names = parms
            loader.auxCoords = {}

            # Re-patch with the fuller condition: Aquadopp files are also
            # profile data (this supersedes the patch made above)
            if 'adcp' in f.lower() or 'aquadopp' in f.lower():
                Mooring_Loader.getFeatureType = lambda self: 'timeseriesprofile'
            else:
                Mooring_Loader.getFeatureType = lambda self: 'timeseries'

            for p in parms:
                # The timeseries variables 'Hdg_1215', 'Ptch_1216', 'Roll_1217' should have a coordinate of
                # a singleton depth variable, but EPIC files have this as a sensor_depth variable attribute.
                # Need special handling in the loader for these data.
                if p in ('Hdg_1215', 'Ptch_1216', 'Roll_1217'):
                    loader.auxCoords[p] = {'time': 'time', 'latitude': 'lat', 'longitude': 'lon'}
                else:
                    # All other variables carry the full coordinate set,
                    # including depth
                    loader.auxCoords[p] = {'time': 'time', 'latitude': 'lat',
                                           'longitude': 'lon', 'depth': 'depth'}
            try:
                loader.process_data()
            except NoValidData as e:
                # Log and continue: an empty or invalid file should not abort
                # the whole campaign load
                self.logger.info(str(e))
                continue
Example #10
0
    def loadCCEBIN(self, stride=None):
        '''
        Mooring CCEBIN specific load functions
        '''
        platformName = 'CCEBIN'
        stride = stride or self.stride
        activity_names = [fname + getStrideText(stride) for fname in self.ccebin_files]
        for aName, f in zip(activity_names, self.ccebin_files):
            url = os.path.join(self.ccebin_base, f)

            dataStartDatetime = None
            if self.args.append:
                # Return datetime of last timevalue - if data are loaded from multiple
                # activities return the earliest last datetime value
                dataStartDatetime = InstantPoint.objects.using(
                    self.dbAlias).filter(activity__name=aName).aggregate(
                        Max('timevalue'))['timevalue__max']
                if dataStartDatetime:
                    # Subtract an hour to fill in missing_values at end from previous load
                    dataStartDatetime -= timedelta(seconds=3600)

            loader = Mooring_Loader(
                url=url,
                campaignName=self.campaignName,
                campaignDescription=self.campaignDescription,
                dbAlias=self.dbAlias,
                activityName=aName,
                activitytypeName='Mooring Deployment',
                platformName=platformName,
                platformColor=self.colors[platformName.lower()],
                platformTypeName='mooring',
                stride=stride,
                startDatetime=self.ccebin_start_datetime,
                endDatetime=self.ccebin_end_datetime,
                dataStartDatetime=dataStartDatetime)

            loader.include_names = self.ccebin_parms
            loader.auxCoords = {}
            if 'adcp' in f.lower():
                # ADCP files are profile data; give each velocity/attitude
                # variable its own copy of the full coordinate mapping
                Mooring_Loader.getFeatureType = lambda self: 'timeseriesprofile'
                adcp_parms = ('u_1205', 'v_1206', 'w_1204', 'AGC_1202',
                              'Hdg_1215', 'Ptch_1216', 'Roll_1217')
                loader.auxCoords.update({p: {'time': 'time',
                                             'latitude': 'lat',
                                             'longitude': 'lon',
                                             'depth': 'depth'} for p in adcp_parms})
            else:
                Mooring_Loader.getFeatureType = lambda self: 'timeseries'

            loader.process_data()

            # For timeseriesProfile data we need to pass the nominaldepth of the platform
            # so that the model is put at the correct depth in the Spatial -> 3D view.
            x3d_url = 'http://stoqs.mbari.org/x3d/cce_bin_assem/cce_bin_assem_src_scene.x3d'
            try:
                self.addPlatformResources(x3d_url, platformName,
                                          nominaldepth=self.ccebin_nominaldepth)
            except AttributeError:
                # No ccebin_nominaldepth configured for this campaign
                self.addPlatformResources(x3d_url, platformName)