def _post_process(self):
    """Post-process loaded OOI asset dicts in place: fill in missing names,
    reference designators ('rd'), bounding boxes and parent links.

    NOTE(review): this file contains a second, later definition of
    `_post_process` which shadows this one — presumably this is a superseded
    revision; confirm and remove one of the two.

    Order matters: the ssites loop below populates `osite['ssite_list']`,
    which the subsequent osites loop consumes.
    """
    # Asset lookups; presumably dicts keyed by reference-designator strings
    # (node IDs are sliced as RD strings further down) — TODO confirm.
    nodes = self.get_type_assets('node')
    nodetypes = self.get_type_assets('nodetype')
    subsites = self.get_type_assets('subsite')
    osites = self.get_type_assets('osite')
    sites = self.get_type_assets('site')
    ssites = self.get_type_assets('ssite')

    # Make sure all node types have a name; fall back to "(<code>)".
    for code, obj in nodetypes.iteritems():
        if not obj.get('name', None):
            obj['name'] = "(" + code + ")"

    # Add rd and parents to ssites
    for key, ssite in ssites.iteritems():
        subsite_rd_list = ssite['subsite_rd_list']
        # Only compute a bounding box if the ssite has no coordinates yet.
        if not 'lat_north' in ssite or not ssite['lat_north']:
            subsite_objs = [subsites[subsite_id] for subsite_id in subsite_rd_list]
            # key_mapping: subsite depth attribute is named "depth_subsite".
            bbox = GeoUtils.calc_bounding_box_for_points(subsite_objs,
                                                         key_mapping=dict(depth="depth_subsite"))
            ssite.update(bbox)
        # First subsite RD stands in as the ssite's own reference designator.
        ssite['rd'] = subsite_rd_list[0]
        ooi_rd = OOIReferenceDesignator(subsite_rd_list[0])
        site = sites[ooi_rd.site_rd]
        osite = osites[site['osite']]
        # Register this ssite as a child of its observatory site (osite);
        # the osites loop below relies on 'ssite_list' being filled here.
        if 'ssite_list' not in osite:
            osite['ssite_list'] = []
        osite['ssite_list'].append(key)
        ssite['parent_id'] = osite['site_rd_list'][0]

    # Add rd to osites and aggregate a bounding box over their child ssites.
    for key, osite in osites.iteritems():
        site_rd_list = osite['site_rd_list']
        ssite_list = osite.get('ssite_list', [])
        ssite_objs = [ssites[ss_id] for ss_id in ssite_list]
        bbox = GeoUtils.calc_bounding_box_for_boxes(ssite_objs)
        osite.update(bbox)
        osite['rd'] = site_rd_list[0]

    # Make sure all nodes have a name and geospatial coordinates.
    for ooi_rd, obj in nodes.iteritems():
        if not obj.get('name', None):
            # Compose "<subsite name> - <nodetype name>" from RD slices:
            # chars [:8] look like the subsite RD, [9:11] the node-type code
            # — TODO confirm against the RD format spec.
            name = subsites[ooi_rd[:8]]['name'] + " - " + nodetypes[ooi_rd[9:11]]['name']
            obj['name'] = name
        if not obj.get('latitude', None):
            # TODO(review): coordinate backfill was never implemented here.
            pass
def _post_process(self):
    """Post-process loaded OOI asset dicts in place.

    Fills in missing names, reference designators ('rd'), bounding boxes and
    parent links; derives platform-agent connection flags for nodes and
    instruments; parses deployment dates into datetime objects
    (via self._parse_date — TODO confirm return type).

    Order matters: the ssites loop populates `osite['ssite_list']` consumed by
    the osites loop, and the node loop sets node attributes read when
    processing instruments.
    """
    # Asset lookups; presumably dicts keyed by reference-designator strings
    # (node IDs are sliced as RD strings below) — TODO confirm.
    node_objs = self.get_type_assets("node")
    nodetypes = self.get_type_assets('nodetype')
    subsites = self.get_type_assets('subsite')
    osites = self.get_type_assets('osite')
    sites = self.get_type_assets('site')
    ssites = self.get_type_assets('ssite')
    inst_objs = self.get_type_assets("instrument")
    series_objs = self.get_type_assets("series")
    pagent_objs = self.get_type_assets("platformagent")

    # Make sure all node types have a name; fall back to "(<code>)".
    for code, obj in nodetypes.iteritems():
        if not obj.get('name', None):
            obj['name'] = "(" + code + ")"

    # Add rd and parents to ssites. Bounding box computed from member subsites.
    for key, ssite in ssites.iteritems():
        subsite_rd_list = ssite['subsite_rd_list']
        # Only compute a bounding box if the ssite has no coordinates yet.
        if not 'lat_north' in ssite or not ssite['lat_north']:
            subsite_objs = [subsites[subsite_id] for subsite_id in subsite_rd_list]
            # key_mapping: subsite depth attribute is named "depth_subsite".
            bbox = GeoUtils.calc_bounding_box_for_points(subsite_objs,
                                                         key_mapping=dict(depth="depth_subsite"))
            ssite.update(bbox)
        # First subsite RD stands in as the ssite's own reference designator.
        ssite['rd'] = subsite_rd_list[0]
        ooi_rd = OOIReferenceDesignator(subsite_rd_list[0])
        site = sites[ooi_rd.site_rd]
        osite = osites[site['osite']]
        # Register this ssite as a child of its observatory site (osite);
        # the osites loop below relies on 'ssite_list' being filled here.
        if 'ssite_list' not in osite:
            osite['ssite_list'] = []
        osite['ssite_list'].append(key)
        ssite['parent_id'] = osite['site_rd_list'][0]

    # Add rd to osites. Bounding box aggregated over their child ssites.
    for key, osite in osites.iteritems():
        site_rd_list = osite['site_rd_list']
        ssite_list = osite.get('ssite_list', [])
        ssite_objs = [ssites[ss_id] for ss_id in ssite_list]
        bbox = GeoUtils.calc_bounding_box_for_boxes(ssite_objs)
        osite.update(bbox)
        osite['rd'] = site_rd_list[0]

    # Post-process "node" objects:
    # - Make sure all nodes have a name, geospatial coordinates and platform
    #   agent connection info
    # - Convert available node First Deploy Date and override date into
    #   datetime objects
    for node_id, node_obj in node_objs.iteritems():
        if not node_obj.get('name', None):
            # Compose "<subsite name> - <nodetype name>" from RD slices:
            # chars [:8] look like the subsite RD, [9:11] the node-type code
            # — TODO confirm against the RD format spec.
            name = subsites[node_id[:8]]['name'] + " - " + nodetypes[node_id[9:11]]['name']
            node_obj['name'] = name
        if not node_obj.get('latitude', None):
            # TODO(review): coordinate backfill was never implemented here.
            pass
        # Derive connection flags from the node's platform agent, if known.
        pagent_type = node_obj.get('platform_agent_type', "")
        pagent_obj = pagent_objs.get(pagent_type, None)
        if pagent_obj:
            instrument_agent_rt = pagent_obj['rt_data_path'] == "Direct"
            data_agent_rt = pagent_obj['rt_data_path'] == "File Transfer"
            data_agent_recovery = pagent_obj['rt_data_acquisition'] == "Partial"
            node_obj['instrument_agent_rt'] = instrument_agent_rt
            node_obj['data_agent_rt'] = data_agent_rt
            node_obj['data_agent_recovery'] = data_agent_recovery
        if 'deployment_start' not in node_obj:
            log.warn("Node %s appears not in mapping spreadsheet - inconsistency?!", node_id)
        # Parse SAF date; DEFAULT_MAX_DATE is the fallback when absent.
        node_deploy_date = node_obj.get('First Deployment Date', None)
        node_obj['SAF_deploy_date'] = self._parse_date(node_deploy_date, DEFAULT_MAX_DATE)
        # Parse override date if available or set to SAF date
        node_obj['deploy_date'] = self._parse_date(node_obj.get('deployment_start', None),
                                                   node_obj['SAF_deploy_date'])

    # Post-process "instrument" objects:
    # - Set connection info based on platform agent of the parent node
    # - Convert available instrument First Deploy Date into datetime objects
    for inst_id, inst_obj in inst_objs.iteritems():
        inst_rd = OOIReferenceDesignator(inst_id)
        # Parse override date if available or set to SAF date
        inst_obj['SAF_deploy_date'] = self._parse_date(inst_obj.get('First Deployment Date', None),
                                                       DEFAULT_MAX_DATE)
        inst_obj['deploy_date'] = inst_obj['SAF_deploy_date']
        # Set instrument connection info based on node platform agent
        # connection and instrument agent. Unlike the node loop above, these
        # lookups are unguarded — a missing series/node/platform agent raises
        # KeyError (presumably intentional for mandatory data; confirm).
        series_obj = series_objs[inst_rd.series_rd]
        node_id = inst_rd.node_rd
        node_obj = node_objs[node_id]
        pagent_type = node_obj['platform_agent_type']
        pagent_obj = pagent_objs[pagent_type]
        # Real-time via instrument agent only if the platform path is direct
        # AND an instrument agent exists for this series.
        instrument_agent_rt = (pagent_obj['rt_data_path'] == "Direct") and series_obj['ia_exists']
        data_agent_rt = (pagent_obj['rt_data_path'] == "File Transfer") and series_obj['dart_exists']
        # Recovery applies on partial RT acquisition or when neither agent exists.
        data_agent_recovery = pagent_obj['rt_data_acquisition'] == "Partial" \
            or not (series_obj['ia_exists'] or series_obj['dart_exists'])
        inst_obj['ia_rt_data'] = instrument_agent_rt
        inst_obj['da_rt'] = data_agent_rt
        inst_obj['da_pr'] = data_agent_recovery

    # Check all series are in spreadsheet
    for series_id, series_obj in series_objs.iteritems():
        if series_obj.get("tier1", None) is None:
            log.warn("Series %s appears not in mapping spreadsheet - inconsistency?!", series_id)