def get_vocab():
    """ Get 'vocab_dict' from cache or compiled, return vocab_dict. """
    debug = False
    vocab_dict = {}
    vocab_codes = {}
    try:
        # Get 'vocab_dict' if cached
        dict_cached = cache.get('vocab_dict')
        if dict_cached:
            vocab_dict = dict_cached

        # Get 'vocab_codes' if cached
        codes_cached = cache.get('vocab_codes')
        if codes_cached:
            vocab_codes = codes_cached

        # If either 'vocab_dict' or 'vocab_codes' is not cached, get and place in cache.
        if not vocab_dict or not vocab_codes:
            if debug: print '\n Cache vocabulary...'
            vocab_dict, codes = compile_vocab()
            cache.set('vocab_dict', vocab_dict, timeout=CACHE_TIMEOUT)
            cache.set('vocab_codes', codes, timeout=CACHE_TIMEOUT)
            if debug: print '\n Cached vocabulary...'
        return vocab_dict

    except Exception as err:
        message = str(err)
        current_app.logger.info(message)
        raise Exception(message)
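
# The get/set sequence above is a plain get-or-build cache pattern. A minimal,
# self-contained sketch of the same idea; the helper name and signature are
# illustrative assumptions, not part of this codebase:
def _get_or_build_cached(cache, key, build, timeout):
    """ Return the cached value for 'key', building and caching it on a miss. """
    value = cache.get(key)
    if not value:
        value = build()
        cache.set(key, value, timeout=timeout)
    return value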
def delete_asset(id):
    """ Delete an asset by id. """
    this_asset = ""
    try:
        url = current_app.config['UFRAME_ASSETS_URL'] + '/assets/%s' % str(id)
        response = requests.delete(url, headers=_uframe_headers())

        asset_cache = cache.get('asset_list')
        if asset_cache:
            for row in asset_cache:
                if row['id'] == id:
                    this_asset = row
                    break
            if this_asset:
                cache.delete('asset_list')
                asset_cache.remove(this_asset)
                cache.set('asset_list', asset_cache, timeout=CACHE_TIMEOUT)

        return response.text, response.status_code
    except ConnectionError:
        message = 'ConnectionError during delete asset.'
        current_app.logger.info(message)
        return bad_request(message)
    except Timeout:
        message = 'Timeout during delete asset.'
        current_app.logger.info(message)
        return bad_request(message)
    except Exception as err:
        message = str(err)
        current_app.logger.info(message)
        return bad_request(message)
def get_assets_payload():
    """ Get all assets from uframe; process into the ooi-ui-services list of assets
    (asset_list) and assets_dict keyed by asset id. Update cache for asset_list and assets_dict.
    """
    try:
        get_vocab()
        # Get uframe connect and timeout information
        uframe_url, timeout, timeout_read = get_uframe_assets_info()
        url = '/'.join([uframe_url, 'assets'])
        payload = requests.get(url, timeout=(timeout, timeout_read))
        if payload.status_code != 200:
            message = '(%d) Failed to get uframe assets.' % payload.status_code
            current_app.logger.info(message)
            return internal_server_error(message)
        result = payload.json()
        data, assets_dict = _compile_assets(result)
        if "error" not in data:
            cache.set('asset_list', data, timeout=CACHE_TIMEOUT)
            data = cache.get('asset_list')
        return data
    except requests.exceptions.ConnectionError as err:
        message = "ConnectionError getting uframe assets; %s" % str(err)
        current_app.logger.info(message)
        return internal_server_error(message)
    except requests.exceptions.Timeout as err:
        message = "Timeout getting uframe assets; %s" % str(err)
        current_app.logger.info(message)
        return internal_server_error(message)
    except Exception as err:
        message = "Error getting uframe assets; %s" % str(err)
        current_app.logger.info(message)
        raise
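
# requests accepts a (connect, read) timeout tuple, as used above. A standalone
# illustration; the URL and limits here are placeholder assumptions:
def _timeout_example():
    import requests
    try:
        # Fail after 5 seconds with no connection, or after 30 seconds waiting on data.
        response = requests.get('http://localhost:12576/assets', timeout=(5, 30))
        response.raise_for_status()
        return response.json()
    except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as err:
        print('uframe request failed: %s' % err)
        return None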
def delete_asset(id):
    ''' Delete an asset by providing the id '''
    thisAsset = ""
    try:
        url = current_app.config['UFRAME_ASSETS_URL']\
            + '/%s/%s' % ('assets', id)
        response = requests.delete(url, headers=_uframe_headers())

        asset_cache = cache.get('asset_list')
        if asset_cache:
            cache.delete('asset_list')
            for row in asset_cache:
                if row['id'] == id:
                    thisAsset = row
            if thisAsset:
                asset_cache.remove(thisAsset)
            cache.set('asset_list', asset_cache, timeout=CACHE_TIMEOUT)

        return response.text, response.status_code
    except requests.exceptions.ConnectionError as e:
        error = "Error: Cannot connect to uframe. %s" % e
        print error
        return make_response(error, 500)
def create_asset():
    ''' Create a new asset; the return will be right from uframe if all goes well.
    Either a success or an error message. Login required.
    '''
    data = json.loads(request.data)
    uframe_obj = uFrameAssetCollection()
    post_body = uframe_obj.from_json(data)
    post_body.pop('assetId')
    #post_body.pop('metaData')
    post_body.pop('lastModifiedTimestamp')
    post_body.pop('manufacturerInfo')
    post_body.pop('attachments')
    post_body.pop('classCode')
    post_body.pop('seriesClassification')
    post_body.pop('purchaseAndDeliveryInfo')
    #return json.dumps(post_body)
    uframe_assets_url = _uframe_url(uframe_obj.__endpoint__)
    #return uframe_assets_url
    response = requests.post(uframe_assets_url, data=json.dumps(post_body), headers=_uframe_headers())
    if response.status_code == 201:
        json_response = json.loads(response.text)
        data['id'] = json_response['id']
        asset_cache = cache.get('asset_list')
        cache.delete('asset_list')
        if asset_cache:
            asset_cache.append(data)
            cache.set('asset_list', asset_cache, timeout=CACHE_TIMEOUT)
    return response.text, response.status_code
def update_asset(id):
    ''' Update an existing asset; the return will be right from uframe if all goes well.
    Either a success or an error message. Login required.
    '''
    try:
        data = json.loads(request.data)
        uframe_obj = UFrameAssetsCollection()
        put_body = uframe_obj.from_json(data)
        uframe_assets_url = _uframe_url(uframe_obj.__endpoint__, id)
        response = requests.put(uframe_assets_url, data=json.dumps(put_body), headers=_uframe_headers())
        if response.status_code == 200:
            asset_cache = cache.get('asset_list')
            cache.delete('asset_list')
            if asset_cache:
                for row in asset_cache:
                    if row['id'] == id:
                        row.update(data)
                if "error" not in asset_cache:
                    cache.set('asset_list', asset_cache, timeout=CACHE_TIMEOUT)
        return response.text, response.status_code
    except requests.exceptions.ConnectionError as e:
        error = "Error: Cannot connect to uframe. %s" % e
        print error
        return make_response(error, 500)
def delete_asset(id):
    ''' Delete an asset by providing the id '''
    try:
        uframe_obj = UFrameAssetsCollection()
        uframe_assets_url = _uframe_url(uframe_obj.__endpoint__, id)
        response = requests.delete(uframe_assets_url, headers=_uframe_headers())
        if response.status_code == 200:
            asset_cache = cache.get('asset_list')
            cache.delete('asset_list')
            if asset_cache:
                thisAsset = None
                for row in asset_cache:
                    if row['id'] == id:
                        thisAsset = row
                if thisAsset is not None:
                    asset_cache.remove(thisAsset)
                if "error" not in asset_cache:
                    cache.set('asset_list', asset_cache, timeout=CACHE_TIMEOUT)
        return response.text, response.status_code
    except requests.exceptions.ConnectionError as e:
        error = "Error: Cannot connect to uframe. %s" % e
        print error
        return make_response(error, 500)
def update_asset(id):
    try:
        data = json.loads(request.data)
        if 'asset_class' in data:
            data['@class'] = data.pop('asset_class')
        url = current_app.config['UFRAME_ASSETS_URL']\
            + '/%s/%s' % ('assets', id)
        response = requests.put(url, data=json.dumps(data), headers=_uframe_headers())
        if response.status_code == 200:
            asset_cache = cache.get('asset_list')
            data_list = [data]
            data = _compile_assets(data_list)
            if asset_cache:
                cache.delete('asset_list')
                for row in asset_cache:
                    if row['id'] == id:
                        row.update(data[0])
                if "error" not in asset_cache:
                    cache.set('asset_list', asset_cache, timeout=CACHE_TIMEOUT)
        return response.text, response.status_code
    except requests.exceptions.ConnectionError as e:
        error = "Error: Cannot connect to uframe. %s" % e
        print error
        return make_response(error, 500)
def create_asset():
    """ Create a new asset; the return will be in uframe asset format (not ooi-ui-services format).
    Cache ('asset_list') is updated with the new asset.
    Either a success or an error message. Login required.
    """
    debug = False
    try:
        data = json.loads(request.data)
        if valid_create_asset_request_data(data):
            if debug: print '\n debug validated required fields...'

        url = current_app.config['UFRAME_ASSETS_URL'] + '/%s' % 'assets'
        if 'lastModifiedTimestamp' in data:
            del data['lastModifiedTimestamp']
        if 'asset_class' in data:
            data['@class'] = data.pop('asset_class')

        # Create asset in uframe
        response = requests.post(url, data=json.dumps(data), headers=_uframe_headers())

        if response.status_code == 201:
            json_response = json.loads(response.text)
            data['assetId'] = json_response['id']
            data['tense'] = 'NEW'
            data_list = [data]
            try:
                compiled_data, _ = _compile_assets(data_list)
            except Exception:
                raise
            if not compiled_data or compiled_data is None:
                raise Exception('_compile_assets returned empty or None result.')

            # Update asset cache ('asset_list')
            asset_cache = cache.get('asset_list')
            if asset_cache:
                cache.delete('asset_list')
                asset_cache.append(compiled_data[0])
                cache.set('asset_list', asset_cache, timeout=CACHE_TIMEOUT)
        else:
            return bad_request('Failed to create asset!')

        return response.text, response.status_code
    except ConnectionError:
        message = 'ConnectionError during create asset.'
        current_app.logger.info(message)
        return bad_request(message)
    except Timeout:
        message = 'Timeout during create asset.'
        current_app.logger.info(message)
        return bad_request(message)
    except Exception as err:
        message = str(err)
        current_app.logger.info(message)
        return bad_request(message)
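
# valid_create_asset_request_data() is called above but defined elsewhere. A
# hypothetical sketch of the kind of check it might perform; the required-field
# list here is an assumption for illustration only, not the actual helper:
def _valid_create_asset_request_data_sketch(data):
    """ Verify the request dictionary carries fields a create would require. """
    required_fields = ['asset_class', 'assetInfo']  # assumed, not authoritative
    for field in required_fields:
        if field not in data:
            raise Exception('Required field %r missing from request data.' % field)
    return True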
def get_events():
    ''' -- M@C 05/12/2015. Added to support event query on stream data. '''
    if 'ref_des' in request.args:
        events = _get_events_by_ref_des(request.args.get('ref_des'))
        return events
    else:
        '''
        Listing GET request of all events. This method is cached for 1 hour.
        '''
        # Set up all the containers.
        data = {}
        # Manually check the cache; only hit uframe on a miss.
        cached = cache.get('event_list')
        if cached:
            data = cached
        else:
            # Create uframe instance, and fetch the data.
            uframe_obj = uFrameEventCollection()
            data = uframe_obj.to_json()
            try:
                # Parse the result and assign ref_des as top element.
                for row in data:
                    row['id'] = row.pop('eventId')
                    row['class'] = row.pop('@class')
            except (KeyError, TypeError, AttributeError):
                pass
            if "error" not in data:
                cache.set('event_list', data, timeout=CACHE_TIMEOUT)

        #data = sorted(data, key=itemgetter('id'))
        if request.args.get('search') and request.args.get('search') != "":
            return_list = []
            search_term = request.args.get('search')
            for item in data:
                if search_term.lower() in str(item['class']).lower():
                    return_list.append(item)
                if search_term.lower() in str(item['id']):
                    return_list.append(item)
                #if search_term.lower() in str(item['referenceDesignator']).lower():
                #    return_list.append(item)
                if search_term.lower() in str(item['startDate']).lower():
                    return_list.append(item)
                #if search_term.lower() in str(item['assetInfo']['owner']).lower():
                #    return_list.append(item)
            data = return_list

        result = jsonify({'events': data})
        return result
def update_asset(id):
    """ Update asset by id. Last writer wins.
    New format of request.data to be handled (post 5/31):
        {"assetInfo.array": "EnduranceAss", "assetInfo.assembly": "testass", "oper": "edit", "id": "227"}
    """
    try:
        data = json.loads(request.data)
        if 'asset_class' in data:
            data['@class'] = data.pop('asset_class')
        url = current_app.config['UFRAME_ASSETS_URL'] + '/%s/%s' % ('assets', id)
        response = requests.put(url, data=json.dumps(data), headers=_uframe_headers())
        if response.status_code != 200:
            message = '(%d) Failed to update asset %d.' % (response.status_code, id)
            return bad_request(message)

        data_list = [data]
        try:
            compiled_data, _ = _compile_assets(data_list)
        except Exception:
            raise
        if not compiled_data or compiled_data is None:
            raise Exception('_compile_assets returned empty or None result.')

        asset_cache = cache.get('asset_list')
        if asset_cache:
            if "error" in asset_cache:
                message = 'Error returned in \'asset_list\' cache; unable to update cache.'
                return bad_request(message)
            cache.delete('asset_list')
            for row in asset_cache:
                if row['id'] == id:
                    row.update(compiled_data[0])
                    break
            cache.set('asset_list', asset_cache, timeout=CACHE_TIMEOUT)

        return response.text, response.status_code
    except ConnectionError:
        message = 'Error: ConnectionError during update asset request (id: %d)' % id
        current_app.logger.info(message)
        return bad_request(message)
    except Timeout:
        message = 'Error: Timeout during update asset request (id: %d)' % id
        current_app.logger.info(message)
        return bad_request(message)
    except Exception as err:
        message = str(err)
        current_app.logger.info(message)
        return bad_request(message)
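
# The cache update above is a find-and-merge on a list of dicts. An
# illustrative standalone sketch of the same pattern:
def _update_row_example():
    assets = [{'id': 226, 'name': 'old'}, {'id': 227, 'name': 'old'}]
    for row in assets:
        if row['id'] == 227:
            row.update({'name': 'new'})  # last writer wins; existing keys are overwritten
            break
    return assets                        # assets[1]['name'] is now 'new'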
def _get_all_assets():
    """ Get all assets, complete or incomplete (in ooi-ui-services format). """
    try:
        # Get 'good' assets
        asset_cache = cache.get('asset_list')
        if asset_cache:
            asset_data = asset_cache
        else:
            try:
                asset_data = get_assets_payload()
                cache.set('asset_list', asset_data, timeout=CACHE_TIMEOUT)
            except Exception as err:
                message = err.message
                raise Exception(message)

        # Get 'bad' assets
        bad_asset_cache = cache.get('bad_asset_list')
        if bad_asset_cache:
            bad_asset_data = bad_asset_cache
        else:
            data = get_assets_from_uframe()
            try:
                bad_asset_data = _compile_bad_assets(data)
                cache.set('bad_asset_list', bad_asset_data, timeout=CACHE_TIMEOUT)
            except Exception as err:
                message = err.message
                raise Exception(message)

        result_data = asset_data + bad_asset_data
        if result_data:
            result_data.sort()
        return result_data
    except Exception:
        raise
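
# Note: result_data.sort() above relies on Python 2's willingness to order
# dictionaries arbitrarily; under Python 3 it raises TypeError. A portable
# sketch with an explicit key (assumes every row carries 'ref_des'):
def _sort_assets_example(rows):
    """ Illustrative only: order asset dicts by reference designator. """
    from operator import itemgetter
    rows.sort(key=itemgetter('ref_des'))
    return rows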
def delete_asset(id):
    ''' Delete an asset by providing the id '''
    uframe_obj = uFrameAssetCollection()
    uframe_assets_url = _uframe_url(uframe_obj.__endpoint__, id)
    response = requests.delete(uframe_assets_url, headers=_uframe_headers())
    if response.status_code == 200:
        asset_cache = cache.get('asset_list')
        cache.delete('asset_list')
        if asset_cache:
            thisAsset = None
            for row in asset_cache:
                if row['id'] == id:
                    thisAsset = row
            if thisAsset is not None:
                asset_cache.remove(thisAsset)
            if "error" not in asset_cache:
                cache.set('asset_list', asset_cache, timeout=CACHE_TIMEOUT)
    return response.text, response.status_code
def _get_bad_assets():
    """ Get all 'bad' assets (in ooi-ui-services format). """
    try:
        bad_asset_cache = cache.get('bad_asset_list')
        if bad_asset_cache:
            result_data = bad_asset_cache
        else:
            data = get_assets_from_uframe()
            try:
                result_data = _compile_bad_assets(data)
                cache.set('bad_asset_list', result_data, timeout=CACHE_TIMEOUT)
            except Exception as err:
                message = err.message
                raise Exception(message)
        return result_data
    except Exception:
        raise
def create_asset():
    ''' Create a new asset; the return will be right from uframe if all goes well.
    Either a success or an error message. Login required.
    '''
    try:
        data = json.loads(request.data)
        url = current_app.config['UFRAME_ASSETS_URL']\
            + '/%s' % ('assets')
        if 'lastModifiedTimestamp' in data:
            del data['lastModifiedTimestamp']
        if 'asset_class' in data:
            data['@class'] = data.pop('asset_class')
        response = requests.post(url, data=json.dumps(data), headers=_uframe_headers())
        if response.status_code == 201:
            json_response = json.loads(response.text)
            data['id'] = json_response['id']
            data['tense'] = 'NEW'
            data_list = [data]
            data = _compile_assets(data_list)
            asset_cache = cache.get('asset_list')
            if asset_cache:
                cache.delete('asset_list')
                asset_cache.append(data[0])
                cache.set('asset_list', asset_cache, timeout=CACHE_TIMEOUT)
        return response.text, response.status_code
    except requests.exceptions.ConnectionError as e:
        error = "Error: Cannot connect to uframe. %s" % e
        return make_response(error, 500)
def update_asset(id):
    ''' Update an existing asset; the return will be right from uframe if all goes well.
    Either a success or an error message. Login required.
    '''
    data = json.loads(request.data)
    uframe_obj = uFrameAssetCollection()
    put_body = uframe_obj.from_json(data)
    uframe_assets_url = _uframe_url(uframe_obj.__endpoint__, id)
    response = requests.put(uframe_assets_url, data=json.dumps(put_body), headers=_uframe_headers())
    if response.status_code == 200:
        asset_cache = cache.get('asset_list')
        cache.delete('asset_list')
        if asset_cache:
            for row in asset_cache:
                if row['id'] == id:
                    row.update(data)
            if "error" not in asset_cache:
                cache.set('asset_list', asset_cache, timeout=CACHE_TIMEOUT)
    return response.text, response.status_code
def build_display_name(rd):
    """ Get display name for reference designator using the codes dictionary. """
    debug = False
    is_rs = False
    try:
        # Get 'vocab_codes' if cached; if not cached, build cache, set and continue
        dict_cached = cache.get('vocab_codes')
        if dict_cached:
            vocab_codes = dict_cached
        else:
            vocab_dict, vocab_codes = compile_vocab()
            cache.set('vocab_dict', vocab_dict, timeout=CACHE_TIMEOUT)
            cache.set('vocab_codes', vocab_codes, timeout=CACHE_TIMEOUT)

        # Verify 'vocab_codes' has content, otherwise error
        if not vocab_codes:
            message = 'Vocabulary processing failed to obtain vocab_codes dictionary, unable to process.'
            current_app.logger.info(message)
            return None

        # Process reference designator using 'vocab_dict' and 'vocab_codes'
        len_rd = len(rd)
        if len_rd < 8:
            return None

        # Build display name for instrument
        """
        rs_code = None
        array_code = rd[:2]
        if array_code == 'RS':
            is_rs = True
            rs_code = rd[:8]
        """
        if len_rd == 27:
            if debug: print '\n (build display name) reference designator \'%s\'.' % rd
            subsite, node, instr = rd.split('-', 2)
            port, instrument = instr.split('-')
            instr_class = instrument[0:5]
            line4 = None
            if instr_class in vocab_codes['classes']:
                line4 = vocab_codes['classes'][instr_class]
            if line4 is None:
                return None
            result = line4

        # Build display name for platform (subsite = 'CE01ISSM', node = 'MFC31')
        elif len_rd == 14:
            subsite, node = rd.split('-')
            node_code = node[0:2]
            subsite_code = subsite[4:8]
            line2 = None                        # Added
            if subsite_code in vocab_codes['subsites']:
                line2 = vocab_codes['subsites'][subsite_code]
            line3 = None
            if node_code in vocab_codes['nodes']:
                line3 = vocab_codes['nodes'][node_code]
            #if line3 is None:
            if line2 is None or line3 is None:
                return None
            result = ' - '.join([line2, line3])

        # Build display name for mooring
        elif len_rd == 8:
            subsite = rd
            subsite_code = subsite[4:8]
            """
            line1 = None
            if is_rs:
                if rs_code in vocab_codes['rs_array_names']:
                    line1 = vocab_codes['rs_array_names'][rs_code]
            else:
                if array_code in vocab_codes['arrays']:
                    line1 = vocab_codes['arrays'][array_code]
            """
            line2 = None
            if subsite_code in vocab_codes['subsites']:
                line2 = vocab_codes['subsites'][subsite_code]
            #if line1 is None or line2 is None:
            if line2 is None:
                return None
            #result = ' '.join([line1, line2])
            result = line2

        # Build display name for irregular reference designator
        elif len_rd > 14 and len_rd < 27:
            if debug: print '\n (build display name) Irregular reference designator \'%s\'.' % rd
            subsite, node, instr = rd.split('-', 2)
            if not instr:
                message = 'Reference designator \'%s\' is malformed; unable to discern sensor.' % rd
                raise Exception(message)
            port, instrument = instr.split('-')
            if not instrument:
                message = 'Reference designator \'%s\' is malformed; unable to discern instrument class.' % rd
                raise Exception(message)
            instr_class = None
            if instrument:
                if len(instrument) > 5:
                    instr_class = instrument[0:5]
                else:
                    instr_class = instrument
            line4 = None
            if instr_class in vocab_codes['classes']:
                line4 = vocab_codes['classes'][instr_class]
            if line4 is None:
                return None
            result = line4
        else:
            return None

        #print '\n\t ***** debug -- build_display_name -- result: ', result
        return result

    except Exception as err:
        message = 'Exception in build display name for %s; %s' % (rd, str(err))
        current_app.logger.info(message)
        return None
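
# A minimal, self-contained illustration of how the splits above decompose a
# 27-character reference designator (format assumed from the examples in this
# module, e.g. 'CE01ISSM-MFC31-00-CPMENG000'):
def _parse_reference_designator_example(rd):
    """ Split an instrument reference designator into its component codes. """
    subsite, node, instr = rd.split('-', 2)      # 'CE01ISSM', 'MFC31', '00-CPMENG000'
    port, instrument = instr.split('-')          # '00', 'CPMENG000'
    return subsite, node, port, instrument[0:5]  # instrument class, e.g. 'CPMEN'

# Example:
#   _parse_reference_designator_example('CE01ISSM-MFC31-00-CPMENG000')
#   -> ('CE01ISSM', 'MFC31', '00', 'CPMEN')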
def get_assets(use_min=False, normal_data=False, reset=False):
    ''' Listing GET request of all assets. This method is cached for 1 hour.
    Helper params bypass the json response and minification.
    '''
    try:
        cached = cache.get('asset_list')
        if cached and reset is not True:
            data = cached
        else:
            url = current_app.config['UFRAME_ASSETS_URL']\
                + '/%s' % ('assets')
            payload = requests.get(url)
            if payload.status_code != 200:
                try:
                    return jsonify({"assets": payload.json()}),\
                        payload.status_code
                except AttributeError:
                    try:
                        return jsonify({"assets": 'Undefined response'}),\
                            payload.status_code
                    except Exception as e:
                        return make_response(
                            "unhandled exception: %s. Line # %s" % (e, sys.exc_info()[2].tb_lineno), 500)
            data = payload.json()
            data = _compile_assets(data)
            if "error" not in data:
                cache.set('asset_list', data, timeout=CACHE_TIMEOUT)
    except requests.exceptions.ConnectionError as e:
        error = "Error: Cannot connect to uframe. %s" % e
        print error
        return make_response(error, 500)

    # Determine field to sort by; default to 'ref_des'
    try:
        if request.args.get('sort') and request.args.get('sort') != "":
            sort_by = request.args.get('sort')
        else:
            sort_by = 'ref_des'
        data = sorted(data, key=itemgetter(sort_by))
    except Exception as e:
        print e
        pass

    # If using minimized ('min') or use_min, strip asset data
    if request.args.get('min') == 'True' or use_min is True:
        showDeployments = False
        deploymentEvents = []
        if request.args.get('deployments') == 'True':
            showDeployments = True
        for obj in data:
            try:
                if 'metaData' in obj:
                    del obj['metaData']
                if 'events' in obj:
                    if showDeployments and obj['events'] is not None:
                        for event in obj['events']:
                            if event['eventClass'] == '.DeploymentEvent':
                                deploymentEvents.append(event)
                        del obj['events']
                        obj['events'] = deploymentEvents
                        deploymentEvents = []
                    else:
                        del obj['events']
                if 'manufactureInfo' in obj:
                    del obj['manufactureInfo']
                if 'notes' in obj:
                    del obj['notes']
                if 'physicalInfo' in obj:
                    del obj['physicalInfo']
                if 'attachments' in obj:
                    del obj['attachments']
                if 'purchaseAndDeliveryInfo' in obj:
                    del obj['purchaseAndDeliveryInfo']
                if 'lastModifiedTimestamp' in obj:
                    del obj['lastModifiedTimestamp']
            except Exception:
                raise

    # Filter by 'concepts' (reference designator substrings)
    if request.args.get('concepts') and request.args.get('concepts') != "":
        return_list = []
        search_term = str(request.args.get('concepts')).split()
        search_set = set(search_term)
        for subset in search_set:
            for item in data:
                if subset.lower() in str(item['ref_des']).lower():
                    return_list.append(item)
        data = return_list

    # Multi-term search; each successive term refines the previous result set
    if request.args.get('search') and request.args.get('search') != "":
        return_list = []
        search_term = str(request.args.get('search')).split()
        search_set = set(search_term)
        try:
            for subset in search_set:
                if len(return_list) > 0:
                    ven_subset = []
                    return_list = deepcopy(data)
                    for item in return_list:
                        if subset.lower() in str(item['assetInfo']['name']).lower():
                            ven_subset.append(item)
                        elif subset.lower() in str(item['assetInfo']['longName']).lower():
                            ven_subset.append(item)
                        elif subset.lower() in str(item['ref_des']).lower():
                            ven_subset.append(item)
                        elif subset.lower() in str(item['assetInfo']['type']).lower():
                            ven_subset.append(item)
                        elif subset.lower() in str(item['assetInfo']['array']).lower():
                            ven_subset.append(item)
                        elif subset.lower() in str(item['events']).lower():
                            ven_subset.append(item)
                        elif subset.lower() in str(item['metaData']).lower():
                            ven_subset.append(item)
                        elif subset.lower() in str(item['tense']).lower():
                            ven_subset.append(item)
                    data = ven_subset
                else:
                    for item in data:
                        if subset.lower() in str(item['assetInfo']['name']).lower():
                            return_list.append(item)
                        elif subset.lower() in str(item['assetInfo']['longName']).lower():
                            return_list.append(item)
                        elif subset.lower() in str(item['ref_des']).lower():
                            return_list.append(item)
                        elif subset.lower() in str(item['assetInfo']['type']).lower():
                            return_list.append(item)
                        elif subset.lower() in str(item['assetInfo']['array']).lower():
                            return_list.append(item)
                        elif subset.lower() in str(item['events']).lower():
                            return_list.append(item)
                        elif subset.lower() in str(item['metaData']).lower():
                            return_list.append(item)
                        elif subset.lower() in str(item['tense']).lower():
                            return_list.append(item)
                    data = return_list
        except KeyError:
            pass

    # Paginate with startAt/count when requested
    if request.args.get('startAt'):
        start_at = int(request.args.get('startAt'))
        count = int(request.args.get('count'))
        total = int(len(data))
        data_slice = data[start_at:(start_at + count)]
        result = jsonify({"count": count,
                          "total": total,
                          "startAt": start_at,
                          "assets": data_slice})
        return result
    else:
        if normal_data:
            result = data
        else:
            result = jsonify({'assets': data})
        return result
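
# The startAt/count pagination above is a plain list slice. Illustrative sketch:
def _pagination_example(data, start_at, count):
    """ Illustrative only: returns data[20:30] for start_at=20, count=10. """
    return data[start_at:(start_at + count)]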
def get_assets():
    ''' Listing GET request of all assets. This method is cached for 1 hour. '''
    # Manually set up the cache
    cached = cache.get('asset_list')
    if cached:
        data = cached
    else:
        uframe_obj = uFrameAssetCollection()
        data = uframe_obj.to_json()
        for row in data:
            lat = ""
            lon = ""
            ref_des = ""
            deployment_number = ""
            row['id'] = row.pop('assetId')
            row['asset_class'] = row.pop('@class')
            row['events'] = _associate_events(row['id'])
            try:
                if row['metaData'] is not None:
                    for meta_data in row['metaData']:
                        if meta_data['key'] == 'Laditude ':
                            meta_data['key'] = 'Latitude'
                        if meta_data['key'] == 'Latitude':
                            lat = meta_data['value']
                            coord = _convert_lat_lon(lat, "")
                            meta_data['value'] = coord[0]
                        if meta_data['key'] == 'Longitude':
                            lon = meta_data['value']
                            coord = _convert_lat_lon("", lon)
                            meta_data['value'] = coord[1]
                        if meta_data['key'] == 'Ref Des SN':
                            meta_data['key'] = 'Ref Des'
                        if meta_data['key'] == 'Ref Des':
                            ref_des = meta_data['value']
                        if meta_data['key'] == 'Deployment Number':
                            deployment_number = meta_data['value']
                    row['ref_des'] = ref_des
                    if deployment_number is not None:
                        row['deployment_number'] = deployment_number
                    if lat > 0 and lon > 0:
                        row['coordinates'] = _convert_lat_lon(lat, lon)
                    else:
                        for events in row['events']:
                            if events['locationLonLat'] is not None:
                                lat = events['locationLonLat'][1]
                                lon = events['locationLonLat'][0]
                                row['coordinates'] = _convert_lat_lon(lat, lon)
                        lat = 0.0
                        lon = 0.0
                    if len(ref_des) > 0:
                        # Determine the asset name from the DB if there is none.
                        try:
                            if (row['assetInfo']['name'] is None) or (row['assetInfo']['name'] == ""):
                                row['assetInfo']['name'] = get_display_name_by_rd(ref_des)
                        except:
                            pass
            except:
                pass
        if "error" not in data:
            cache.set('asset_list', data, timeout=CACHE_TIMEOUT)

    data = sorted(data, key=itemgetter('id'))

    if request.args.get('min') == 'True':
        for obj in data:
            try:
                del obj['metaData']
                del obj['events']
                del obj['manufactureInfo']
                del obj['notes']
                del obj['physicalInfo']
                del obj['attachments']
                del obj['purchaseAndDeliveryInfo']
                del obj['lastModifiedTimestamp']
            except KeyError:
                raise

    if request.args.get('search') and request.args.get('search') != "":
        return_list = []
        search_term = request.args.get('search')
        for item in data:
            if search_term.lower() in str(item['assetInfo']['name']).lower():
                return_list.append(item)
            if search_term.lower() in str(item['id']):
                return_list.append(item)
            if search_term.lower() in str(item['ref_des']).lower():
                return_list.append(item)
            if search_term.lower() in str(item['assetInfo']['type']).lower():
                return_list.append(item)
            if search_term.lower() in str(item['events']).lower():
                return_list.append(item)
            if search_term.lower() in str(item['metaData']).lower():
                return_list.append(item)
        data = return_list

    if request.args.get('startAt'):
        start_at = int(request.args.get('startAt'))
        count = int(request.args.get('count'))
        total = int(len(data))
        data_slice = data[start_at:(start_at + count)]
        result = jsonify({"count": count,
                          "total": total,
                          "startAt": start_at,
                          "assets": data_slice})
        return result
    else:
        result = jsonify({'assets': data})
        return result
def streams_list():
    ''' Accepts stream_name or reference_designator as a URL argument '''
    if request.args.get('stream_name'):
        try:
            dict_from_stream(request.args.get('stream_name'))
        except Exception as e:
            current_app.logger.exception('**** (1) exception: ' + e.message)
            return jsonify(error=e.message), 500

    cached = cache.get('stream_list')
    if cached:
        retval = cached
    else:
        try:
            streams = dfs_streams()
        except Exception as e:
            current_app.logger.exception('**** (2) exception: ' + e.message)
            return jsonify(error=e.message), 500

        retval = []
        for stream in streams:
            try:
                data_dict = dict_from_stream(*stream)
            except Exception as e:
                current_app.logger.exception('\n**** (3) exception: ' + e.message)
                continue
            if request.args.get('reference_designator'):
                if request.args.get('reference_designator') != data_dict['reference_designator']:
                    continue
            retval.append(data_dict)

        cache.set('stream_list', retval, timeout=CACHE_TIMEOUT)

    if request.args.get('min') == 'True':
        for obj in retval:
            try:
                del obj['parameter_id']
                del obj['units']
                del obj['variable_type']
                del obj['variable_types']
                del obj['variables']
                del obj['variables_shape']
            except KeyError:
                raise

    if request.args.get('search') and request.args.get('search') != "":
        return_list = []
        search_term = request.args.get('search')
        for item in retval:
            if search_term.lower() in str(item['stream_name']).lower():
                return_list.append(item)
            if search_term.lower() in str(item['display_name']).lower():
                return_list.append(item)
            if search_term.lower() in str(item['reference_designator']).lower():
                return_list.append(item)
        retval = return_list

    if request.args.get('startAt'):
        start_at = int(request.args.get('startAt'))
        count = int(request.args.get('count'))
        total = int(len(retval))
        retval_slice = retval[start_at:(start_at + count)]
        result = jsonify({"count": count,
                          "total": total,
                          "startAt": start_at,
                          "streams": retval_slice})
        return result
    else:
        return jsonify(streams=retval)
def get_assets(use_min=False, normal_data=False):
    ''' Listing GET request of all assets. This method is cached for 1 hour.
    Helper params bypass the json response and minification.
    '''
    try:
        # Manually set up the cache
        cached = cache.get('asset_list')
        will_reset_cache = False
        if request.args.get('reset') == 'true':
            will_reset_cache = True
        if cached and not will_reset_cache:
            data = cached
        else:
            uframe_obj = UFrameAssetsCollection()
            payload = uframe_obj.to_json()
            if payload.status_code != 200:
                try:
                    return jsonify({"assets": payload.json()}), payload.status_code
                except AttributeError:
                    try:
                        return jsonify({"assets": 'Undefined response'}), payload.status_code
                    except Exception as e:
                        return make_response("unhandled exception: %s. Line # %s" % (e, sys.exc_info()[2].tb_lineno), 500)

            data = payload.json()
            for row in data:
                lat = ""
                lon = ""
                ref_des = ""
                has_deployment_event = False
                deployment_number = ""
                try:
                    row['id'] = row.pop('assetId')
                    row['asset_class'] = row.pop('@class')
                    row['events'] = associate_events(row['id'])
                    if row['metaData'] is not None:
                        for meta_data in row['metaData']:
                            if meta_data['key'] == 'Laditude ':
                                meta_data['key'] = 'Latitude'
                            if meta_data['key'] == 'Latitude':
                                lat = meta_data['value']
                                coord = convert_lat_lon(lat, "")
                                meta_data['value'] = coord[0]
                            if meta_data['key'] == 'Longitude':
                                lon = meta_data['value']
                                coord = convert_lat_lon("", lon)
                                meta_data['value'] = coord[1]
                            if meta_data['key'] == 'Ref Des SN':
                                meta_data['key'] = 'Ref Des'
                            if meta_data['key'] == 'Ref Des':
                                ref_des = meta_data['value']
                            if meta_data['key'] == 'Deployment Number':
                                deployment_number = meta_data['value']

                    row['ref_des'] = ref_des
                    if len(row['ref_des']) == 27:
                        row['asset_class'] = '.InstrumentAssetRecord'
                    if len(row['ref_des']) < 27:
                        row['asset_class'] = '.AssetRecord'
                    if deployment_number is not None:
                        row['deployment_number'] = deployment_number

                    for events in row['events']:
                        if events['locationLonLat'] is not None and lat == 0.0 and lon == 0.0:
                            lat = events['locationLonLat'][1]
                            lon = events['locationLonLat'][0]
                        if events['class'] == '.DeploymentEvent':
                            has_deployment_event = True
                    row['hasDeploymentEvent'] = has_deployment_event
                    row['coordinates'] = convert_lat_lon(lat, lon)
                    lat = 0.0
                    lon = 0.0

                    # TODO: Band-aid to work with the old version of uframe on the VM since rutgers is down.
                    if not row['assetInfo']:
                        row['assetInfo'] = {'name': '', 'type': '', 'owner': '', 'description': ''}

                    # Determine the asset name from the DB if there is none.
                    if 'name' not in row['assetInfo'] and len(ref_des) > 0:
                        row['assetInfo']['name'] = get_display_name_by_rd(ref_des) or ""
                        row['assetInfo']['longName'] = get_long_display_name_by_rd(ref_des)
                    elif 'name' in row['assetInfo'] and len(ref_des) > 0:
                        row['assetInfo']['name'] = row['assetInfo']['name'] or get_display_name_by_rd(ref_des) or ""
                        row['assetInfo']['longName'] = get_long_display_name_by_rd(ref_des)
                    else:
                        row['assetInfo']['name'] = ""
                except (AttributeError, TypeError):
                    raise

            if "error" not in data:
                cache.set('asset_list', data, timeout=CACHE_TIMEOUT)
    except requests.exceptions.ConnectionError as e:
        error = "Error: Cannot connect to uframe. %s" % e
        print error
        return make_response(error, 500)

    try:
        sort_by = ''
        if request.args.get('sort') and request.args.get('sort') != "":
            sort_by = request.args.get('sort')
        if not sort_by:
            sort_by = 'ref_des'
        data = sorted(data, key=itemgetter(sort_by))
    except Exception as e:
        print e
        pass

    if request.args.get('min') == 'True' or use_min is True:
        del_count = 0
        showDeployments = False
        deploymentEvents = []
        if request.args.get('deployments') == 'True':
            showDeployments = True
        for obj in data:
            try:
                if 'metaData' in obj:
                    del obj['metaData']
                if 'events' in obj:
                    if showDeployments:
                        for event in obj['events']:
                            if event['class'] == '.DeploymentEvent':
                                deploymentEvents.append(event)
                        del obj['events']
                        obj['events'] = deploymentEvents
                        deploymentEvents = []
                    else:
                        del obj['events']
                if 'manufactureInfo' in obj:
                    del obj['manufactureInfo']
                if 'notes' in obj:
                    del obj['notes']
                if 'physicalInfo' in obj:
                    del obj['physicalInfo']
                if 'attachments' in obj:
                    del obj['attachments']
                if 'purchaseAndDeliveryInfo' in obj:
                    del obj['purchaseAndDeliveryInfo']
                if 'lastModifiedTimestamp' in obj:
                    del obj['lastModifiedTimestamp']
            except Exception:
                del_count += 1
        if del_count > 0:
            print "could not delete one or more elements: ", del_count

    if request.args.get('search') and request.args.get('search') != "":
        return_list = []
        ven_set = []
        search_term = str(request.args.get('search')).split()
        search_set = set(search_term)
        for subset in search_set:
            if len(return_list) > 0:
                if len(ven_set) > 0:
                    ven_set = deepcopy(ven_subset)
                else:
                    ven_set = deepcopy(return_list)
                ven_subset = []
                for item in return_list:
                    if subset.lower() in str(item['assetInfo']['name']).lower():
                        ven_subset.append(item)
                    elif subset.lower() in str(item['ref_des']).lower():
                        ven_subset.append(item)
                    elif subset.lower() in str(item['assetInfo']['type']).lower():
                        ven_subset.append(item)
                    elif subset.lower() in str(item['events']).lower():
                        ven_subset.append(item)
                    elif subset.lower() in str(item['metaData']).lower():
                        ven_subset.append(item)
                data = ven_subset
            else:
                for item in data:
                    if subset.lower() in str(item['assetInfo']['name']).lower():
                        return_list.append(item)
                    elif subset.lower() in str(item['ref_des']).lower():
                        return_list.append(item)
                    elif subset.lower() in str(item['assetInfo']['type']).lower():
                        return_list.append(item)
                    elif subset.lower() in str(item['events']).lower():
                        return_list.append(item)
                    elif subset.lower() in str(item['metaData']).lower():
                        return_list.append(item)
                data = return_list

    if request.args.get('startAt'):
        start_at = int(request.args.get('startAt'))
        count = int(request.args.get('count'))
        total = int(len(data))
        data_slice = data[start_at:(start_at + count)]
        result = jsonify({"count": count,
                          "total": total,
                          "startAt": start_at,
                          "assets": data_slice})
        return result
    else:
        if normal_data:
            result = data
        else:
            result = jsonify({'assets': data})
        return result
def build_display_name(rd):
    """ Get display name for reference designator using the codes dictionary.

    "CE01ISSM": {
        "id": 0,
        "long_name": "Endurance OR Inshore Surface Mooring",
        "name": "OR Inshore Surface Mooring"
    },
    "CE01ISSM-MFC31": {
        "id": 0,
        "long_name": "Endurance OR Inshore Surface Mooring Multi-Function Node",
        "name": "Multi-Function Node"
    },
    "CE01ISSM-MFC31-00-CPMENG000": {
        "id": 1,
        "long_name": "Endurance OR Inshore Surface Mooring Multi-Function Node Buoy Controller Engineering",
        "name": "Buoy Controller Engineering"
    },

    {
        "@class" : ".VocabRecord",
        "refdes" : "CE01ISSM-MFC31-00-CPMENG000",
        "vocabId" : 1,
        "instrument" : "Buoy Controller Engineering",
        "tocL1" : "Endurance",
        "tocL2" : "OR Inshore Surface Mooring",
        "tocL3" : "Multi-Function Node"
    }

    Test cases:
        RS01SUM2-MJ01B-06-MASSPA101
        RS03INT1-MJ03C-06-MASSPA301
        GI03FLMA-RIS01-00-SIOENG000
    """
    try:
        # Get 'vocab_codes' if cached; if not cached, build cache, set and continue
        dict_cached = cache.get('vocab_codes')
        if dict_cached:
            vocab_codes = dict_cached
        else:
            vocab_dict, vocab_codes = _compile_vocab()
            cache.set('vocab_dict', vocab_dict, timeout=CACHE_TIMEOUT)
            cache.set('vocab_codes', vocab_codes, timeout=CACHE_TIMEOUT)

        # Verify 'vocab_codes' has content, otherwise error
        if not vocab_codes:
            message = 'Vocabulary processing failed to obtain vocab_codes dictionary, unable to process.'
            current_app.logger.info(message)
            return None

        # Process reference designator using 'vocab_dict' and 'vocab_codes'
        len_rd = len(rd)
        if len_rd < 8:
            return None

        # Prepare prefix processing for Endurance array vocab (tocL2)
        prefix = ''
        array_code = rd[:2]
        # For the Endurance array, check subsite and apply prefix as required.
        if array_code == 'CE':
            key = rd[0:4]    # four-character subsite prefix, e.g. 'CE01'
            if key == 'CE01' or key == 'CE02' or key == 'CE04':
                prefix = 'OR '
            elif key == 'CE06' or key == 'CE07' or key == 'CE09':
                prefix = 'WA '

        # Build display name for instrument
        if len_rd == 27:
            subsite, node, instr = rd.split('-', 2)    # subsite = 'CE01ISSM', node = 'MFC31'
            port, instrument = instr.split('-')
            instr_class = instrument[0:5]
            line4 = None
            if instr_class in vocab_codes['classes']:
                line4 = vocab_codes['classes'][instr_class]
            if line4 is None:
                return None
            result = line4

        # Build display name for platform
        elif len_rd == 14:
            subsite, node = rd.split('-')              # subsite = 'CE01ISSM', node = 'MFC31'
            node_code = node[0:2]
            line3 = None
            if node_code in vocab_codes['nodes']:
                line3 = vocab_codes['nodes'][node_code]
            if line3 is None:
                return None
            result = line3

        # Build display name for mooring
        elif len_rd == 8:
            subsite = rd
            subsite_code = subsite[4:8]
            line1 = None
            if array_code in vocab_codes['arrays']:
                line1 = vocab_codes['arrays'][array_code]
            line2 = None
            if subsite_code in vocab_codes['subsites']:
                line2 = vocab_codes['subsites'][subsite_code]
            if line1 is None or line2 is None:
                return None
            if prefix:
                line2 = prefix + line2
            result = ' '.join([line1, line2])
        else:
            return None

        #print '\n\t ***** debug -- build_display_name -- result: ', result
        return result

    except Exception as err:
        message = 'Exception in build display name for %s; %s' % (rd, str(err))
        current_app.logger.info(message)
        return None
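
# The Endurance prefix rule above, isolated as a runnable sketch:
def _endurance_prefix_example(rd):
    """ Return 'OR ' or 'WA ' for Endurance (CE) subsites, else ''. """
    if rd[:2] == 'CE':
        if rd[0:4] in ('CE01', 'CE02', 'CE04'):
            return 'OR '
        if rd[0:4] in ('CE06', 'CE07', 'CE09'):
            return 'WA '
    return ''

# Examples:
#   _endurance_prefix_example('CE01ISSM') -> 'OR '
#   _endurance_prefix_example('CE06ISSM') -> 'WA '
#   _endurance_prefix_example('GI03FLMA') -> ''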
def build_long_display_name(rd):
    """ Get long display name for reference designator using the codes dictionary.

    {
        "@class" : ".VocabRecord",
        "refdes" : "CE01ISSM-MFC31-00-CPMENG000",
        "vocabId" : 1,
        "instrument" : "Buoy Controller Engineering",
        "tocL1" : "Endurance",
        "tocL2" : "OR Inshore Surface Mooring",
        "tocL3" : "Multi-Function Node"
    }

    Test cases:
        RS01SUM2-MJ01B-06-MASSPA101
        RS03INT1-MJ03C-06-MASSPA301
        GI03FLMA-RIS01-00-SIOENG000

    Sample codes:
    {
        "arrays": {
            "CE": "Endurance", "CP": "Pioneer", "GA": "Argentine Basin",
            "GI": "Irminger Sea", "GP": "Station Papa", "GS": "Southern Ocean",
            "RS": "Cabled"
        },
        "classes": {
            "ADCPA": "Velocity Profiler (short range) for mobile assets",
            "ADCPS": "Velocity Profiler (75 kHz)",
            "ADCPT": "Velocity Profiler (150kHz)",
            "BOTPT": "Bottom Pressure and Tilt",
            "CAMDS": "Digital Still Camera",
            "CAMHD": "HD Digital Video Camera",
            "CPMEN": "Buoy Controller Engineering",
            "CTDAV": "CTD AUV",
            "CTDBP": "CTD Pumped",
            "CTDGV": "CTD Glider",
            "CTDMO": "CTD Mooring (Inductive)",
            "CTDPF": "CTD Profiler",
            "D1000": "Temperature Sensor (on the RAS-PPS Seafloor Fluid Sampler)",
            "DCLEN": "Data Concentrator Logger (DCL) Engineering",
            "DOFST": "Dissolved Oxygen Fast Response",
            "DOSTA": "Dissolved Oxygen Stable Response",
            "ENG00": "Engineering",
            "FDCHP": "Direct Covariance Flux",
            "FLOBN": "Benthic Fluid Flow",
            "FLORD": "2-Wavelength Fluorometer",
            "FLORT": "3-Wavelength Fluorometer",
            "HPIES": "Horizontal Electric Field",
            "HYDBB": "Broadband Acoustic Receiver (Hydrophone)",
            "HYDGN": "Hydrogen Sensor",
            "HYDLF": "Low Frequency Acoustic Receiver (Hydrophone)",
            "MASSP": "Mass Spectrometer",
            "METBK": "Bulk Meteorology Instrument Package",
            "MOPAK": "3-Axis Motion Pack",
            "NUTNR": "Nitrate",
            "OBSBB": "Broadband Ocean Bottom Seismometer",
            "OBSBK": "Broadband Ocean Bottom Seismometer",
            "OBSSK": "Short-Period Ocean Bottom Seismometer",
            "OBSSP": "Short-Period Ocean Bottom Seismometer",
            "OPTAA": "Absorption Spectrophotometer",
            "OSMOI": "Osmosis-Based Water Sampler",
            "PARAD": "Photosynthetically Available Radiation",
            "PCO2A": "pCO2 Air-Sea",
            "PCO2W": "pCO2 Water",
            "PHSEN": "Seawater pH",
            "PPSDN": "Particulate DNA Sampler",
            "PRESF": "Seafloor Pressure",
            "PREST": "Tidal Seafloor Pressure",
            "RASFL": "Hydrothermal Vent Fluid Interactive Sampler",
            "RTE00": "Radar Target Enhancer (RTE)",
            "SIOEN": "Platform Controller Engineering",
            "SPKIR": "Spectral Irradiance",
            "SPPEN": "Surface Piercing Profiler Engineering",
            "STCEN": "Low Power Buoy Controller Engineering",
            "THSPH": "Hydrothermal Vent Fluid In-situ Chemistry",
            "TMPSF": "Diffuse Vent Fluid 3-D Temperature Array",
            "TRHPH": "Hydrothermal Vent Fluid Temperature and Resistivity",
            "VADCP": "5-Beam",
            "VEL3D": "3-D Single Point Velocity Meter",
            "VELPT": "Single Point Velocity Meter",
            "WAVSS": "Surface Wave Spectra",
            "WFPEN": "Wire-Following Profiler Engineering",
            "ZPLSC": "Bio-acoustic Sonar (Coastal)",
            "ZPLSG": "Bio-acoustic Sonar (Global)"
        },
        "nodes": {
            "FM": "Subsurface Buoy",
            "GL": "Coastal Glider",
            "GP": "GP",
            "LJ": "Low-Power Jbox",
            "LV": "Low-Voltage Node",
            "MF": "Multi-Function Node",
            "MJ": "Medium-Power Jbox",
            "PC": "Platform Interface Controller",
            "PG": "Profiling Glider",
            "PN": "Primary Node",
            "RI": "Mooring Riser",
            "SB": "Surface Buoy",
            "SC": "SC",
            "SF": "Shallow Profiler",
            "SP": "Surface Piercing Profiler",
            "WF": "Wire-Following Profiler"
        },
        "subsites": {
            "ASHS": "Axial Seamount ASHES",
            "AXBS": "Axial Base",
            "AXPD": "Axial Base Profiler Dock",
            "AXPS": "Axial Base Shallow Profiler Mooring",
            "AXSM": "Axial Base Surface Mooring",
            "CCAL": "Axial Seamount Central Caldera",
            "CNSM": "Central Surface Mooring",
            "CNSP": "Central Surface Piercing Profiler Mooring",
            "ECAL": "Axial Seamount Eastern Caldera",
            "FLMA": "Flanking Subsurface Mooring A",
            "FLMB": "Flanking Subsurface Mooring B",
            "HYPM": "Apex Profiler Mooring",
            "INT1": "Axial Seamount International District 1",
            "INT2": "Axial Seamount International District 2",
            "ISSM": "Inshore Surface Mooring",
            "ISSP": "Inshore Surface Piercing Profiler Mooring",
            "MOAS": "Mobile Asset",
            "OSBP": "Offshore Cabled Benthic Experiment Package",
            "OSPD": "Offshore Profiler Dock",
            "OSPM": "Offshore Profiler Mooring",
            "OSPS": "Offshore Cabled Shallow Profiler Mooring",
            "OSSM": "Offshore Surface Mooring",
            "PACC": "PACC",
            "PMCI": "Central Inshore Profiler Mooring",
            "PMCO": "Central Offshore Profiler Mooring",
            "PMUI": "Upstream Inshore Profiler Mooring",
            "PMUO": "Upstream Offshore Profiler Mooring",
            "SBPD": "Slope Base Profiler Dock",
            "SBPS": "Slope Base Shallow Profiler Mooring",
            "SHBP": "Shelf Cabled Benthic Experiment Package",
            "SHDR": "Shelf Cabled DR",
            "SHSM": "Shelf Surface Mooring",
            "SHSP": "Shelf Surface Piercing Profiler Mooring",
            "SLBS": "Cabled",
            "SUM1": "Southern Hydrate Ridge Summit 1",
            "SUM2": "Southern Hydrate Ridge Summit 2",
            "SUMO": "Apex Surface Mooring"
        }
    }

    Notes:
    1. Node 'GP' is empty; set value to 'GP'.
    2. Subsites was missing the following; add (verification required):
        {
            "PACC": "PACC",
            "OSPD": "Offshore Profiler Dock",
            "AXPD": "Axial Base Profiler Dock",
            "SBPD": "Slope Base Profiler Dock",
            "AXSM": "Axial Base Surface Mooring",
            "SHDR": "Shelf Cabled DR"
        }
    3. Classes was missing the following; add (verification required):
        {
            "CTDAV": "CTD AUV",
            "FLOBN": "Benthic Fluid Flow",
            "MASSP": "Mass Spectrometer",
            "OBSBK": "Broadband Ocean Bottom Seismometer",
            "OBSSK": "Short-Period Ocean Bottom Seismometer",
            "OSMOI": "Osmosis-Based Water Sampler",
            "PPSDN": "Particulate DNA Sampler",
            "RASFL": "Hydrothermal Vent Fluid Interactive Sampler",
            "ZPLSG": "Bio-acoustic Sonar (Global)"
        }
    """
    try:
        # Get 'vocab_codes' if cached; if not cached, build cache, set and continue
        dict_cached = cache.get('vocab_codes')
        if dict_cached:
            vocab_codes = dict_cached
        else:
            vocab_dict, vocab_codes = _compile_vocab()
            cache.set('vocab_dict', vocab_dict, timeout=CACHE_TIMEOUT)
            cache.set('vocab_codes', vocab_codes, timeout=CACHE_TIMEOUT)

        # Verify 'vocab_codes' has content, otherwise error
        if not vocab_codes:
            message = 'Vocabulary processing failed to obtain vocab_codes dictionary, unable to process.'
            current_app.logger.info(message)
            return None

        # Process reference designator using 'vocab_dict' and 'vocab_codes'
        len_rd = len(rd)
        if len_rd < 8:
            return None

        prefix = ''
        array_code = rd[:2]
        # For coastal Endurance, check subsite and apply prefix as required.
        if array_code == 'CE':
            key = rd[0:4]    # four-character subsite prefix, e.g. 'CE01'
            if key == 'CE01' or key == 'CE02' or key == 'CE04':
                prefix = 'OR '
            elif key == 'CE06' or key == 'CE07' or key == 'CE09':
                prefix = 'WA '

        # Build long display name for instrument
        if len_rd == 27:
            subsite, node, instr = rd.split('-', 2)    # subsite = 'CE01ISSM', node = 'MFC31'
            subsite_code = subsite[4:8]
            node_code = node[0:2]
            port, instrument = instr.split('-')
            instr_class = instrument[0:5]
            line1 = None
            if array_code in vocab_codes['arrays']:
                line1 = vocab_codes['arrays'][array_code]
            line2 = None
            if subsite_code in vocab_codes['subsites']:
                line2 = vocab_codes['subsites'][subsite_code]
            line3 = None
            if node_code in vocab_codes['nodes']:
                line3 = vocab_codes['nodes'][node_code]
            line4 = None
            if instr_class in vocab_codes['classes']:
                line4 = vocab_codes['classes'][instr_class]
            if line1 is None or line2 is None or line3 is None or line4 is None:
                return None
            if prefix:
                line2 = prefix + line2
            tmp = ' '.join([line1, line2])
            result = ' - '.join([tmp, line3, line4])

        # Build long display name for platform (sample: RS01SBPD-DP01A)
        elif len_rd == 14:
            subsite, node = rd.split('-')
            subsite_code = subsite[4:8]
            node_code = node[0:2]
            line1 = None
            if array_code in vocab_codes['arrays']:
                line1 = vocab_codes['arrays'][array_code]
            line2 = None
            if subsite_code in vocab_codes['subsites']:
                line2 = vocab_codes['subsites'][subsite_code]
            line3 = None
            if node_code in vocab_codes['nodes']:
                line3 = vocab_codes['nodes'][node_code]
            if line1 is None or line2 is None or line3 is None:
                return None
            if prefix:
                line2 = prefix + line2
            tmp = ' '.join([line1, line2])
            result = ' - '.join([tmp, line3])

        # Build long display name for mooring
        elif len_rd == 8:
            subsite = rd
            subsite_code = subsite[4:8]
            line1 = None
            if array_code in vocab_codes['arrays']:
                line1 = vocab_codes['arrays'][array_code]
            line2 = None
            if subsite_code in vocab_codes['subsites']:
                line2 = vocab_codes['subsites'][subsite_code]
            if line1 is None or line2 is None:
                return None
            if prefix:
                line2 = prefix + line2
            result = ' '.join([line1, line2])
        else:
            return None

        #print '\n\t ***** debug -- build_long_display_name - result: ', result
        return result

    except Exception as err:
        message = 'Exception in build long display name for %s; %s' % (rd, str(err))
        current_app.logger.info(message)
        return None
def get_events():
    ''' -- M@C 05/12/2015. Added to support event query on stream data. '''
    try:
        '''
        Listing GET request of all events. This method is cached for 1 hour.
        '''
        data = {}
        cached = cache.get('event_list')
        if cached:
            data = cached
        else:
            url = current_app.config['UFRAME_ASSETS_URL']\
                + '/%s' % 'events'
            payload = requests.get(url)
            data = payload.json()
            if payload.status_code != 200:
                return jsonify({"events": payload.json()}), payload.status_code
            data = _compile_events(data)
            if "error" not in data:
                cache.set('event_list', data, timeout=CACHE_TIMEOUT)

        if request.args.get('ref_des') and request.args.get('ref_des') != "":
            ref_des = request.args.get('ref_des')
            resp = get_events_by_ref_des(data, ref_des)
            return resp

        if request.args.get('search') and request.args.get('search') != "":
            return_list = []
            ven_set = []
            ven_subset = []
            search_term = str(request.args.get('search')).split()
            search_set = set(search_term)
            for subset in search_set:
                if len(return_list) > 0:
                    if len(ven_set) > 0:
                        ven_set = deepcopy(ven_subset)
                    else:
                        ven_set = deepcopy(return_list)
                    ven_subset = []
                    for item in return_list:
                        if subset.lower() in str(item['eventClass']).lower():
                            ven_subset.append(item)
                        elif subset.lower() in str(item['id']).lower():
                            ven_subset.append(item)
                        elif subset.lower() in str(item['startDate']).lower():
                            ven_subset.append(item)
                    data = ven_subset
                else:
                    for item in data:
                        if subset.lower() in str(item['eventClass']).lower():
                            return_list.append(item)
                        elif subset.lower() in str(item['id']).lower():
                            return_list.append(item)
                        elif subset.lower() in str(item['startDate']).lower():
                            return_list.append(item)
                    data = return_list

        result = jsonify({'events': data})
        return result
    except requests.exceptions.ConnectionError as e:
        error = "Error: Cannot connect to uframe. %s" % e
        print error
        return make_response(error, 500)
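
# The multi-term search above refines results term by term: the first term
# filters the full set, each later term filters only the previous matches.
# A compact, self-contained sketch of that idea:
def _refine_search_example(items, terms, fields):
    """ Keep items matching every term in at least one of the given fields. """
    for term in terms:
        term = term.lower()
        items = [item for item in items
                 if any(term in str(item.get(f, '')).lower() for f in fields)]
    return items

# Example:
#   rows = [{'eventClass': '.DeploymentEvent', 'id': 1},
#           {'eventClass': '.CalibrationEvent', 'id': 2}]
#   _refine_search_example(rows, ['deployment'], ['eventClass', 'id'])
#   -> [{'eventClass': '.DeploymentEvent', 'id': 1}]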
def get_assets(use_min=False, normal_data=False, reset=False):
    """ Get list of all qualified assets. """
    debug = False
    try:
        # Get 'assets_dict' if cached
        dict_cached = cache.get('assets_dict')
        if dict_cached:
            assets_dict = dict_cached

        # Get 'asset_list' if cached, else process uframe assets and cache
        cached = cache.get('asset_list')
        if cached and reset is not True:
            data = cached
        else:
            data = get_assets_payload()
    except Exception as err:
        message = str(err)
        current_app.logger.info(message)
        return internal_server_error(message)

    # Determine field to sort by, sort asset data (ooi-ui-services format)
    try:
        if request.args.get('sort') and request.args.get('sort') != "":
            sort_by = str(request.args.get('sort'))
        else:
            sort_by = 'ref_des'
        data = sorted(data, key=itemgetter(sort_by))
    except Exception as err:
        current_app.logger.info(str(err))
        pass

    # If using minimized ('min') or use_min, then strip asset data
    if request.args.get('min') == 'True' or use_min is True:
        showDeployments = False
        deploymentEvents = []
        if request.args.get('deployments') == 'True':
            showDeployments = True
        for obj in data:
            try:
                if 'metaData' in obj:
                    del obj['metaData']
                if 'events' in obj:
                    if showDeployments and obj['events'] is not None:
                        for event in obj['events']:
                            if event['eventClass'] == '.DeploymentEvent':
                                deploymentEvents.append(event)
                        del obj['events']
                        obj['events'] = deploymentEvents
                        deploymentEvents = []
                    else:
                        del obj['events']
                if 'manufactureInfo' in obj:
                    del obj['manufactureInfo']
                if 'notes' in obj:
                    del obj['notes']
                if 'physicalInfo' in obj:
                    del obj['physicalInfo']
                if 'attachments' in obj:
                    del obj['attachments']
                if 'purchaseAndDeliveryInfo' in obj:
                    del obj['purchaseAndDeliveryInfo']
                if 'lastModifiedTimestamp' in obj:
                    del obj['lastModifiedTimestamp']
            except Exception as err:
                if debug: print '\n (assets) exception: ', str(err)
                current_app.logger.info(str(err))
                raise

    """
    # Create toc information using geoJSON=true
    # Sample:
    #   request:  http://localhost:4000/uframe/assets?geoJSON=true
    #   response: (list of dicts for {mooring | platform})
    {
        "assets": [
            {
                "array_id": "CE",
                "display_name": "Endurance OR Inshore Surface Mooring ",
                "geo_location": {
                    "coordinates": [44.6583, -124.0956],
                    "depth": "25"
                },
                "reference_designator": "CE01ISSM"
            },
            . . .
    """
    if request.args.get('geoJSON') and request.args.get('geoJSON') != "":
        return_list = []
        unique = set()
        for obj in data:
            asset = {}
            if len(obj['ref_des']) <= 14 and 'coordinates' in obj:
                if obj['ref_des'] not in unique:
                    unique.add(obj['ref_des'])
                    asset['assetInfo'] = obj.pop('assetInfo')
                    asset['assetInfo']['refDes'] = obj.pop('ref_des')
                    asset['coordinates'] = obj.pop('coordinates')
                    if 'depth' in obj:
                        asset['assetInfo']['depth'] = obj.pop('depth')

                    # Get display name
                    name = asset['assetInfo']['name']
                    if not name or name is None:
                        name = get_display_name_by_rd(asset['assetInfo']['refDes'])
                        if name is None:
                            name = asset['assetInfo']['refDes']

                    json = {
                        'array_id': asset['assetInfo']['refDes'][:2],
                        'display_name': name,
                        'geo_location': {
                            'coordinates': [
                                round(asset['coordinates'][0], 4),
                                round(asset['coordinates'][1], 4)
                            ],
                            'depth': asset['assetInfo'].get('depth') or None
                        },
                        'reference_designator': asset['assetInfo']['refDes']
                    }
                    return_list.append(json)
        data = return_list

    # Search for each search item... two-tiered search:
    # - First get the search result set: (a) all assets or (b) limited by tense ('past' or 'present')
    # - Second, using the first result set, search for the additional search details.
    results = None
    if request.args.get('search') and request.args.get('search') != "":
        results = []
        return_list = []
        ven_subset = []
        search_term = str(request.args.get('search')).split()

        # Determine if result set is to be limited by tense (either 'past' or 'present')
        limit_by_tense = False
        tense_value = None
        if 'past' in search_term or 'present' in search_term:
            limit_by_tense = True
            if 'past' in search_term:
                search_term.remove('past')
                tense_value = 'past'
            else:
                search_term.remove('present')
                tense_value = 'present'

        # Set detailed search set based on request.args provided for search
        # assetInfo_fields = ['name', 'longName', 'type', 'array']
        search_set = set(search_term)
        try:
            fields = ['name', 'longName', 'type', 'array', 'metaData', 'tense', 'events']

            # Limit by selection of 'past' or 'present'?
            if limit_by_tense:
                # Make asset result set (list), based on tense
                for item in data:
                    if 'tense' in item:
                        if item['tense']:
                            if (item['tense']).lower() == tense_value:
                                results.append(item)
                                continue
            # Not limited by tense, result set is all asset data
            else:
                results = data

            # If there are (1) assets to search, and (2) search set details provided
            if results and search_set:
                # For each item in the search set, refine the list of assets by search term
                for subset in search_set:
                    subset_lower = subset.lower()
                    for item in results:
                        if 'ref_des' in item:
                            if match_subset(subset_lower, item['ref_des']):
                                return_list.append(item)
                                continue
                        for field in fields:
                            if field in item['assetInfo']:
                                if match_subset(subset_lower, item['assetInfo'][field]):
                                    return_list.append(item)
                                    break
                            else:
                                if match_subset(subset_lower, item[field]):
                                    return_list.append(item)
                                    break
                    results = return_list
        except KeyError as err:
            message = 'Asset search exception: %s' % str(err)
            if debug: print '\n debug -- ', message
            current_app.logger.info(message)
            pass

    # If search criteria used and results returned, use results as data
    if results is not None:
        data = results

    if request.args.get('startAt'):
        start_at = int(request.args.get('startAt'))
        count = int(request.args.get('count'))
        total = int(len(data))
        data_slice = data[start_at:(start_at + count)]
        result = jsonify({"count": count,
                          "total": total,
                          "startAt": start_at,
                          "assets": data_slice})
        return result
    else:
        if normal_data:
            result = data
        else:
            result = jsonify({'assets': data})
        return result
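
# match_subset() is used above but defined elsewhere. A hypothetical sketch of
# the substring test it presumably performs; this is an assumption for
# illustration, not the actual helper:
def _match_subset_sketch(subset_lower, value):
    """ True when the lowercased search term occurs in the field's string form. """
    if value is None:
        return False
    return subset_lower in str(value).lower()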
def _compile_bad_assets(data):
    """
    Process list of 'bad' asset dictionaries from uframe; transform into the
    (ooi-ui-services) list of asset dictionaries and return the list of bad assets.

    Keys in row:
        [u'purchaseAndDeliveryInfo', u'assetId', u'lastModifiedTimestamp', u'physicalInfo',
         u'manufactureInfo', u'dataSource', u'remoteDocuments', u'assetInfo', u'@class', u'metaData']

    Route: http://localhost:4000/uframe/assets?augmented=false
    """
    bad_data = []
    bad_data_ids = []
    info = False        # detect missing vocab items when unable to create display name(s)
    feedback = False    # (development/debug) display messages while processing each asset
    vocab_failures = []
    dict_asset_ids = {}
    try:
        cached = cache.get('asset_rds')
        if cached:
            dict_asset_ids = cached
        if not cached or not isinstance(cached, dict):
            # If no asset_rds cached, then fetch and cache.
            asset_rds = {}
            try:
                asset_rds, _ = _compile_asset_rds()
            except Exception as err:
                message = 'Error processing _compile_asset_rds: %s' % err.message
                current_app.logger.warning(message)
            if asset_rds:
                cache.set('asset_rds', asset_rds, timeout=CACHE_TIMEOUT)
            else:
                message = 'Error in asset_rds cache update.'
                current_app.logger.warning(message)
            dict_asset_ids = asset_rds
    except Exception as err:
        message = 'Error compiling asset_rds: %s' % err.message
        current_app.logger.info(message)
        raise Exception(message)

    # Process uframe list of asset dictionaries (data).
    valid_asset_classes = ['.InstrumentAssetRecord', '.NodeAssetRecord', '.AssetRecord']
    for row in data:
        ref_des = ''
        lat = ""
        lon = ""
        latest_deployment = None
        has_deployment_event = False
        deployment_number = ""
        try:
            # Get asset_id; if no asset_id then continue.
            row['augmented'] = False
            asset_id = None
            if 'assetId' in row:
                row['id'] = row.pop('assetId')
                asset_id = row['id']
                if asset_id is None:
                    bad_data.append(row)
                    continue
            if not asset_id:
                bad_data.append(row)
                continue

            row['asset_class'] = row.pop('@class')
            row['events'] = associate_events(row['id'])
            if len(row['events']) == 0:
                row['events'] = []
            row['tense'] = None

            # If ref_des not provided in row, use dictionary lookup.
            if asset_id in dict_asset_ids:
                ref_des = dict_asset_ids[asset_id]

            # Process metadata. (Note: row['metaData'] is often an empty list, especially
            # for instruments.)
            # -- Process when row['metaData'] is None (add metaData if possible).
            if row['metaData'] is None:
                if ref_des:
                    row['metaData'] = [{u'type': u'java.lang.String', u'key': u'Ref Des', u'value': ref_des}]
                else:
                    if asset_id not in bad_data_ids:
                        bad_data_ids.append(asset_id)
                        bad_data.append(row)
                    continue
            # -- Process when row['metaData'] is not None.
            else:
                # If metaData provided is empty and ref_des is available, manually add
                # metaData; with no ref_des, continue.
                if not row['metaData']:
                    # Manually add 'Ref Des' value to row using ref_des.
                    if ref_des:
                        row['metaData'] = [{u'type': u'java.lang.String', u'key': u'Ref Des', u'value': ref_des}]
                    # No 'metaData' and no ref_des, continue.
                    else:
                        if asset_id not in bad_data_ids:
                            bad_data_ids.append(asset_id)
                            bad_data.append(row)
                        continue
                # Process 'metaData' provided.
                else:
                    for meta_data in row['metaData']:
                        if meta_data['key'] == 'Latitude':
                            lat = meta_data['value']
                            coord = convert_lat_lon(lat, "")
                            meta_data['value'] = coord[0]
                        if meta_data['key'] == 'Longitude':
                            lon = meta_data['value']
                            coord = convert_lat_lon("", lon)
                            meta_data['value'] = coord[1]
                        if meta_data['key'] == 'Deployment Number':
                            deployment_number = meta_data['value']
                        # If key 'Ref Des' has a value, use it; otherwise use the
                        # ref_des lookup value.
                        if meta_data['key'] == 'Ref Des':
                            if meta_data['value']:
                                ref_des = meta_data['value']
                            meta_data['value'] = ref_des

            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            # If no reference designator available, even with an asset id, continue.
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if not ref_des:
                if asset_id not in bad_data_ids:
                    bad_data_ids.append(asset_id)
                    bad_data.append(row)
                continue

            # Set row values with reference designator.
            row['ref_des'] = ref_des
            row['Ref Des'] = ref_des
            if feedback:
                print '\n debug ---------------- (%r) ref_des: *%s*' % (asset_id, ref_des)

            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            # Get asset class based on reference designator.
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if not row['asset_class']:
                if len(ref_des) == 27:
                    row['asset_class'] = '.InstrumentAssetRecord'
                elif len(ref_des) == 14:
                    row['asset_class'] = '.NodeAssetRecord'
                elif len(ref_des) == 8:
                    row['asset_class'] = '.AssetRecord'
                else:
                    if asset_id not in bad_data_ids:
                        bad_data_ids.append(asset_id)
                        bad_data.append(row)
                    continue
            else:
                # Treat rows with an unknown asset class as bad.
                asset_class = row['asset_class']
                if asset_class not in valid_asset_classes:
                    if asset_id not in bad_data_ids:
                        bad_data_ids.append(asset_id)
                        bad_data.append(row)
                    continue

            if deployment_number is not None:
                row['deployment_number'] = deployment_number

            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            # Process events.
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            for events in row['events']:
                if events['eventClass'] == '.DeploymentEvent':
                    has_deployment_event = True
                    if events['tense'] == 'PRESENT':
                        row['tense'] = events['tense']
                    else:
                        row['tense'] = 'PAST'
                    if latest_deployment is None and \
                       events['locationLonLat'] is not None and \
                       len(events['locationLonLat']) == 2:
                        latest_deployment = events['startDate']
                        lat = events['locationLonLat'][1]
                        lon = events['locationLonLat'][0]
                    if events['locationLonLat'] is not None and \
                       latest_deployment is not None and \
                       len(events['locationLonLat']) == 2 and \
                       events['startDate'] > latest_deployment:
                        latest_deployment = events['startDate']
                        lat = events['locationLonLat'][1]
                        lon = events['locationLonLat'][0]
            row['hasDeploymentEvent'] = has_deployment_event
            row['coordinates'] = convert_lat_lon(lat, lon)

            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            # Populate assetInfo dictionary.
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if not row['assetInfo']:
                row['assetInfo'] = {'name': '', 'type': '', 'owner': '', 'description': ''}

            # Populate assetInfo type.
            if row['asset_class'] == '.InstrumentAssetRecord':
                row['assetInfo']['type'] = 'Sensor'
            elif row['asset_class'] == '.NodeAssetRecord':
                row['assetInfo']['type'] = 'Mooring'
            elif row['asset_class'] == '.AssetRecord':
                if len(ref_des) == 27:
                    row['assetInfo']['type'] = 'Sensor'
                elif len(ref_des) == 14:
                    row['assetInfo']['type'] = 'Platform'
                elif len(ref_des) == 8:
                    row['assetInfo']['type'] = 'Mooring'
                else:
                    if info:
                        message = 'Asset id %d, type .AssetRecord, has a malformed reference designator (%s)' % \
                                  (asset_id, ref_des)
                        current_app.logger.info(message)
                    row['assetInfo']['type'] = 'Unknown'
            else:
                if info:
                    message = 'Note ----- Unknown asset_class (%s), set to \'Unknown\'. ' % row['asset_class']
                    current_app.logger.info(message)
                row['assetInfo']['type'] = 'Unknown'

            try:
                # Verify all necessary attributes are available; if not, create and set to empty.
                if 'name' not in row['assetInfo']:
                    row['assetInfo']['name'] = ''
                if 'longName' not in row['assetInfo']:
                    row['assetInfo']['longName'] = ''
                if 'array' not in row['assetInfo']:
                    row['assetInfo']['array'] = ''
                if 'assembly' not in row['assetInfo']:
                    row['assetInfo']['assembly'] = ''

                # Populate assetInfo - name and long name; on failure to get a display
                # name, use ref_des and record the failure.
                name = get_dn_by_rd(ref_des)
                if name is None:
                    if ref_des not in vocab_failures:
                        vocab_failures.append(ref_des)
                    name = ref_des
                longName = get_ldn_by_rd(ref_des)
                if longName is None:
                    if ref_des not in vocab_failures:
                        vocab_failures.append(ref_des)
                    longName = ref_des
                row['assetInfo']['name'] = name
                row['assetInfo']['longName'] = longName

                # Populate assetInfo - array and assembly.
                if len(ref_des) >= 8:
                    row['assetInfo']['array'] = get_dn_by_rd(ref_des[:2])
                if len(ref_des) >= 14:
                    row['assetInfo']['assembly'] = get_dn_by_rd(ref_des[:14])
            except Exception:
                if asset_id not in bad_data_ids:
                    bad_data_ids.append(asset_id)
                    bad_data.append(row)
                continue

        except Exception:
            continue

    """
    print '\n debug -- len(bad_data): ', len(bad_data)
    print '\n debug -- len(bad_data_ids): ', len(bad_data_ids)
    """
    return bad_data
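# --- Illustrative usage sketch (not part of the service) ---------------------------
# The shape of input _compile_bad_assets() expects and why a row is classified as
# 'bad'. The asset id and field values are made up, and an application context
# (cache, current_app) is assumed to be in place.
def _example_compile_bad_assets():
    rows = [{'assetId': 9999,                       # hypothetical asset id
             '@class': '.InstrumentAssetRecord',
             'assetInfo': None,
             'metaData': None}]                     # no metaData to mine for 'Ref Des'
    # With no 'Ref Des' metadata and no cached asset_rds entry for 9999,
    # this row comes back in the bad asset list.
    return _compile_bad_assets(rows)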
def get_assets():
    """
    Listing GET request of all assets. This method is cached for 1 hour.
    """
    # Manually set up the cache.
    cached = cache.get('asset_list')
    if cached:
        data = cached
    else:
        uframe_obj = uFrameAssetCollection()
        data = uframe_obj.to_json()
        for row in data:
            lat = ""
            lon = ""
            ref_des = ""
            deployment_number = ""
            row['id'] = row.pop('assetId')
            row['asset_class'] = row.pop('@class')
            row['events'] = _associate_events(row['id'])
            try:
                if row['metaData'] is not None:
                    for meta_data in row['metaData']:
                        # Repair misspelled metadata key delivered by uframe.
                        if meta_data['key'] == 'Laditude ':
                            meta_data['key'] = 'Latitude'
                        if meta_data['key'] == 'Latitude':
                            lat = meta_data['value']
                            coord = _convert_lat_lon(lat, "")
                            meta_data['value'] = coord[0]
                        if meta_data['key'] == 'Longitude':
                            lon = meta_data['value']
                            coord = _convert_lat_lon("", lon)
                            meta_data['value'] = coord[1]
                        if meta_data['key'] == 'Ref Des SN':
                            meta_data['key'] = 'Ref Des'
                        if meta_data['key'] == 'Ref Des':
                            ref_des = meta_data['value']
                        if meta_data['key'] == 'Deployment Number':
                            deployment_number = meta_data['value']
                row['ref_des'] = ref_des
                if deployment_number is not None:
                    row['deployment_number'] = deployment_number
                # Use metadata coordinates when provided, otherwise fall back to the
                # event location. (The original 'lat > 0 and lon > 0' compared strings
                # to an int, which is always True in Python 2.)
                if lat and lon:
                    row['coordinates'] = _convert_lat_lon(lat, lon)
                else:
                    for events in row['events']:
                        if events['locationLonLat'] is not None:
                            lat = events['locationLonLat'][1]
                            lon = events['locationLonLat'][0]
                            row['coordinates'] = _convert_lat_lon(lat, lon)
                            lat = 0.0
                            lon = 0.0
                if len(ref_des) > 0:
                    # Determine the asset name from the DB if there is none.
                    try:
                        if row['assetInfo']['name'] is None or row['assetInfo']['name'] == "":
                            row['assetInfo']['name'] = get_display_name_by_rd(ref_des)
                    except Exception:
                        pass
            except Exception:
                pass
        if "error" not in data:
            cache.set('asset_list', data, timeout=CACHE_TIMEOUT)

    data = sorted(data, key=itemgetter('id'))

    if request.args.get('min') == 'True':
        for obj in data:
            try:
                del obj['metaData']
                del obj['events']
                del obj['manufactureInfo']
                del obj['notes']
                del obj['physicalInfo']
                del obj['attachments']
                del obj['purchaseAndDeliveryInfo']
                del obj['lastModifiedTimestamp']
            except KeyError:
                raise

    if request.args.get('search') and request.args.get('search') != "":
        return_list = []
        search_term = request.args.get('search')
        for item in data:
            if search_term.lower() in str(item['assetInfo']['name']).lower():
                return_list.append(item)
            if search_term.lower() in str(item['id']):
                return_list.append(item)
            if search_term.lower() in str(item['ref_des']).lower():
                return_list.append(item)
            if search_term.lower() in str(item['assetInfo']['type']).lower():
                return_list.append(item)
            if search_term.lower() in str(item['events']).lower():
                return_list.append(item)
            if search_term.lower() in str(item['metaData']).lower():
                return_list.append(item)
        data = return_list

    if request.args.get('startAt'):
        start_at = int(request.args.get('startAt'))
        count = int(request.args.get('count'))
        total = len(data)
        data_slice = data[start_at:(start_at + count)]
        result = jsonify({"count": count,
                          "total": total,
                          "startAt": start_at,
                          "assets": data_slice})
        return result
    else:
        result = jsonify({'assets': data})
        return result
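# --- Illustrative usage sketch (not part of the service) ---------------------------
# Combining the 'min', 'search' and paging arguments handled by get_assets(). The
# host and route are assumptions for illustration only.
def _example_get_assets_requests():
    import requests
    url = 'http://localhost:4000/uframe/assets'    # hypothetical host/route
    # Trimmed records (metaData, events, etc. removed) matching a search term:
    trimmed = requests.get(url, params={'min': 'True', 'search': 'CTD'})
    # First page of ten trimmed records:
    page = requests.get(url, params={'min': 'True', 'startAt': 0, 'count': 10})
    return trimmed.json(), page.json()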
def streams_list():
    """
    Accepts stream_name or reference_designator as a URL argument.
    """
    if request.args.get('stream_name'):
        try:
            # Validate the requested stream name; the result is discarded.
            dict_from_stream(request.args.get('stream_name'))
        except Exception as e:
            current_app.logger.exception('**** (1) exception: ' + e.message)
            return jsonify(error=e.message), 500

    cached = cache.get('stream_list')
    if cached:
        retval = cached
    else:
        try:
            streams = dfs_streams()
        except Exception as e:
            current_app.logger.exception('**** (2) exception: ' + e.message)
            return jsonify(error=e.message), 500

        retval = []
        for stream in streams:
            try:
                data_dict = dict_from_stream(*stream)
            except Exception as e:
                current_app.logger.exception('\n**** (3) exception: ' + e.message)
                continue
            if request.args.get('reference_designator'):
                if request.args.get('reference_designator') != data_dict['reference_designator']:
                    continue
            retval.append(data_dict)

        cache.set('stream_list', retval, timeout=CACHE_TIMEOUT)

    if request.args.get('min') == 'True':
        for obj in retval:
            try:
                del obj['parameter_id']
                del obj['units']
                del obj['variable_type']
                del obj['variable_types']
                del obj['variables']
                del obj['variables_shape']
            except KeyError:
                raise

    if request.args.get('search') and request.args.get('search') != "":
        return_list = []
        search_term = request.args.get('search')
        for item in retval:
            if search_term.lower() in str(item['stream_name']).lower():
                return_list.append(item)
            if search_term.lower() in str(item['display_name']).lower():
                return_list.append(item)
            if search_term.lower() in str(item['reference_designator']).lower():
                return_list.append(item)
        retval = return_list

    if request.args.get('startAt'):
        start_at = int(request.args.get('startAt'))
        count = int(request.args.get('count'))
        total = len(retval)
        retval_slice = retval[start_at:(start_at + count)]
        result = jsonify({"count": count,
                          "total": total,
                          "startAt": start_at,
                          "streams": retval_slice})
        return result
    else:
        return jsonify(streams=retval)
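# --- Illustrative usage sketch (not part of the service) ---------------------------
# Exercising the streams_list() filters. The host, route and stream name are
# assumptions for illustration; the 'stream_name', 'reference_designator' and
# 'min' parameters mirror the request.args handling above.
def _example_streams_list_requests():
    import requests
    url = 'http://localhost:4000/uframe/stream'    # hypothetical host/route
    # Validate a single stream name (a 500 with an error message on failure):
    one = requests.get(url, params={'stream_name': 'ctdbp_cdef_dcl_instrument'})
    # Trimmed stream records for a single reference designator:
    subset = requests.get(url, params={'reference_designator': 'CE05MOAS-GL319',
                                       'min': 'True'})
    return one.json(), subset.json()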
def _compile_assets(data):
    """
    Process list of asset dictionaries from uframe; transform into (ooi-ui-services)
    list of asset dictionaries.

    Keys in row:
        [u'purchaseAndDeliveryInfo', u'assetId', u'lastModifiedTimestamp', u'physicalInfo',
         u'manufactureInfo', u'dataSource', u'remoteDocuments', u'assetInfo', u'@class', u'metaData']

    Sample Event:
        {'eventId': 83, 'startDate': -62135769600000, 'endDate': None, 'locationLonLat': [],
         'notes': 0, 'tense': u'PRESENT', 'eventClass': u'.TagEvent'}

    Sample Metadata:
        [{u'type': u'java.lang.String', u'key': u'Anchor Launch Date', u'value': u'20-Apr-14'},
         {u'type': u'java.lang.String', u'key': u'Water Depth', u'value': u'0'},
         {u'type': u'java.lang.String', u'key': u'Anchor Launch Time', u'value': u'18:26'},
         {u'type': u'java.lang.String', u'key': u'Ref Des', u'value': u'CE05MOAS-GL319'},
         {u'type': u'java.lang.String', u'key': u'Cruise Number', u'value': u'Oceanus'},
         {u'type': u'java.lang.String', u'key': u'Latitude', u'value': u"44\xb042.979' N"},
         {u'type': u'java.lang.String', u'key': u'Deployment Number', u'value': u'1'},
         {u'type': u'java.lang.String', u'key': u'Recover Date', u'value': u'28-May-14'},
         {u'type': u'java.lang.String', u'key': u'Longitude', u'value': u"124\xb032.0615' W"}]
    """
    info = False        # Log missing vocab items when unable to create display name(s)
    feedback = False    # Display information as assets are processed
    new_data = []
    bad_data = []
    bad_data_ids = []
    vocab_failures = []  # Vocabulary failures identified during asset processing are written to log.
    dict_asset_ids = {}
    depth = None
    try:
        update_asset_rds_cache = False
        cached = cache.get('asset_rds')
        if cached:
            dict_asset_ids = cached
        if not cached or not isinstance(cached, dict):
            # If no asset_rds cached, then fetch and cache.
            asset_rds = {}
            try:
                asset_rds, _ = _compile_asset_rds()
            except Exception as err:
                message = 'Error processing _compile_asset_rds: %s' % err.message
                current_app.logger.warning(message)
            if asset_rds:
                cache.set('asset_rds', asset_rds, timeout=CACHE_TIMEOUT)
            dict_asset_ids = asset_rds
    except Exception as err:
        message = 'Error compiling asset_rds: %s' % err.message
        current_app.logger.info(message)
        raise Exception(message)

    # Process uframe list of asset dictionaries (data).
    print '\n Compiling assets...'
    valid_asset_classes = ['.InstrumentAssetRecord', '.NodeAssetRecord', '.AssetRecord']
    for row in data:
        ref_des = ''
        lat = ""
        lon = ""
        latest_deployment = None
        has_deployment_event = False
        deployment_number = ""
        try:
            # Get asset_id; if no asset_id then continue.
            row['augmented'] = False
            asset_id = None
            if 'assetId' in row:
                row['id'] = row.pop('assetId')
                asset_id = row['id']
                if asset_id is None:
                    bad_data.append(row)
                    continue
            if not asset_id:
                bad_data.append(row)
                continue

            row['asset_class'] = row.pop('@class')
            row['events'] = associate_events(row['id'])
            if len(row['events']) == 0:
                row['events'] = []
            row['tense'] = None

            # If ref_des not provided in row, use dictionary lookup.
            if asset_id in dict_asset_ids:
                ref_des = dict_asset_ids[asset_id]

            # Process metadata. (Note: row['metaData'] is often an empty list, especially
            # for instruments.)
            # -- Process when row['metaData'] is None (add metaData if possible).
            if row['metaData'] is None:
                if ref_des:
                    row['metaData'] = [{u'type': u'java.lang.String', u'key': u'Ref Des', u'value': ref_des}]
                else:
                    if asset_id not in bad_data_ids:
                        bad_data_ids.append(asset_id)
                        bad_data.append(row)
                    continue
            # -- Process when row['metaData'] is not None.
            else:
                # If metaData provided is empty and ref_des is available, manually add
                # metaData; with no ref_des, continue.
                if not row['metaData']:
                    # Manually add 'Ref Des' value to row using ref_des.
                    if ref_des:
                        row['metaData'] = [{u'type': u'java.lang.String', u'key': u'Ref Des', u'value': ref_des}]
                    # No 'metaData' and no ref_des, continue.
                    else:
                        if asset_id not in bad_data_ids:
                            bad_data_ids.append(asset_id)
                            bad_data.append(row)
                        continue
                # Process 'metaData' provided.
                else:
                    for meta_data in row['metaData']:
                        if meta_data['key'] == 'Latitude':
                            lat = meta_data['value']
                            coord = convert_lat_lon(lat, "")
                            meta_data['value'] = coord[0]
                        if meta_data['key'] == 'Longitude':
                            lon = meta_data['value']
                            coord = convert_lat_lon("", lon)
                            meta_data['value'] = coord[1]
                        if meta_data['key'] == 'Deployment Number':
                            deployment_number = meta_data['value']
                        if meta_data['key'] == 'Water Depth':
                            depth = meta_data['value']
                        # If key 'Ref Des' has a value, use it; otherwise use the
                        # ref_des lookup value.
                        if meta_data['key'] == 'Ref Des':
                            if meta_data['value']:
                                ref_des = meta_data['value']
                            meta_data['value'] = ref_des

            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            # If no reference designator available, even with an asset id, continue.
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if not ref_des:
                if asset_id not in bad_data_ids:
                    bad_data_ids.append(asset_id)
                    bad_data.append(row)
                continue

            # Set row values with reference designator.
            row['ref_des'] = ref_des
            row['Ref Des'] = ref_des
            if feedback:
                print '\n debug ---------------- (%r) ref_des: *%s*' % (asset_id, ref_des)

            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            # Get asset class based on reference designator.
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if not row['asset_class']:
                if len(ref_des) == 27:
                    row['asset_class'] = '.InstrumentAssetRecord'
                elif len(ref_des) == 14:
                    row['asset_class'] = '.NodeAssetRecord'
                elif len(ref_des) == 8:
                    row['asset_class'] = '.AssetRecord'
                else:
                    message = 'ref_des is malformed (%s); unable to determine asset_class.' % ref_des
                    print '\n INFO: ', message
                    current_app.logger.info(message)
                    if asset_id not in bad_data_ids:
                        bad_data_ids.append(asset_id)
                        bad_data.append(row)
                    continue
            else:
                # Log asset class if unknown; treat the row as bad.
                asset_class = row['asset_class']
                if asset_class not in valid_asset_classes:
                    if info:
                        message = 'Reference designator (%s) has an asset class value (%s) not one of: %s' % \
                                  (ref_des, asset_class, valid_asset_classes)
                        print '\n INFO: ', message
                        current_app.logger.info(message)
                    if asset_id not in bad_data_ids:
                        bad_data_ids.append(asset_id)
                        bad_data.append(row)
                    continue

            if depth is not None:
                row['depth'] = depth
            if deployment_number is not None:
                row['deployment_number'] = deployment_number

            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            # Process events.
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            for events in row['events']:
                if events['eventClass'] == '.DeploymentEvent':
                    has_deployment_event = True
                    if events['tense'] == 'PRESENT':
                        row['tense'] = events['tense']
                    else:
                        row['tense'] = 'PAST'
                    if latest_deployment is None and \
                       events['locationLonLat'] is not None and \
                       len(events['locationLonLat']) == 2:
                        latest_deployment = events['startDate']
                        lat = events['locationLonLat'][1]
                        lon = events['locationLonLat'][0]
                    if events['locationLonLat'] is not None and \
                       latest_deployment is not None and \
                       len(events['locationLonLat']) == 2 and \
                       events['startDate'] > latest_deployment:
                        latest_deployment = events['startDate']
                        lat = events['locationLonLat'][1]
                        lon = events['locationLonLat'][0]
            row['hasDeploymentEvent'] = has_deployment_event
            row['coordinates'] = convert_lat_lon(lat, lon)

            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            # Populate assetInfo dictionary.
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if not row['assetInfo']:
                row['assetInfo'] = {'name': '', 'type': '', 'owner': '', 'description': ''}

            # Populate assetInfo type.
            if row['asset_class'] == '.InstrumentAssetRecord':
                row['assetInfo']['type'] = 'Sensor'
            elif row['asset_class'] == '.NodeAssetRecord':
                row['assetInfo']['type'] = 'Mooring'
            elif row['asset_class'] == '.AssetRecord':
                if len(ref_des) == 27:
                    row['assetInfo']['type'] = 'Sensor'
                elif len(ref_des) == 14:
                    row['assetInfo']['type'] = 'Platform'
                elif len(ref_des) == 8:
                    row['assetInfo']['type'] = 'Mooring'
                else:
                    if info:
                        message = 'Asset id %d, type .AssetRecord, has a malformed reference designator (%s)' % \
                                  (asset_id, ref_des)
                        print '\n INFO: ', message
                        current_app.logger.info(message)
                    row['assetInfo']['type'] = 'Unknown'
            else:
                if info:
                    message = 'Note ----- Unknown asset_class (%s), set to \'Unknown\'. ' % row['asset_class']
                    print '\n INFO: ', message
                    current_app.logger.info(message)
                row['assetInfo']['type'] = 'Unknown'

            try:
                # Verify all necessary attributes are available; if not, create and set to empty.
                if 'name' not in row['assetInfo']:
                    row['assetInfo']['name'] = ''
                if 'longName' not in row['assetInfo']:
                    row['assetInfo']['longName'] = ''
                if 'array' not in row['assetInfo']:
                    row['assetInfo']['array'] = ''
                if 'assembly' not in row['assetInfo']:
                    row['assetInfo']['assembly'] = ''

                # Populate assetInfo - name; on failure to get display name, use ref_des
                # and log the failure.
                name = get_dn_by_rd(ref_des)
                if name is None:
                    if ref_des not in vocab_failures:
                        vocab_failures.append(ref_des)
                    if info:
                        message = 'Vocab Note ----- reference designator (%s) failed get_dn_by_rd lookup.' % ref_des
                        current_app.logger.info(message)
                    name = ref_des

                # Populate assetInfo - long name; on failure to get long name, use
                # ref_des and log the failure.
                longName = get_ldn_by_rd(ref_des)
                if longName is None:
                    if ref_des not in vocab_failures:
                        vocab_failures.append(ref_des)
                    if info:
                        message = 'Vocab Note ----- reference designator (%s) failed get_ldn_by_rd lookup.' % ref_des
                        current_app.logger.info(message)
                    longName = ref_des
                row['assetInfo']['name'] = name
                row['assetInfo']['longName'] = longName

                # Populate assetInfo - array and assembly.
                if len(ref_des) >= 8:
                    row['assetInfo']['array'] = get_dn_by_rd(ref_des[:2])
                if len(ref_des) >= 14:
                    row['assetInfo']['assembly'] = get_dn_by_rd(ref_des[:14])
            except Exception as err:
                current_app.logger.info('asset info error: ' + str(err.message))
                if asset_id not in bad_data_ids:
                    bad_data_ids.append(asset_id)
                    bad_data.append(row)
                continue

            # Add new row to output dictionary.
            if asset_id and ref_des:
                row['augmented'] = True
                new_data.append(row)
                # If new item for dictionary of asset ids, add id with value of
                # reference designator.
                if asset_id not in dict_asset_ids:
                    dict_asset_ids[asset_id] = ref_des
                    update_asset_rds_cache = True

        except Exception as err:
            current_app.logger.info(str(err))
            continue

    if dict_asset_ids:
        if update_asset_rds_cache:
            cache.set('asset_rds', dict_asset_ids, timeout=CACHE_TIMEOUT)

    # Log vocabulary failures (occur when creating display names).
    if vocab_failures:
        vocab_failures.sort()
        message = 'These reference designator(s) are not defined, causing display name failures (%d): %s' \
                  % (len(vocab_failures), vocab_failures)
        current_app.logger.info(message)

    # Update cache for bad_asset_list.
    bad_assets_cached = cache.get('bad_asset_list')
    if bad_assets_cached:
        cache.delete('bad_asset_list')
    cache.set('bad_asset_list', bad_data, timeout=CACHE_TIMEOUT)

    print '\n Completed compiling assets...'
    return new_data, dict_asset_ids
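# --- Illustrative usage sketch (not part of the service) ---------------------------
# A minimal, made-up uframe row run through _compile_assets(); an application
# context (cache, current_app) is assumed, as are working event and vocabulary
# lookups (associate_events, get_dn_by_rd, get_ldn_by_rd).
def _example_compile_assets():
    rows = [{'assetId': 1234,                       # hypothetical asset id
             '@class': '.NodeAssetRecord',
             'assetInfo': None,
             'metaData': [{u'type': u'java.lang.String',
                           u'key': u'Ref Des',
                           u'value': u'CE05MOAS-GL319'}]}]
    new_data, dict_asset_ids = _compile_assets(rows)
    # Assuming the lookups succeed, new_data[0]['augmented'] is True and
    # dict_asset_ids maps 1234 to its reference designator.
    return new_data, dict_asset_ids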