def get_assets_payload():
    """ Get all assets from uframe, process into the ooi-ui-services list of
    assets and the dict of assets keyed by asset id, and update the
    'asset_list' and 'assets_dict' caches.

    Returns the compiled asset list on success. Connection/timeout failures
    are logged and returned as internal_server_error responses; any other
    exception is logged and re-raised.
    """
    try:
        get_vocab()
        # Get uframe connect and timeout information
        uframe_url, timeout, timeout_read = get_uframe_assets_info()
        url = '/'.join([uframe_url, 'assets'])
        payload = requests.get(url, timeout=(timeout, timeout_read))
        if payload.status_code != 200:
            # Bug fix: requests.Response has no '.status' attribute; the
            # original raised AttributeError here instead of reporting.
            message = '(%d) Failed to get uframe assets.' % payload.status_code
            current_app.logger.info(message)
            return internal_server_error(message)
        result = payload.json()
        data, assets_dict = _compile_assets(result)
        if "error" not in data:
            cache.set('asset_list', data, timeout=CACHE_TIMEOUT)
            # Bug fix: the docstring promises an 'assets_dict' cache update
            # (and get_assets() reads it), but it was never set.
            cache.set('assets_dict', assets_dict, timeout=CACHE_TIMEOUT)
            data = cache.get('asset_list')
        return data
    except requests.exceptions.ConnectionError as err:
        message = "ConnectionError getting uframe assets; %s" % str(err)
        current_app.logger.info(message)
        return internal_server_error(message)
    except requests.exceptions.Timeout as err:
        message = "Timeout getting uframe assets; %s" % str(err)
        current_app.logger.info(message)
        return internal_server_error(message)
    except Exception as err:
        message = "Error getting uframe assets; %s" % str(err)
        current_app.logger.info(message)
        raise
def get_assets_from_uframe():
    """ Fetch the raw assets list from uframe and return the parsed json.

    Connection/timeout failures are logged and re-raised as Exception with a
    descriptive message; any other error is logged and re-raised as-is.
    """
    try:
        # Get uframe connect and timeout information
        uframe_url, timeout, timeout_read = get_uframe_assets_info()
        url = '/'.join([uframe_url, 'assets'])
        response = requests.get(url, timeout=(timeout, timeout_read))
        if response.status_code != 200:
            message = '(%d) Failed to get uframe assets.' % response.status_code
            current_app.logger.info(message)
            return internal_server_error(message)
        result = response.json()
        return result
    # Fix: name the requests exceptions explicitly (consistent with
    # get_assets_payload); the bare ConnectionError/Timeout names relied on a
    # top-of-file import that cannot be confirmed from this chunk.
    except requests.exceptions.ConnectionError:
        message = 'ConnectionError getting uframe assets.'
        current_app.logger.info(message)
        raise Exception(message)
    except requests.exceptions.Timeout:
        message = 'Timeout getting uframe assets.'
        current_app.logger.info(message)
        raise Exception(message)
    except Exception as err:
        error = "Error getting uframe assets. %s" % str(err)
        current_app.logger.info(error)
        raise
def get_toc():
    """ Return the uframe table-of-contents as a JSON response. """
    try:
        data = get_uframe_toc()
        return jsonify(toc=data)
    except Exception as e:
        # Bug fix: e.message is unreliable (absent on many exception types,
        # removed in Python 3); str(e) always yields the message.
        return internal_server_error('uframe connection cannot be made.' + str(e))
def get_uframe_streams():
    ''' Lists all the streams; returns the raw uframe Response object. '''
    try:
        UFRAME_DATA = current_app.config['UFRAME_URL'] + '/sensor/m2m/inv'
        response = requests.get(UFRAME_DATA)
        return response
    # Fix: narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
    # are no longer swallowed.
    except Exception:
        return internal_server_error('uframe connection cannot be made.')
def get_uframe_stream(stream):
    ''' Lists the reference designators for the given stream; returns the
    raw uframe Response object. '''
    try:
        UFRAME_DATA = current_app.config['UFRAME_URL'] + '/sensor/m2m/inv'
        response = requests.get("/".join([UFRAME_DATA, stream]))
        return response
    # Fix: narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
    # are no longer swallowed.
    except Exception:
        return internal_server_error('uframe connection cannot be made.')
def get_uframe_stream_contents(stream, ref):
    ''' Gets the stream contents for a stream/reference designator pair;
    returns the raw uframe Response object. '''
    try:
        UFRAME_DATA = current_app.config['UFRAME_URL'] + '/sensor/m2m/inv'
        response = requests.get("/".join([UFRAME_DATA, stream, ref]))
        return response
    # Fix: narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
    # are no longer swallowed.
    except Exception:
        return internal_server_error('uframe connection cannot be made.')
def get_structured_toc():
    """ Build the toc grouped into moorings, platforms and instruments.

    De-duplicates instruments by reference designator, moorings by mooring
    code, and platforms by mooring code + platform code.
    """
    try:
        mooring_list = []
        mooring_key = []
        platform_list = []
        platform_key = []
        instrument_list = []
        instrument_key = []
        data = get_uframe_toc()
        for d in data:
            if d['reference_designator'] not in instrument_key:
                instrument_list.append({
                    'array_code': d['reference_designator'][0:2],
                    'display_name': d['instrument_display_name'],
                    'mooring_code': d['mooring_code'],
                    'platform_code': d['platform_code'],
                    # NOTE(review): instrument_code is populated from
                    # platform_code in the original code; looks like a
                    # copy/paste slip -- confirm the intended source field.
                    'instrument_code': d['platform_code'],
                    'streams': d['streams'],
                    'instrument_parameters': d['instrument_parameters'],
                    'reference_designator': d['reference_designator']
                })
                instrument_key.append(d['reference_designator'])
            if d['mooring_code'] not in mooring_key:
                mooring_list.append({
                    'array_code': d['reference_designator'][0:2],
                    'mooring_code': d['mooring_code'],
                    'platform_code': d['platform_code'],
                    'display_name': d['mooring_display_name'],
                    'geo_location': [],
                    'reference_designator': d['mooring_code']
                })
                mooring_key.append(d['mooring_code'])
            if d['mooring_code'] + d['platform_code'] not in platform_key:
                platform_list.append({
                    'array_code': d['reference_designator'][0:2],
                    'platform_code': d['platform_code'],
                    'mooring_code': d['mooring_code'],
                    'reference_designator': d['reference_designator'],
                    'display_name': d['platform_display_name']
                })
                platform_key.append(d['mooring_code'] + d['platform_code'])
        return jsonify(toc={"moorings": mooring_list,
                            "platforms": platform_list,
                            "instruments": instrument_list})
    except Exception as e:
        # Bug fix: str(e) instead of str(e.message); the message attribute is
        # unreliable and removed in Python 3.
        return internal_server_error('uframe connection cannot be made.' + str(e))
def get_uframe_streams(mooring, platform, instrument, stream_type):
    ''' Lists all the streams for an instrument; returns the raw uframe
    Response object. '''
    try:
        uframe_url, timeout, timeout_read = get_uframe_info()
        url = '/'.join([uframe_url, mooring, platform, instrument, stream_type])
        current_app.logger.info("GET %s", url)
        response = requests.get(url, timeout=(timeout, timeout_read))
        return response
    except Exception as e:
        # Bug fix: str(e) instead of str(e.message); the message attribute is
        # unreliable and removed in Python 3.
        return internal_server_error('uframe connection cannot be made.' + str(e))
def get_uframe_streams(mooring, platform, instrument, stream_type):
    ''' Lists all the streams for an instrument; returns the raw uframe
    Response object. '''
    try:
        uframe_url, timeout, timeout_read = get_uframe_info()
        url = '/'.join([uframe_url, mooring, platform, instrument, stream_type])
        current_app.logger.info("GET %s", url)
        response = requests.get(url, timeout=(timeout, timeout_read))
        return response
    except Exception as e:
        # Bug fix: str(e) instead of str(e.message); the message attribute is
        # unreliable and removed in Python 3.
        return internal_server_error('uframe connection cannot be made.' + str(e))
def get_uframe_stream_metadata_times(ref):
    ''' Returns the uFrame time bounds response for a given stream;
    empty times json (200) when uframe does not return 200. '''
    try:
        # Bug fix: split moved inside the try so a malformed ref (fewer than
        # two '-' separators) is handled instead of raising an uncaught
        # ValueError.
        mooring, platform, instrument = ref.split('-', 2)
        uframe_url, timeout, timeout_read = get_uframe_info()
        url = "/".join([uframe_url, mooring, platform, instrument,
                        'metadata', 'times'])
        response = requests.get(url, timeout=(timeout, timeout_read))
        if response.status_code == 200:
            return response
        return jsonify(times={}), 200
    except Exception as e:
        # Bug fix: str(e) instead of str(e.message).
        return internal_server_error('uframe connection cannot be made.' + str(e))
def get_uframe_instrument_metadata(ref):
    ''' Returns the uFrame metadata parameters for a given stream as json;
    empty metadata json (404) when uframe does not return 200. '''
    try:
        mooring, platform, instrument = ref.split('-', 2)
        uframe_url, timeout, timeout_read = get_uframe_info()
        url = "/".join([uframe_url, mooring, platform, instrument, 'metadata'])
        response = requests.get(url, timeout=(timeout, timeout_read))
        if response.status_code == 200:
            data = response.json()
            return jsonify(metadata=data['parameters'])
        return jsonify(metadata={}), 404
    except Exception as e:
        # Bug fix: str(e) instead of str(e.message).
        return internal_server_error('uframe connection cannot be made.' + str(e))
def get_uframe_instrument_metadata(ref):
    ''' Returns the uFrame metadata parameters for a given stream as json;
    empty metadata json (404) when uframe does not return 200. '''
    try:
        mooring, platform, instrument = ref.split('-', 2)
        uframe_url, timeout, timeout_read = get_uframe_info()
        url = "/".join([uframe_url, mooring, platform, instrument, 'metadata'])
        response = requests.get(url, timeout=(timeout, timeout_read))
        if response.status_code == 200:
            data = response.json()
            return jsonify(metadata=data['parameters'])
        return jsonify(metadata={}), 404
    except Exception as e:
        # Bug fix: str(e) instead of str(e.message).
        return internal_server_error('uframe connection cannot be made.' + str(e))
def get_uframe_stream_metadata_times(ref):
    ''' Returns the uFrame time bounds response for a given stream;
    empty times json (200) when uframe does not return 200. '''
    try:
        # Bug fix: split moved inside the try so a malformed ref (fewer than
        # two '-' separators) is handled instead of raising an uncaught
        # ValueError.
        mooring, platform, instrument = ref.split('-', 2)
        uframe_url, timeout, timeout_read = get_uframe_info()
        url = "/".join([uframe_url, mooring, platform, instrument,
                        'metadata', 'times'])
        response = requests.get(url, timeout=(timeout, timeout_read))
        if response.status_code == 200:
            return response
        return jsonify(times={}), 200
    except Exception as e:
        # Bug fix: str(e) instead of str(e.message).
        return internal_server_error('uframe connection cannot be made.' + str(e))
def get_uframe_stream_contents(mooring, platform, instrument, stream_type, stream,
                               start_time, end_time, dpa_flag,
                               provenance='false', annotations='false'):
    """ Gets the bounded stream contents; start_time and end_time need to be
    formatted datetime strings. Returns the uframe Response object on
    success, or an internal_server_error response on failure.
    """
    try:
        if dpa_flag == '0':
            query = '?beginDT=%s&endDT=%s&include_provenance=%s&include_annotations=%s' % \
                    (start_time, end_time, provenance, annotations)
        else:
            query = '?beginDT=%s&endDT=%s&include_provenance=%s&include_annotations=%s&execDPA=true' % \
                    (start_time, end_time, provenance, annotations)
        uframe_url, timeout, timeout_read = get_uframe_info()
        url = "/".join([uframe_url, mooring, platform, instrument, stream_type,
                        stream + query])
        current_app.logger.debug('***** url: ' + url)
        response = requests.get(url, timeout=(timeout, timeout_read))
        if not response:
            raise Exception('No data available from uFrame for this request.')
        if response.status_code != 200:
            # Bug fix: format the status code into the message; the original
            # passed it as a second Exception arg, yielding a tuple message.
            raise Exception('(%s) failed to retrieve stream contents from uFrame'
                            % response.status_code)
        return response
    except Exception as e:
        # Bug fix: str(e) instead of str(e.message).
        return internal_server_error('uFrame connection cannot be made. ' + str(e))
def get_uframe_stream_contents(mooring, platform, instrument, stream_type, stream,
                               start_time, end_time, dpa_flag,
                               provenance='false', annotations='false'):
    """ Gets the bounded stream contents; start_time and end_time need to be
    formatted datetime strings. Returns the uframe Response object on
    success, or an internal_server_error response on failure.
    """
    try:
        if dpa_flag == '0':
            query = '?beginDT=%s&endDT=%s&include_provenance=%s&include_annotations=%s' % \
                    (start_time, end_time, provenance, annotations)
        else:
            query = '?beginDT=%s&endDT=%s&include_provenance=%s&include_annotations=%s&execDPA=true' % \
                    (start_time, end_time, provenance, annotations)
        uframe_url, timeout, timeout_read = get_uframe_info()
        url = "/".join([uframe_url, mooring, platform, instrument, stream_type,
                        stream + query])
        current_app.logger.debug('***** url: ' + url)
        response = requests.get(url, timeout=(timeout, timeout_read))
        if not response:
            raise Exception('No data available from uFrame for this request.')
        if response.status_code != 200:
            # Bug fix: format the status code into the message; the original
            # passed it as a second Exception arg, yielding a tuple message.
            raise Exception('(%s) failed to retrieve stream contents from uFrame'
                            % response.status_code)
        return response
    except Exception as e:
        # Bug fix: str(e) instead of str(e.message).
        return internal_server_error('uFrame connection cannot be made. ' + str(e))
def get_uframe_stream_contents_chunked(mooring, platform, instrument, stream_type, stream, start_time, end_time, dpa_flag): ''' Gets the bounded stream contents, start_time and end_time need to be datetime objects ''' try: if dpa_flag == '0': query = '?beginDT=%s&endDT=%s' % (start_time, end_time) else: query = '?beginDT=%s&endDT=%s&execDPA=true' % (start_time, end_time) UFRAME_DATA = current_app.config['UFRAME_URL'] + current_app.config['UFRAME_URL_BASE'] url = "/".join([UFRAME_DATA,mooring, platform, instrument, stream_type, stream + query]) print "***:",url TOO_BIG = 1024 * 1024 * 15 # 15MB CHUNK_SIZE = 1024 * 32 #...KB TOTAL_SECONDS = 20 dataBlock = "" idx = 0 #counter t0 = time.time() with closing(requests.get(url,stream=True)) as response: content_length = 0 for chunk in response.iter_content(chunk_size=CHUNK_SIZE): content_length = content_length + CHUNK_SIZE t1 = time.time() total = t1-t0 idx+=1 if content_length > TOO_BIG or total > TOTAL_SECONDS: #('uframe response to large.') # break it down to the last know good spot t00 = time.time() idx_c = dataBlock.rfind('}, {') dataBlock = dataBlock[:idx_c] dataBlock+="} ]" t11 = time.time() totaln = t11-t00 print "size_limit or time reached",content_length/(1024 * 1024),total,totaln,idx return json.loads(dataBlock),200 # all the data is in the resonse return it as normal #previousBlock = dataBlock dataBlock+=chunk #print "transfer complete",content_length/(1024 * 1024),total #if str(dataBlock[-3:-1]) != '} ]': # idx_c = dataBlock.rfind('}') # dataBlock = dataBlock[:idx_c] # dataBlock+="} ]" # print 'uFrame appended Error Message to Stream',"\n",dataBlock[-3:-1] idx_c = dataBlock.rfind('} ]') if idx_c == -1: dataBlock+="]" return json.loads(dataBlock),200 except Exception,e: #return json.loads(dataBlock), 200 return internal_server_error('uframe connection unstable.'),500
def get_toc():
    """ Return the uframe table-of-contents as a JSON response. """
    try:
        data = get_uframe_toc()
        return jsonify(toc=data)
    except Exception as e:
        # Bug fix: e.message is unreliable (absent on many exception types,
        # removed in Python 3); str(e) always yields the message.
        return internal_server_error('uframe connection cannot be made.' + str(e))
def get_structured_toc():
    """ Build the toc grouped into moorings, platforms and instruments.

    De-duplicates instruments by reference designator, moorings by mooring
    code, and platforms by mooring code + platform code.
    """
    try:
        mooring_list = []
        mooring_key = []
        platform_list = []
        platform_key = []
        instrument_list = []
        instrument_key = []
        data = get_uframe_toc()
        for d in data:
            if d['reference_designator'] not in instrument_key:
                instrument_list.append({
                    'array_code': d['reference_designator'][0:2],
                    'display_name': d['instrument_display_name'],
                    'mooring_code': d['mooring_code'],
                    'platform_code': d['platform_code'],
                    # NOTE(review): instrument_code is populated from
                    # platform_code in the original code; looks like a
                    # copy/paste slip -- confirm the intended source field.
                    'instrument_code': d['platform_code'],
                    'streams': d['streams'],
                    'instrument_parameters': d['instrument_parameters'],
                    'reference_designator': d['reference_designator']
                })
                instrument_key.append(d['reference_designator'])
            if d['mooring_code'] not in mooring_key:
                mooring_list.append({
                    'array_code': d['reference_designator'][0:2],
                    'mooring_code': d['mooring_code'],
                    'platform_code': d['platform_code'],
                    'display_name': d['mooring_display_name'],
                    'geo_location': [],
                    'reference_designator': d['mooring_code']
                })
                mooring_key.append(d['mooring_code'])
            if d['mooring_code'] + d['platform_code'] not in platform_key:
                platform_list.append({
                    'array_code': d['reference_designator'][0:2],
                    'platform_code': d['platform_code'],
                    'mooring_code': d['mooring_code'],
                    'reference_designator': d['reference_designator'],
                    'display_name': d['platform_display_name']
                })
                platform_key.append(d['mooring_code'] + d['platform_code'])
        return jsonify(toc={"moorings": mooring_list,
                            "platforms": platform_list,
                            "instruments": instrument_list})
    except Exception as e:
        # Bug fix: str(e) instead of str(e.message); the message attribute is
        # unreliable and removed in Python 3.
        return internal_server_error('uframe connection cannot be made.' + str(e))
def get_uframe_stream_contents_chunked(mooring, platform, instrument, stream_type, stream, start_time, end_time, dpa_flag): ''' Gets the bounded stream contents, start_time and end_time need to be datetime objects ''' try: if dpa_flag == '0': query = '?beginDT=%s&endDT=%s' % (start_time, end_time) else: query = '?beginDT=%s&endDT=%s&execDPA=true' % (start_time, end_time) UFRAME_DATA = current_app.config['UFRAME_URL'] + current_app.config[ 'UFRAME_URL_BASE'] url = "/".join([ UFRAME_DATA, mooring, platform, instrument, stream_type, stream + query ]) print "***:", url TOO_BIG = 1024 * 1024 * 15 # 15MB CHUNK_SIZE = 1024 * 32 #...KB TOTAL_SECONDS = 20 dataBlock = "" idx = 0 #counter t0 = time.time() with closing(requests.get(url, stream=True)) as response: content_length = 0 for chunk in response.iter_content(chunk_size=CHUNK_SIZE): content_length = content_length + CHUNK_SIZE t1 = time.time() total = t1 - t0 idx += 1 if content_length > TOO_BIG or total > TOTAL_SECONDS: #('uframe response to large.') # break it down to the last know good spot t00 = time.time() idx_c = dataBlock.rfind('}, {') dataBlock = dataBlock[:idx_c] dataBlock += "} ]" t11 = time.time() totaln = t11 - t00 print "size_limit or time reached", content_length / ( 1024 * 1024), total, totaln, idx return json.loads(dataBlock), 200 # all the data is in the resonse return it as normal #previousBlock = dataBlock dataBlock += chunk #print "transfer complete",content_length/(1024 * 1024),total #if str(dataBlock[-3:-1]) != '} ]': # idx_c = dataBlock.rfind('}') # dataBlock = dataBlock[:idx_c] # dataBlock+="} ]" # print 'uFrame appended Error Message to Stream',"\n",dataBlock[-3:-1] idx_c = dataBlock.rfind('} ]') if idx_c == -1: dataBlock += "]" return json.loads(dataBlock), 200 except Exception, e: #return json.loads(dataBlock), 200 return internal_server_error('uframe connection unstable.'), 500
def get_assets(use_min=False, normal_data=False, reset=False): """ Get list of all qualified assets. """ debug = False try: # Get 'assets_dict' if cached dict_cached = cache.get('assets_dict') if dict_cached: assets_dict = dict_cached # Get 'asset_list' if cached, else process uframe assets and cache cached = cache.get('asset_list') if cached and reset is not True: data = cached else: data = get_assets_payload() except Exception as err: message = str(err) current_app.logger.info(message) return internal_server_error(message) # Determine field to sort by, sort asset data (ooi-ui-services format) try: if request.args.get('sort') and request.args.get('sort') != "": sort_by = str(request.args.get('sort')) else: sort_by = 'ref_des' data = sorted(data, key=itemgetter(sort_by)) except Exception as err: current_app.logger.info(str(err)) pass # If using minimized ('min') or use_min, then strip asset data if request.args.get('min') == 'True' or use_min is True: showDeployments = False deploymentEvents = [] if request.args.get('deployments') == 'True': showDeployments = True for obj in data: try: if 'metaData' in obj: del obj['metaData'] if 'events' in obj: if showDeployments and obj['events'] is not None: for event in obj['events']: if event['eventClass'] == '.DeploymentEvent': deploymentEvents.append(event) del obj['events'] obj['events'] = deploymentEvents deploymentEvents = [] else: del obj['events'] if 'manufactureInfo' in obj: del obj['manufactureInfo'] if 'notes' in obj: del obj['notes'] if 'physicalInfo' in obj: del obj['physicalInfo'] if 'attachments' in obj: del obj['attachments'] if 'purchaseAndDeliveryInfo' in obj: del obj['purchaseAndDeliveryInfo'] if 'lastModifiedTimestamp' in obj: del obj['lastModifiedTimestamp'] except Exception as err: if debug: print '\n (assets) exception: ', str(err) current_app.logger.info(str(err)) raise """ # Create toc information using geoJSON=true # Sample # request: http://localhost:4000/uframe/assets?geoJSON=true # response: (list 
of dicts for {mooring | platform} { "assets": [ { "array_id": "CE", "display_name": "Endurance OR Inshore Surface Mooring ", "geo_location": { "coordinates": [ 44.6583, -124.0956 ], "depth": "25" }, "reference_designator": "CE01ISSM" }, . . . """ if request.args.get('geoJSON') and request.args.get('geoJSON') != "": return_list = [] unique = set() for obj in data: asset = {} if (len(obj['ref_des']) <= 14 and 'coordinates' in obj): if (obj['ref_des'] not in unique): unique.add(obj['ref_des']) asset['assetInfo'] = obj.pop('assetInfo') asset['assetInfo']['refDes'] = obj.pop('ref_des') asset['coordinates'] = obj.pop('coordinates') if 'depth' in obj: asset['assetInfo']['depth'] = obj.pop('depth') # Get display name name = asset['assetInfo']['name'] if not name or name is None: name = get_display_name_by_rd(asset['assetInfo']['refDes']) if name is None: name = asset['assetInfo']['refDes'] json = { 'array_id': asset['assetInfo']['refDes'][:2], 'display_name': name, 'geo_location': { 'coordinates': [ round(asset['coordinates'][0], 4), round(asset['coordinates'][1], 4) ], 'depth': asset['assetInfo']['depth'] or None }, 'reference_designator': asset['assetInfo']['refDes'] } return_list.append(json) data = return_list # Search for each search item...two tiered search # - First get search result set: (a) all or (b) limited by tense ('Recovered' or 'Deployed') # - Second using first result set, search for additional search details. 
results = None if request.args.get('search') and request.args.get('search') != "": results = [] return_list = [] ven_subset = [] search_term = str(request.args.get('search')).split() # Determine if result set to be limited by tense (either 'recovered' or 'deployed') limit_by_tense = False tense_value = None if 'past' in search_term or 'present' in search_term: limit_by_tense = True if 'past' in search_term: search_term.remove('past') tense_value = 'past' else: search_term.remove('present') tense_value = 'present' # Set detailed search set based on request.args provided for search # assetInfo_fields = ['name', 'longName', 'type', 'array'] search_set = set(search_term) try: fields = ['name', 'longName', 'type', 'array', 'metaData', 'tense', 'events'] # Limit by selection of 'recovered' or 'deployed'? if limit_by_tense: # Make asset result set (list), based on tense for item in data: if 'tense' in item: if item['tense']: if (item['tense']).lower() == tense_value: results.append(item) continue # Not limited by tense, result set is all asset data else: results = data # If there are (1) assets to search, and (2) search set details provided if results and search_set: # for each item in the search set, refine list of assets by search term for subset in search_set: subset_lower = subset.lower() for item in results: if 'ref_des' in item: if match_subset(subset_lower, item['ref_des']): return_list.append(item) continue for field in fields: if field in item['assetInfo']: if match_subset(subset_lower, item['assetInfo'][field]): return_list.append(item) break else: if match_subset(subset_lower, item[field]): return_list.append(item) break results = return_list except KeyError as err: message = 'Asset search exception: %s' % str(err) if debug: print '\n debug -- ', message current_app.logger.info(message) pass # If search criteria used and results returned, use results as data if results is not None: data = results if request.args.get('startAt'): start_at = 
int(request.args.get('startAt')) count = int(request.args.get('count')) total = int(len(data)) data_slice = data[start_at:(start_at + count)] result = jsonify({"count": count, "total": total, "startAt": start_at, "assets": data_slice}) return result else: if normal_data: result = data else: result = jsonify({'assets': data}) return result