def get_data_json(system_uuid, start_date, end_date):
    """Route handler: return JSON observation data for a grow system.

    Looks up the first device attached to ``system_uuid`` and returns its
    observations between ``start_date`` and ``end_date``, shifted to the
    session user's local time.  On any failure an error image is served.
    """
    #TODO - verify that the user has the privliges to see the contents of the zip file.
    logger.info('{}: api/get_data_json/{}/{}/{}'.format(
        session['user']['nick_name'], system_uuid, start_date, end_date))
    try:
        # Get the fopd device UUID
        with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
            sql = """select device_uuid from grow_system as gs
                     inner join grow_system_devices as gsd
                         on gs.uuid = gsd.grow_system_uuid
                     where gs.uuid = %s"""
            cur.execute(sql, (system_uuid, ))
            # Get the 1st device id returned from the grow system devices list
            device_uuid = cur.fetchone()[0]
            result = get_device_data_json(device_uuid, start_date, end_date,
                                          session['user']['ct_offset'])
            if result:
                return result
            else:
                #TODO: Need a different error message here than the s3_error
                return send_from_directory(path.join(app.root_path, 'static'),
                                           's3_error.jpg',
                                           mimetype='image/png')
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
        logger.error('in /api/get_data_json route: {}, {}'.format(
            exc_info()[0], exc_info()[1]))
        return send_from_directory(path.join(app.root_path, 'static'),
                                   's3_error.jpg', mimetype='image/png')
def process_login():
    """Authenticate the posted form credentials.

    Renders home.html with the user's devices and chart list on success;
    re-renders login.html with a flash message on failure or error.
    """
    try:
        with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
            # Username clamped to 150 chars (matches the db column length used elsewhere).
            if authenticate(request.form['username'][0:150],
                            request.form['password'], cur):
                logger.info('authenticate succesful')
                session['user'] = create_new_session(
                    request.form['username'][0:150], cur)
                return render_template(
                    'home.html',
                    devices=session['user']['devices'],
                    selected_device=session['user']['devices'][0],
                    chart_list=session['user']['chart_config']['chart_list'])
            else:
                logger.warning('authentication failed.')
                session['user'] = None
                flash('incorrect username or password')
                return render_template('login.html')
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
        logger.error('process_login exception: {}, {}'.format(
            exc_info()[0], exc_info()[1]))
        session['user'] = None
        flash('system error F_PL')
        return render_template('login.html')
def get_chart_list(system_uuid):
    """Return JSON {'r': bool, 'chart_list': [...]} of chart URLs for a grow system.

    Expects exactly one device row for the grow system; anything else is an error.
    """
    #TODO - verify that the user has the privliges to see charts for the
    # grow system identified by <system_uuid>
    #TODO - I bet this could moved to a decorator or hell put it in enforce_login!
    logger.info('{}: api/get_chart_list/{}'.format(
        session['user']['nick_name'], system_uuid))
    try:
        with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
            sql = """select chart_config from grow_system
                     inner join grow_system_devices
                         on grow_system.uuid = grow_system_devices.grow_system_uuid
                     inner join device
                         on device.guid = grow_system_devices.device_uuid
                     where uuid = %s;"""
            cur.execute(sql, (system_uuid, ))
            # Explicit raise (not assert) so the check survives `python -O`;
            # the handler below converts it to the error JSON response.
            if cur.rowcount != 1:
                raise ValueError(
                    'No or more than one device found. Only one device was expected.')
            return json.dumps({
                'r': True,
                'chart_list': [{
                    'rel_url': '/chart/{}/{}'.format(cl['vue_name'], system_uuid)
                } for cl in cur.fetchone()[0]['chart_list']]
            })
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
        logger.error('error {}, {}'.format(exc_info()[0], exc_info()[1]))
        return json.dumps({'r': False, 'chart_list': [{}]})
def process_api_login():
    """JSON API login endpoint.

    Reads {'username', 'password'} from the request body and returns a JSON
    document with 'logged_in' and the user's 'organizations'.
    """
    try:
        with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
            creds = request.get_json(force=True)
            username = creds['username'][0:150]
            # Guard clause: reject bad credentials first.
            if not authenticate(username, creds['password'][0:150], cur):
                logger.warning('authentication failed.')
                session['user'] = None
                return json.dumps({'logged_in': False, 'organizations': [{}]})
            logger.info('authenticate succesful')
            session['user'] = create_new_session(username, cur)
            return json.dumps({
                'logged_in': True,
                'organizations': session['user']['organizations']
            })
    except Exception as err:
        logger.error('process_api_login exception: {}, {}, {}'.format(
            exc_info()[0], exc_info()[1], err))
        session['user'] = None
        return json.dumps({'logged_in': False, 'organizations': [{}]})
def get_crops():
    """Return JSON {'r': True, 'crops': [...]} of germinating crop batches.

    The query unions in one hard-coded 'stage 2' basil row — presumably test
    data; TODO confirm and remove.
    """
    logger.info('{}: api/get_crops/'.format(session['user']['nick_name']))
    try:
        with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
            sql = """select grow_batch_id, g.start_date, 'germination' as status,
                            v.common_name, v.species
                     from germination as g
                     inner join grow_batch as gb on g.grow_batch_id = gb.id
                     inner join seed_lot as sl on gb.seed_lot_id = sl.id
                     inner join variety as v on sl.variety_id = v.id
                     union
                     select 100, '20190620' as start_date, 'stage 2' as status,
                            'basil' as common_name, 'Genovese' as variety;"""
            cur.execute(sql)
            if cur.rowcount > 0:
                crops = [{
                    'batch_id': c[0],
                    'start_date': c[1].strftime('%x'),
                    'status': c[2],
                    'name': c[3],
                    'variety': c[4]
                } for c in cur.fetchall()]
            else:
                crops = [{}]
            return json.dumps({'r': True, 'crops': crops})
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
        logger.error('get_crops exception: {}, {}'.format(
            exc_info()[0], exc_info()[1]))
        return json.dumps({'server error': True})
def __init__(self, user_name):
    """Load a person record by django username.

    All attributes stay None when no unique matching row exists.
    """
    self.nick_name = None
    self.guid = None
    self.django_username = None
    self.text_number = None
    with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
        sql = """select person.nick_name, person.guid, person.django_username,
                        text_number
                 from person where person.django_username = %s;"""
        # Username clamped to 150 chars to match the db column length.
        cur.execute(sql, (user_name[0:150], ))
        if cur.rowcount == 1:
            record = cur.fetchone()
            self.nick_name = record[0]
            self.guid = record[1]
            self.django_username = record[2]
            self.text_number = record[3]
        else:
            # logger.warn is deprecated; logger.warning is the supported name.
            logger.warning('no unique database record for {}'.format(
                user_name[0:150]))
def set_new_password_reset_code(self, rc):
    """Persist password-reset code ``rc`` for this person.

    Stamps the creation time and zeroes the failed-try counter so the code
    starts with a fresh expiry window.
    """
    #TODO: update the Person table to contain the password reset code and hte timestamp
    # save the 6 digit number in the db as the reset code with a timeout of say 1 hour
    update_sql = """update person
                    set password_reset_code = %s,
                        password_reset_code_create_time = now(),
                        password_reset_failed_tries = 0
                    where guid = %s;"""
    with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
        cur.execute(update_sql, (rc, self.guid))
def get_device_data_json(device_uuid, start_date, end_date, utc_offset):
    """Return the device's observations as a JSON string, or None on error.

    Timestamps are shifted by ``utc_offset`` hours into the user's local time;
    the date filters are converted the other way (local -> utc) before querying.
    Returns the string 'No device data available' (JSON-encoded) when the
    query matches nothing.
    """
    try:
        # NOTE(review): interval '%s' hour relies on the driver quoting the
        # offset as a string literal — works with psycopg2, but confirm.
        q = """select ea.name as attribute, seo.units as units,
                      seo.utc_timestamp + interval '%s' hour as sample_time,
                      seo.measurement_value as value,
                      d.local_name as device_local_name,
                      es.name as subject_name,
                      esl.location_guid as subject_location_id
               from environment_observation as eo
               inner join scalar_environment_observation as seo
                   on eo.id = seo.environment_observation_id
               inner join environment_attribute as ea
                   on eo.environment_attribute_id = ea.id
               inner join environment_subject_location as esl
                   on esl.guid = eo.environment_subject_location_guid
               inner join environment_subject as es
                   on esl.environment_subject_id = es.id
               inner join device as d on d.guid = eo.participant_guid
               where eo.participant_guid = %s
                 and utc_timestamp >= timestamp %s
                 and utc_timestamp < timestamp %s
               order by seo.utc_timestamp desc;
            """
        with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
            # Note: We convert time filters from the user's local time to utc.
            cur.execute(q, (utc_offset, device_uuid,
                            datetime.strptime(start_date, '%Y-%m-%d') -
                            timedelta(hours=utc_offset),
                            datetime.strptime(end_date, '%Y-%m-%d') +
                            timedelta(days=1) - timedelta(hours=utc_offset)))
            if cur.rowcount == 0:
                logger.warning('No device data available')
                return json.dumps('No device data available')
            obs_list = []
            for record in cur:
                obs = {'type': 'environment'}
                obs['device_name'] = record[4]
                obs['device_id'] = device_uuid
                obs['subject'] = record[5]
                obs['subject_location_id'] = record[6]
                obs['attribute'] = record[0]
                obs['value'] = record[3]
                obs['units'] = record[1]
                obs['ts'] = record[2].isoformat()
                obs_list.append(obs)
            return json.dumps(obs_list, indent=3)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
        logger.error('in get_device_data_json: {}, {}'.format(
            exc_info()[0], exc_info()[1]))
        return None
def check_that_unique_user_exists(user_name):
    """Return True iff exactly one person row matches ``user_name``."""
    # See if the user name given matches anything in the database.
    with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
        sql = """select person.guid from person
                 where person.django_username = %s;"""
        # Username clamped to 150 chars to match the db column length.
        cur.execute(sql, (user_name[0:150], ))
        # Exactly one row means a unique user; 0 or >1 both fail the check.
        return cur.rowcount == 1
def get_data_csv(system_uuid, start_date, end_date):
    """Route handler: send the grow system's observations as a CSV download.

    Falls back to serving the error image when data retrieval fails.
    """
    #TODO - verify that the user has the privliges to see the contents of the zip file.
    #TODO - I bet this could moved to a decorator or hell put it in enforce_login!
    logger.info('{}: api/get_data_csv/{}/{}/{}'.format(
        session['user']['nick_name'], system_uuid, start_date, end_date))
    try:
        # Get the fopd device UUID
        with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
            sql = """select device_uuid from grow_system as gs
                     inner join grow_system_devices as gsd
                         on gs.uuid = gsd.grow_system_uuid
                     where gs.uuid = %s"""
            cur.execute(sql, (system_uuid, ))
            device_uuid = cur.fetchone()[0]
            logger.info('fopd device id {}'.format(device_uuid))
            out_fp = StringIO()
            flask_out_fp = BytesIO()
            # Fill out_fp with csv formatted lines containing all the devices's observerations
            if get_device_data(out_fp, device_uuid, start_date, end_date,
                               session['user']['ct_offset']):
                # Flask wants a byte file so transfer out_fp to flask_out_fp
                flask_out_fp.write(out_fp.getvalue().encode('utf-8'))
                flask_out_fp.seek(0)
                out_fp.close()
                return send_file(flask_out_fp,
                                 mimetype='text/csv',
                                 as_attachment=True,
                                 attachment_filename='data.csv')
            else:
                #TODO: Need a different error message here than the s3_error
                return send_from_directory(path.join(app.root_path, 'static'),
                                           's3_error.jpg',
                                           mimetype='image/png')
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
        logger.error('in /api/get_data_csv route: {}, {}'.format(
            exc_info()[0], exc_info()[1]))
        return send_from_directory(path.join(app.root_path, 'static'),
                                   's3_error.jpg', mimetype='image/png')
def get_device_data(out_fp, device_uuid, start_date, end_date, utc_offset):
    """Write the device's observations to ``out_fp`` as CSV.

    Returns True on success (out_fp rewound to the start), False when there
    is no data or an error occurred.  The date filters are converted from
    the user's local time to utc before querying.
    """
    try:
        q = """select ea.name as attribute, seo.units as units,
                      seo.utc_timestamp + interval '%s' hour as sample_time,
                      seo.measurement_value as value
               from environment_observation as eo
               inner join scalar_environment_observation as seo
                   on eo.id = seo.environment_observation_id
               inner join environment_attribute as ea
                   on eo.environment_attribute_id = ea.id
               inner join environment_subject_location as esl
                   on esl.guid = eo.environment_subject_location_guid
               inner join environment_subject as es
                   on esl.environment_subject_id = es.id
               where participant_guid = %s
                 and utc_timestamp >= timestamp %s
                 and utc_timestamp < timestamp %s
               order by seo.utc_timestamp desc;
            """
        with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
            # Note: We convert time filters from the user's local time to utc.
            cur.execute(q, (utc_offset, device_uuid,
                            datetime.strptime(start_date, '%Y-%m-%d') -
                            timedelta(hours=utc_offset),
                            datetime.strptime(end_date, '%Y-%m-%d') +
                            timedelta(days=1) - timedelta(hours=utc_offset)))
            if cur.rowcount == 0:
                logger.warning('No device data available')
                return False
            csv_writer = csv.writer(out_fp,
                                    delimiter=',',
                                    quotechar='|',
                                    quoting=csv.QUOTE_MINIMAL)
            csv_writer.writerow(['attribute', 'unit', 'sample time', 'value'])
            rc = 0
            for record in cur:
                csv_writer.writerow(record)
                # Log only the first data row as a sanity check.
                if rc == 0:
                    logger.info('writing first row {}'.format(
                        out_fp.getvalue()))
                rc = rc + 1
            out_fp.seek(0)
            return True
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
        logger.error('in get_device_data: {}, {}'.format(
            exc_info()[0], exc_info()[1]))
        return False
def get_newest_image_uuid(camera_uuid):
    """Return the s3_reference of the most recent image taken by ``camera_uuid``.

    Raises LookupError when the camera has no images (callers wrap this in a
    broad try/except and serve an error image).
    TODO - finish the docs and the code for this routine
    """
    with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
        sql = """select s3_reference from phenotype_observation
                 inner join phenotype_image
                     on phenotype_observation.id =
                        phenotype_image.phenotype_observation_id
                 where phenotype_observation.participant_guid = %s
                 order by phenotype_observation.utc_timestamp desc;"""
        cur.execute(sql, (camera_uuid, ))
        # Explicit raise (not assert) so the check survives `python -O`.
        if cur.rowcount <= 0:
            raise LookupError('No image is available for this camera.')
        return cur.fetchone()[0]
def image(system_uuid):
    """Route handler: serve the newest camera image for a grow system.

    Checks the user's 'view' permission on the grow system's camera and
    serves a static error image on permission failure or any error.
    """
    #TODO - verify that the user has the privliges to see the image.
    #TODO - I bet this could moved to a decorator or hell put it in enforce_login!
    logger.info('{}: api/image/{}'.format(session['user']['nick_name'],
                                          system_uuid))
    try:
        # Get the camera id
        with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
            sql = """select camera_uuid from grow_system where uuid = %s"""
            cur.execute(sql, (system_uuid, ))
            camera_uuid = cur.fetchone()[0]
            if not has_permission(session['user']['user_guid'], camera_uuid,
                                  'view'):
                logger.warning(
                    'user_uuid {}: get image permission failure for org {} and camera {}'
                    .format(session['user']['user_guid'],
                            session['user']['organizations'][0]['guid'],
                            camera_uuid))
                return send_from_directory(path.join(app.root_path, 'static'),
                                           's3_error.jpg',
                                           mimetype='image/png')
            logger.info('org uud: {}, camera uuid: {}'.format(
                session['user']['organizations'][0]['guid'], camera_uuid))
            result = get_image_file_v2(get_newest_image_uuid(camera_uuid))
            if result['image_blob'] != None:
                return Response(result['image_blob'], mimetype='image/jpg')
            else:
                logger.error('get image failure {}'.format(result['msg']))
                return send_from_directory(path.join(app.root_path, 'static'),
                                           's3_error.jpg',
                                           mimetype='image/png')
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
        logger.error('in /image.jpg route: {}, {}'.format(
            exc_info()[0], exc_info()[1]))
        # Was send_from_directory('/static', ...): an absolute path unlike
        # every other handler, which resolves under app.root_path.
        return send_from_directory(path.join(app.root_path, 'static'),
                                   's3_error.jpg', mimetype='image/png')
def chart(data_type, grow_system_guid):
    """Route handler: render an SVG chart of ``data_type`` for a grow system.

    Serves a static error image when no chart configuration exists or
    generation fails.
    """
    #TODO - verify that the user has the privliges to see charts for the
    # grow system identified by <grow_system_guid>
    #TODO - I bet this could moved to a decorator or hell put it in enforce_login!
    logger.info('{}: api/chart/{}/{}'.format(session['user']['nick_name'],
                                             data_type, grow_system_guid))
    try:
        with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
            # Use the grow_system_guid to lookup the chart configuration.
            sql = """select device.chart_config, device.guid
                     from grow_system_devices
                     inner join device
                         on grow_system_devices.device_uuid = device.guid
                     where grow_system_devices.grow_system_uuid = %s;"""
            cur.execute(sql, (grow_system_guid, ))
            # Explicit raise (not assert) so the check survives `python -O`;
            # the handler below converts it to the error image response.
            if cur.rowcount != 1:
                raise ValueError(
                    'No chart configurations are associated with grow system: {}'
                    .format(grow_system_guid))
            r = cur.fetchone()
            result = generate_chart(r[1], data_type, r[0],
                                    session['user']['ct_offset'])
            if result['bytes'] != None:
                return Response(result['bytes'], mimetype='image/svg+xml')
            else:
                return send_from_directory('static', 's3_error.jpg',
                                           mimetype='image/png')
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
        logger.error('error {}, {}'.format(exc_info()[0], exc_info()[1]))
        return send_from_directory('static', 'graph_error.jpg',
                                   mimetype='image/png')
def get_devices():
    """Return JSON {'r': bool, 'devices': [...]} of grow systems visible to the user.

    A grow system is visible through the user's organization or through any
    of the user's groups, with view/admin access flags folded into
    'access_type' by get_perms().
    """
    logger.info('{}: api/get_devices'.format(session['user']['nick_name']))
    try:
        with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
            sql = """select uuid, local_name, grow_system_type,
                            organization_view, organization_admin,
                            group_view, group_admin
                     from grow_system
                     where (organization_uuid = %s
                            and (organization_admin or organization_view))
                        or (group_uuid in %s and (group_admin or group_view))"""
            # Hoisted: the original computed these twice (once for logging,
            # once for execution).
            org_guid = session['user']['organizations'][0]['guid']
            user_groups = get_user_groups(session['user']['user_guid'])
            logger.info('get_devics SQL: {}'.format(
                cur.mogrify(sql, (org_guid, user_groups))))
            cur.execute(sql, (org_guid, user_groups))
            if cur.rowcount > 0:
                devices = [{
                    'grow_system_guid': grow_system[0],
                    'name': grow_system[1],
                    'type': grow_system[2],
                    'access_type': get_perms(grow_system[3:7])
                } for grow_system in cur.fetchall()]
            else:
                devices = [{}]
            return json.dumps({'r': True, 'devices': devices})
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
        logger.error('get_devices exception: {}, {}'.format(
            exc_info()[0], exc_info()[1]))
        return json.dumps({'r': False})
def get_zip(system_uuid, images_per_day, start_date, end_date):
    """Route handler: send a zip archive of camera images for a grow system.

    Includes at most ``images_per_day`` images per day between the dates.
    Serves a static error image on permission failure, when no images exist,
    or on any error.
    """
    #TODO - verify that the user has the privliges to see the contents of the zip file.
    #TODO - I bet this could moved to a decorator or hell put it in enforce_login!
    logger.info('{}: api/get_zip/{}/{}/{}/{}'.format(
        session['user']['nick_name'], system_uuid, images_per_day, start_date,
        end_date))
    try:
        # Get the camera id
        with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
            sql = """select camera_uuid from grow_system where uuid = %s"""
            cur.execute(sql, (system_uuid, ))
            camera_uuid = cur.fetchone()[0]
        if not has_permission(session['user']['user_guid'], camera_uuid,
                              'view'):
            logger.warning(
                'user {} does not have permissions to view camera {}'.format(
                    session['user']['user_guid'], camera_uuid))
            return send_from_directory(path.join(app.root_path, 'static'),
                                       's3_error.jpg', mimetype='image/png')
        logger.info('org uud: {}, camera uuid: {}'.format(
            session['user']['organizations'][0]['guid'], camera_uuid))
        s3_file_names = get_s3_file_names(camera_uuid, images_per_day,
                                          start_date, end_date)
        # TODO: Return a zip archive containing a file that contains text
        # indicating that there are no files available.
        if len(s3_file_names) == 0:
            logger.warning('no images found')
            return send_from_directory(path.join(app.root_path, 'static'),
                                       's3_error.jpg', mimetype='image/png')
        zip_archive = BytesIO()
        with ZipFile(zip_archive,
                     mode='w',
                     compression=ZIP_DEFLATED,
                     allowZip64=False) as zip_file:
            # TODO: create a context that opens an S3 session on the back end.
            with S3Session() as s3:
                for s3_file_name in s3_file_names:
                    current_image = s3.get_s3_image(
                        s3_file_name['s3_reference'])
                    if current_image['image_blob'] != None:
                        zip_file.writestr(
                            s3_file_name['utc_timestamp'].strftime(
                                '%Y_%m_%d_%H_%M.jpg'),
                            current_image['image_blob'])
        zip_archive.seek(0)
        logger.info("length of archive: {}".format(
            len(zip_archive.getvalue())))
        return send_file(zip_archive,
                         mimetype='application/zip',
                         as_attachment=True,
                         attachment_filename='image_archive.zip')
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
        logger.error('in /api/get_zip route: {}, {}'.format(
            exc_info()[0], exc_info()[1]))
        return send_from_directory(path.join(app.root_path, 'static'),
                                   's3_error.jpg', mimetype='image/png')
def get_s3_file_names(camera_uuid, images_per_day, start_date, end_date):
    """Return camera observations limited to ``images_per_day`` per day.

    Each item is a dict with 's3_reference', 'utc_timestamp' and
    'period_index' (which image_period-hour bucket of the day it fell in).
    Returns [] when no images satisfy the date range.
    """
    # Get the file names of the images that satisfy the start and end dates
    with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
        sql = """select s3_reference, utc_timestamp from phenotype_observation
                 inner join phenotype_image
                     on phenotype_observation.id =
                        phenotype_image.phenotype_observation_id
                 where phenotype_observation.participant_guid = %s
                   and phenotype_observation.utc_timestamp >= %s
                   and phenotype_observation.utc_timestamp < %s
                 order by phenotype_observation.utc_timestamp desc;"""
        # Get the dates in the right shape (end date made exclusive by +1 day).
        sd = datetime.strptime(start_date, '%Y-%m-%d')
        ed = datetime.strptime(end_date, '%Y-%m-%d') + timedelta(days=1)
        cur.execute(sql, (camera_uuid, sd, ed))
        if cur.rowcount < 1:
            return []
        # // is for floor division e.g. 3//2 = 1; hours per sampling bucket.
        image_period = 24 // int(images_per_day)
        # Explicit raise (not assert) so the check survives `python -O`.
        if image_period == 0:
            raise ValueError('error: image_period cannot be zero')
        current_date = None
        current_date_observations = []
        all_observations = []
        # Filter to limit to n images per day where n equals images_per_day.
        for observation in cur.fetchall():
            if current_date is None or current_date != observation[1].date():
                # New day: flush the previous day's picks and start fresh.
                all_observations.extend(current_date_observations)
                current_date = observation[1].date()
                current_date_observations = [{
                    's3_reference': observation[0],
                    'utc_timestamp': observation[1],
                    'period_index': observation[1].hour // image_period
                }]
                # The original had a bare `next` here — a no-op expression
                # naming the builtin; `continue` is what was intended.
                continue
            # Keep only the first observation seen for each bucket of the day.
            if observation[1].hour // image_period not in [
                    obs['period_index'] for obs in current_date_observations
            ]:
                current_date_observations.append({
                    's3_reference': observation[0],
                    'utc_timestamp': observation[1],
                    'period_index': observation[1].hour // image_period
                })
        # Add any observations that are left over from the last day processed
        # in the loop above.  Note that extend returns None, so it cannot be
        # folded into the return expression.
        all_observations.extend(current_date_observations)
        return all_observations
def generate_chart_from_postgresql(device_uuid, data_type, chart_info,
                                   ct_offset):
    """Render a pygal line chart of the last day's observations.

    data_type is a string formatted as: subject_attribute (split on the
    first '_', so the attribute part may itself contain underscores).
    Returns {'bytes': <svg bytes>} on success, {'bytes': None} on failure.
    """
    try:
        q = """select seo.units as units, seo.utc_timestamp as timestamp,
                      seo.measurement_value as value
               from environment_observation as eo
               inner join scalar_environment_observation as seo
                   on eo.id = seo.environment_observation_id
               inner join environment_attribute as ea
                   on eo.environment_attribute_id = ea.id
               inner join environment_subject_location as esl
                   on esl.guid = eo.environment_subject_location_guid
               inner join environment_subject as es
                   on esl.environment_subject_id = es.id
               where participant_guid = %s and es.name = %s and ea.name = %s
                 and utc_timestamp > now() - '1 day'::interval
               order by seo.utc_timestamp desc limit 74;
            """
        with DbConnection(decrypt_dict_vals(dbconfig, {'password'})) as cur:
            sub_att = data_type.split('_', 1)
            cur.execute(q, (device_uuid, sub_att[0], sub_att[1]))
            if cur.rowcount == 0:
                logger.warning('No chart data available')
                return {'bytes': None}
            try:
                # Let apply_unit_conversion report a unit problem (module flag).
                global enable_display_unit_error_msg
                enable_display_unit_error_msg = True
                values = cur.fetchall()
                v_lst = [
                    float(apply_unit_conversion(
                        {'value': {'units': x[0], 'timestamp': x[1],
                                   'value': x[2]}}, chart_info))
                    for x in values
                ]
                td = timedelta(hours=ct_offset)
                logger.info(
                    'adjusting time with hour offset = {}'.format(ct_offset))
                ts_lst = [(x[1] + td).strftime('%m/%d %I:%M %p')
                          for x in values]
                ts_lst.reverse()
                logger.info('max. time: {}, min. time: {}'.format(
                    ts_lst[-1], ts_lst[0]))
                line_chart = pygal.Line(x_label_rotation=20,
                                        show_minor_x_labels=False)
                line_chart.title = chart_info['chart_title']
                line_chart.y_title = chart_info['y_axis_title']
                line_chart.x_title = chart_info['x_axis_title']
                line_chart.x_labels = ts_lst
                line_chart.x_labels_major = ts_lst[::8]
                # need to reverse order to go from earliest to latest
                v_lst.reverse()
                line_chart.add(chart_info['data_stream_name'], v_lst)
                f = line_chart.render()
                return {'bytes': f}
            except Exception:
                # Narrowed from a bare except.
                logger.error(
                    'Chart generation failed while processing postgresql records: {}, {}'
                    .format(exc_info()[0], exc_info()[1]))
                return {'bytes': None}
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
        logger.error(
            'Chart generation failed, postgresql retrieval failed: {}, {}'.
            format(exc_info()[0], exc_info()[1]))
        return {'bytes': None}