def map():
    """Render the 'map' template from GPS data supplied in one of four ways.

    Data source priority:
      1. Raw GPX text posted in the ``gps_data_str`` form field.
      2. A pre-converted track on disk for the car named by the ``car_str``
         query argument (``data/gps/<car_str>/track.json``).
      3. Uploaded files: GPX (``gps_data_gpx``), then NMEA (``gps_data_nmea``),
         then raw JSON (``gps_data_json``) — first one that yields data wins.

    Remaining query/form parameters are passed straight through to the
    template.  NOTE: this view function shadows the builtin ``map``; the name
    is kept because it is the externally-registered endpoint.
    """
    gpx_str = request.form.get('gps_data_str', None)
    offset = request.args.get('offset', None)
    car_str = request.args.get('car_str', None)
    original_locations = request.args.get('original_locations', '[]')
    original_times = request.form.get('original_times', '[]')
    original_times_offset = request.form.get('original_times_offset', 0)
    match_locations = request.args.get('match_locations', '[]')
    json_str = None
    print("MAP REQUESTED FOR %s" % (car_str, ))
    if gpx_str is not None and len(gpx_str) > 0:
        # Case 1: raw GPX text posted directly in the form.
        print('MAP REQUESTED WITH GPX_STR LENGTH: %d' % (len(gpx_str), ))
        json_str = sf.convert_gpx_to_json(gpx_str, GMT_OFFSET)
    elif car_str is not None:
        # Case 2: previously-converted track stored on disk for this car.
        gps_path = 'data/gps/%s/track.json' % (car_str, )
        if exists(gps_path):
            with open(gps_path, 'r') as json_file:
                json_str = json_file.read()
    else:
        # Case 3: uploaded file(s); try GPX, then NMEA, then raw JSON.
        gpx_data = request.files.get('gps_data_gpx', None)
        json_data = request.files.get('gps_data_json', None)
        nmea_data = request.files.get('gps_data_nmea', None)
        if gpx_data is not None:
            gpx_str = gpx_data.stream.read()
            if len(gpx_str) > 0:
                print('MAP REQUESTED WITH GPX_STR LENGTH: %d' % (len(gpx_str), ))
                json_str = sf.convert_gpx_to_json(gpx_str, GMT_OFFSET)
        if json_str is None and nmea_data is not None:
            nmea_str = nmea_data.stream.read()
            if len(nmea_str) > 0:
                print('MAP REQUESTED WITH NMEA_STR LENGTH: %d' % (len(nmea_str), ))
                json_str = sf.convert_nmea_to_json(nmea_str, nmea_data.filename,
                                                   GMT_OFFSET)
        if json_str is None and json_data is not None:
            json_str = json_data.stream.read()
    if json_str is not None:
        # BUGFIX: uploaded file streams yield bytes; bytes.replace() with str
        # arguments raises TypeError.  Decode to text before normalizing
        # single quotes to double quotes for JSON consumers.
        if isinstance(json_str, bytes):
            json_str = json_str.decode('utf-8')
        json_str = json_str.replace("'", '"')
    return sf.template('map', data=json_str, offset=offset,
                       original_locations=original_locations,
                       original_times=original_times,
                       original_times_offset=original_times_offset,
                       match_locations=match_locations)
def cards(type=4):
    """Render the 'cards' template with pages of printable card triples.

    Each page is a list of rows; each row is a list of
    ``(car_number, car_color, person_letter)`` tuples.

    :param type: layout selector — ``1`` returns a single hard-coded sample
        page; any other value builds pages from the module-level
        ``CAR_COLORS`` / ``CAR_NUMBER`` / ``PERSON_LETTERS`` constants.
        NOTE: the parameter shadows the builtin ``type``; kept unchanged
        because callers pass it by name.
    """
    if type == 1:
        # Hard-coded demonstration/sample page (one page of 4 rows x 3 cards).
        page_list = [[
            [
                (1, 'red', 'a'),
                (24, 'blue', 'f'),
                (3, 'orange', 'd'),
            ],
            [
                (18, 'purple', 'c'),
                (4, 'white', 'd'),
                (2, 'black', 'b'),
            ],
            [
                (9, 'green', 'a'),
                (11, 'yellow', 'e'),
                (19, 'red', 'c'),
            ],
            [
                (4, 'orange', 'b'),
                (3, 'blue', 'f'),
                (21, 'purple', 'a'),
            ],
        ]]
    else:
        page_list = []
        i = 1  # 1-based running card count used to flush rows/pages below
        page = []
        row = []
        for car_color in CAR_COLORS:
            # Walk the car numbers in batches of 12 (one page's worth).
            for batch in range(1, len(CAR_NUMBER) + 1, 12):
                limit = min(batch + 12, len(CAR_NUMBER) + 1)
                for person_letter in PERSON_LETTERS:
                    for car_number in range(batch, limit):
                        row.append((car_number, car_color, person_letter))
                        # Flush a row every 3 cards and a page every 12 cards.
                        if i % 3 == 0:
                            page.append(row)
                            row = []
                        if i % 12 == 0:
                            page_list.append(page)
                            page = []
                        i += 1
        # NOTE(review): a trailing partially-filled row/page is never flushed
        # into page_list — harmless only if the total card count is a multiple
        # of 12; confirm against the sizes of CAR_COLORS/CAR_NUMBER/
        # PERSON_LETTERS.
    return sf.template('cards', page_list=page_list, type=type)
def overview():
    """Render the overview page."""
    template_name = 'overview'
    return sf.template(template_name)
def map_form():
    """Render the map submission form page."""
    template_name = 'map_form'
    return sf.template(template_name)
def gps_form():
    """Render the GPS upload form page."""
    template_name = 'gps'
    return sf.template(template_name)
def images_form():
    """Render the image upload form page."""
    template_name = 'images'
    return sf.template(template_name)
def index():
    """Render the site index (the default, unnamed template)."""
    template_name = None
    return sf.template(template_name)
def review(car, person):
    """Render the 'review' page for one participant of one car.

    Loads, for the requested person and every "friend" letter that has an
    analysis directory under the same car: the person's image-time offset,
    first/last photo EXIF times, and the per-species match analyses sorted by
    descending confidence.  Also reads the car's GPS track (if any) to decide
    whether the review is ``valid``.

    :param car: raw car identifier from the URL (parsed by sf.process_person)
    :param person: raw person letter from the URL (parsed by sf.process_person)
    """
    # Process the car and person strings
    car_str, car_number, car_color, person = sf.process_person(car, person)
    # Load manual time-correction query parameters (default 0)
    fix_minute = int(request.args.get('fix_minute', 0))
    fix_hour = int(request.args.get('fix_hour', 0))
    fix_day = int(request.args.get('fix_day', 0))
    vip = 'vip' in request.args
    # Build analysis list
    valid = False
    data = None
    analysis_dict = {}
    analysis_dict[person] = None
    gps_path = 'data/gps/%s/track.json' % (car_str, )
    # Find friends: other person letters with an analysis dir for this car
    for friend in PERSON_LETTERS:
        friend_path = join('data', 'analysis', car_str, friend)
        if isdir(friend_path):
            analysis_dict[friend] = None
    # Get analysis for person and friends
    for letter in analysis_dict.keys():
        # Load offset from json
        person_dir = join('data', 'images', car_str, letter)
        # Load offset (seconds added to this person's camera timestamps)
        offset_path = join(person_dir, 'offset.json')
        if exists(offset_path):
            with open(offset_path, 'r') as off:
                data = json.load(off)
            offset = data.get('offset', 0.0)
        else:
            offset = 0.0
        # Load first image's EXIF time; the sentinel on failure is the
        # STRING '-1' (not an int) — downstream template presumably relies
        # on that; verify before changing.
        try:
            reported_time_first = vt.parse_exif_unixtime(
                join(person_dir, 'first.jpg'))
            reported_time_first += offset
        except IOError:
            reported_time_first = '-1'
        try:
            reported_time_last = vt.parse_exif_unixtime(
                join(person_dir, 'last.jpg'))
            reported_time_last += offset
        except IOError:
            reported_time_last = '-1'
        # Load analysis entries for each supported species
        analysis_list = []
        for species in ['zebra', 'giraffe']:
            analysis_path = join('data', 'analysis', car_str, letter, species)
            confidence_path = join(analysis_path, 'confidences.json')
            if exists(confidence_path):
                with open(confidence_path, 'r') as f:
                    data = json.load(f)
                # Sort file prefixes by confidence, highest first
                confidence_list = sorted(data.items(),
                                         key=operator.itemgetter(1),
                                         reverse=True)
                # Load sorted prefixes
                for (file_prefix, conf) in confidence_list:
                    # print(file_prefix, conf)
                    # Load metadata for this match; shift its image time by
                    # the person's offset and guarantee the
                    # 'match_annot_locations' key exists
                    data_str = ''
                    with open(join(analysis_path,
                                   file_prefix + '_data.json')) as f:
                        data = json.load(f)
                        data['original_image_unixtime'] += offset
                        if 'match_annot_locations' not in data:
                            data['match_annot_locations'] = []
                        # Compact JSON with double quotes for embedding in an
                        # HTML attribute
                        data_str = json.dumps(data).replace(' ', '').replace(
                            "'", '"')
                    # Load image paths (static URLs for the three jpg views)
                    correspondences = url_for(
                        'static',
                        filename=join(analysis_path,
                                      file_prefix + '_correspondences.jpg'))
                    original = url_for('static', filename=join(
                        analysis_path, file_prefix + '_original.jpg'))
                    match = url_for('static',
                                    filename=join(analysis_path,
                                                  file_prefix + '_match.jpg'))
                    metadata = 'metadata-json=%s' % (data_str, )
                    # Build analysis tuple:
                    # (index, confidence, correspondences_url, original_url,
                    #  match_url, metadata_attr)
                    analysis = (len(analysis_list), conf, correspondences,
                                original, match, metadata)
                    analysis_list.append(analysis)
            else:
                print('ERROR: %s has no analysis' % (analysis_path, ))
        analysis_dict[letter] = (reported_time_first, reported_time_last,
                                 analysis_list)
    # Set valid flag — requires a GPS track on disk.  NOTE(review):
    # analysis_dict[person] is always a 3-tuple here, so the len(...) >= 1
    # check is always true; it may have been intended as
    # len(analysis_dict[person][2]) — confirm before relying on 'valid'.
    if exists(gps_path):
        with open(gps_path, 'r') as json_file:
            data = json_file.read()
    if exists(gps_path) and len(analysis_dict[person]) >= 1:
        valid = True
    # Set valid if override
    if 'override' in request.args:
        valid = True
    # if data is not None:
    #     data = data.replace("'", '"')
    # NOTE(review): 'offset' below is the loop-carried value from the LAST
    # letter iterated (dict order), not necessarily the requested person's
    # offset; likewise 'data' is the GPS track text only if gps_path exists.
    return sf.template('review', car_str=car_str, car_color=car_color,
                       car_number=car_number, person=person,
                       analysis_dict=analysis_dict, data=data, valid=valid,
                       fix_minute=fix_minute, fix_hour=fix_hour,
                       fix_day=fix_day, offset=offset, vip=vip)
def workspace():
    """Render the workspace page."""
    template_name = 'workspace'
    return sf.template(template_name)