Example #1
# Imports inferred from context: request and Response are the standard Flask
# objects, and ImageProcessor matches the import used in the second script
# below. Measurement, Measurement_test and fast_thermal_image are
# project-specific helpers whose modules are not shown in this excerpt.
from flask import Response, request
from localization.processing import ImageProcessor


def get_sensor_last_image(id):
    processor = ImageProcessor()

    print(f'requesting last image with id={id}')
    scaled_up = request.args.get('scale_up')
    scaled_up = 1 if scaled_up is None else int(scaled_up)

    interpolate = request.args.get('interpolate')
    interpolate = interpolate == "1"  # parsed but not used further in this excerpt

    simulated = request.args.get('simulate')
    simulated = simulated == "1"

    # simulate=1 reads from the test measurement table instead of the live one
    if simulated:
        last_result = Measurement_test.query.filter(
            Measurement_test.sensor_id == id).order_by(
                Measurement_test.timestamp.desc()).first()
    else:
        last_result = Measurement.query.filter(
            Measurement.sensor_id == int(id)).order_by(
                Measurement.timestamp.desc()).first()
    if last_result is None:
        # no measurement stored yet for this sensor
        return Response(status=404)

    # render the processed frame as an image
    processor.process(last_result.data)
    cv2_data = processor.plot_frame()
    img = fast_thermal_image(cv2_data, scale=scaled_up)

    return Response(img)
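
A quick way to exercise this view's query-string contract (scale_up, interpolate,
simulate) is Flask's test client. This is a minimal sketch: the application module
my_app and the URL rule are assumptions, not part of the original code.

from my_app import app  # hypothetical Flask app that registers the view above

with app.test_client() as client:
    # scale_up enlarges the rendered frame; simulate=1 reads the test table
    resp = client.get('/sensor/3/last_image?scale_up=4&simulate=1')
    print(resp.status_code, resp.content_type)
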
"""
This file converts a database export (from pgAdmin) to a CSV that contains
three columns: the centroids, the epoch time and the local time.
"""

import ast
import csv

from localization.processing import ImageProcessor
from help_module.time_helper import convert_to_datetime, get_time_str

pros = ImageProcessor()
data_list = []

folder_location = '../../data/'
file_name = '19042019.csv'

with open(folder_location + file_name, newline='') as csvfile:
    reader = csv.reader(csvfile, delimiter=',')
    for index, row in enumerate(reader):
        # print(row)
        # print(index)
        # the frame is stored as a Python literal string; parse it with
        # ast.literal_eval rather than eval (safer, same result for literals)
        thermal_data = ast.literal_eval(row[1])
        centroids = pros.process(thermal_data)
        meas_datetime = convert_to_datetime(row[3])
        epoch_time = meas_datetime.timestamp()
        local_time = get_time_str(meas_datetime, microseconds=True)
        data_list.append([centroids, epoch_time, local_time])

with open(folder_location + 'centroid_' + file_name, 'w+',
          newline='') as csvfile:
    writer = csv.writer(csvfile)
    for row in data_list:
        writer.writerow(row)
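
As a quick sanity check (not part of the original script), the generated file can
be read back; assuming each centroid list is written as a plain Python literal,
ast.literal_eval can parse the first column again.

with open(folder_location + 'centroid_' + file_name, newline='') as csvfile:
    for centroid_str, epoch_time, local_time in csv.reader(csvfile):
        centroids = ast.literal_eval(centroid_str)  # list written via str() above
        print(epoch_time, local_time, centroids)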