# Assumed imports: numpy plus the repo's video_processing_utils module (module
# paths follow the imports used in the later examples); the helpers
# show_obtained_vs_expected_results and plot_linear_regression are taken to be
# defined in, or imported by, the same module as this test.
import numpy as np
from src.utils import video_processing_utils as vpu


def test():

    # Load the video samples recorded with and without LED illumination.
    led_resources = vpu.get_led_videos()
    no_led_resources = vpu.get_no_led_videos()

    resultR_led, resultG_led, resultB_led = show_obtained_vs_expected_results(
        ":::RESULTADOS PARA MUESTRAS OBTENIDAS CON LED:::", led_resources)
    resultR_no_led, resultG_no_led, resultB_no_led = show_obtained_vs_expected_results(
        ":::RESULTADOS PARA MUESTRAS OBTENIDAS SIN LED:::", no_led_resources)
    plot_linear_regression("Regresion lineal led", "led", "Esperados",
                           "Obtenidos",
                           np.array(resultG_led)[:, 0],
                           np.array(resultG_led)[:, 1])
    plot_linear_regression("Regresion lineal sin led", "no_led", "Esperados",
                           "Obtenidos",
                           np.array(resultG_no_led)[:, 0],
                           np.array(resultG_no_led)[:, 1])
Example #2
# Assumed imports, as in the previous example: numpy plus the repo's
# video_processing_utils module; show_rgb_pearson_coefficients and
# plot_linear_regression are taken to come from the same module as this test.
import numpy as np
from src.utils import video_processing_utils as vpu


def test():

    # Pearson comparison of the RGB channels on the LED samples.
    led_resources = vpu.get_led_videos()

    resultR, resultG, resultB = show_rgb_pearson_coefficients(
        ":::COMPARACION CANALES RGB PARA MUESTRAS OBTENIDAS CON LED:::",
        led_resources)
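    # One expected-vs-obtained regression plot per colour channel.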
    plot_linear_regression("Regresion lineal canal Rojo", "Rojo", "Esperados",
                           "Obtenidos",
                           np.array(resultR)[:, 0],
                           np.array(resultR)[:, 1])
    plot_linear_regression("Regresion lineal canal Verde", "Verde",
                           "Esperados", "Obtenidos",
                           np.array(resultG)[:, 0],
                           np.array(resultG)[:, 1])
    plot_linear_regression("Regresion lineal canal Azul", "Azul", "Esperados",
                           "Obtenidos",
                           np.array(resultB)[:, 0],
                           np.array(resultB)[:, 1])
Example #3
from src.utils import video_processing_utils as vpu
from src.utils import plot_utils as pu
from src.utils import fft_calc_utils as fcu
from src.utils.directory_utils import validateDirectories

video_path = '../../res/videos/'
video_format = '.mp4'

validateDirectories()
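# validateDirectories presumably checks that the expected resource and output
# directories exist before any video is processed.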

videos = vpu.get_led_videos()

led_freqs = []

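# For every LED video: rebuild its file name from the entry's three fields,
# extract and filter the RGB signals from the centre of the frames, run the
# FFT on each channel and keep the dominant in-band frequency as that
# channel's heart-rate estimate.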
for v in videos:

    [r, g, b, f] = vpu.getFilteredRGBVectors(
        video_path + v[0] + '-' + v[1] + '-' + v[2] + video_format,
        vpu.Location.CENTER, 30, 61)
    [R, G, B] = fcu.runFFTWithMethod(fcu.FFTMethod.FFT_ITER_OPT, r, g, b, f)

    led_freqs.append([
        fcu.getHeartRateFromBandVector(R, f),
        fcu.getHeartRateFromBandVector(G, f),
        fcu.getHeartRateFromBandVector(B, f)
    ])

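# led_freqs now holds one [R, G, B] heart-rate estimate per processed video.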
print(led_freqs)
Example #4
from src.utils import video_processing_utils as vpu
from src.utils import fft_calc_utils as fcu
from src.comparation_methods import get_coefficient_of_determination as r2
from src.utils.directory_utils import validateDirectories

validateDirectories()

def show_results(result):
    # Print each comparison row as four '|'-separated columns.
    for data in result:
        print("   {}    |    {}    |    {}    |    {}".format(
            data[0], data[1], data[2], data[3]))

    print("")


video_path = '../../res/videos/'

resources = vpu.get_led_videos()

locations = [vpu.Location.CENTER, vpu.Location.LEFT, vpu.Location.RIGHT, vpu.Location.LOWER_CENTER,
             vpu.Location.LOWER_LEFT, vpu.Location.LOWER_RIGHT, vpu.Location.UPPER_CENTER,
             vpu.Location.UPPER_LEFT, vpu.Location.UPPER_RIGHT]
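# One obtained-vs-expected accumulator per sampled frame location (the _G_
# suffix suggests the green channel is being compared).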
obtained_vs_expected_G_CENTER = []
obtained_vs_expected_G_LEFT = []
obtained_vs_expected_G_RIGHT = []
obtained_vs_expected_G_LOWER_CENTER = []
obtained_vs_expected_G_LOWER_LEFT = []
obtained_vs_expected_G_LOWER_RIGHT = []
obtained_vs_expected_G_UPPER_CENTER = []
obtained_vs_expected_G_UPPER_LEFT = []
obtained_vs_expected_G_UPPER_RIGHT = []

for res in resources: