Example No. 1
def check_user_db_exists(ath_un, db_host, superuser_un, superuser_pw):
    conn = None
    db_exists = False
    db_name = str(str2md5(ath_un)) + '_Athlete_Data_DB'
    with ProgressStdoutRedirection(ath_un):
        print(('User DB Name: ' + str(db_name)))

    sql_check_db_exists = """
    select exists(SELECT datname FROM pg_catalog.pg_database WHERE datname = %s);
    """

    try:
        # connect to the PostgreSQL server
        conn = psycopg2.connect(dbname='postgres',
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print('Executing SQL to check whether the database already exists')

        cur.execute(sql_check_db_exists, (db_name, ))
        result = cur.fetchone()
        db_exists = bool(result[0])
        conn.commit()

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn is not None:
            conn.close()
            with ProgressStdoutRedirection(ath_un):
                print('Database connection closed.')

    return db_exists
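
Note the one-element tuple in cur.execute(sql_check_db_exists, (db_name, )): psycopg2 expects the query parameters as a sequence, so a single parameter needs the trailing comma. A hypothetical call to the function above (argument values made up, not from this project):

db_exists = check_user_db_exists('alice', 'localhost', 'postgres', 's3cret')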
Example No. 2
def insert_df_values(ath_un, conn, datafrm, table, unique):
    # Creating a list of tupples from the dataframe values
    tpls = [tuple(x) for x in datafrm.to_numpy()]
    # dataframe columns with Comma-separated
    cols = ','.join(list(datafrm.columns))
    unique = ','.join(unique)

    # SQL query to execute
    sql = "INSERT INTO %s(%s) VALUES %%s ON CONFLICT (%s) DO NOTHING;" % (
        table, cols, unique)
    cur = conn.cursor()
    try:
        psycopg2.extras.execute_values(cur, sql, tpls)
        conn.commit()
        with StdoutRedirection(ath_un):
            print('Custom data successfully inserted to: {}'.format(table))
        with ProgressStdoutRedirection(ath_un):
            print('Custom data successfully inserted to: {}'.format(table))
    except (Exception, psycopg2.DatabaseError) as e:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(e)))

    finally:
        cur.close()
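
For reference, a minimal self-contained sketch of the execute_values batching pattern used above; the table, columns, and connection parameters here are hypothetical, not from this project:

import pandas as pd
import psycopg2
import psycopg2.extras

df = pd.DataFrame({'id': [1, 2], 'val': ['a', 'b']})
tpls = [tuple(x) for x in df.to_numpy()]   # rows as tuples
cols = ','.join(list(df.columns))          # "id,val"
conn = psycopg2.connect(dbname='demo_db')  # assumed local database
cur = conn.cursor()
# execute_values expands the single %s placeholder into a batched VALUES list
sql = "INSERT INTO demo_table({}) VALUES %s ON CONFLICT (id) DO NOTHING;".format(cols)
psycopg2.extras.execute_values(cur, sql, tpls)
conn.commit()
cur.close()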
Example No. 3
def check_user_token_exists(ath_un, db_host, db_name, superuser_un,
                            superuser_pw, encr_pass):
    conn = None
    token_exists = False
    decrypted_dbx_token = None

    sql_check_dbx_token_exists = """
    SELECT dropbox_access_token FROM athlete WHERE ath_un = %s;
    """

    try:
        # connect to the PostgreSQL server
        with ProgressStdoutRedirection(ath_un):
            print(
                'Connecting to the PostgreSQL server to check whether the dbx token exists...'
            )

        # connect to the PostgreSQL server
        conn = psycopg2.connect(dbname=db_name,
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        try:
            cur.execute(sql_check_dbx_token_exists, (ath_un, ))
            result = cur.fetchone()
            if result:
                if result[0] is not None:
                    token_exists = True
                    dbx_token = result[0]
                    #Decrypt dbx token
                    decrypted_dbx_token = decrypt(base64.b64decode(dbx_token),
                                                  encr_pass)
                else:
                    token_exists = False
            else:
                token_exists = False
            conn.commit()
        except Exception:
            token_exists = False

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn is not None:
            conn.close()

    return token_exists, decrypted_dbx_token
Example No. 4
def mm_user_insert(mm_export_link, ath_un, db_host, db_name, superuser_un,
                   superuser_pw, encr_pass):
    conn = None

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)

    sql = """
    DO
    $do$
    BEGIN
    IF

    EXISTS (SELECT id FROM athlete WHERE ath_un = %s) THEN

    UPDATE athlete SET mm_export_link = %s where ath_un= %s;

    END IF;
    
    END
    $do$

    """

    try:

        # connect to the PostgreSQL server
        conn = psycopg2.connect(dbname=db_name,
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print('Inserting Mind Monitor User Data into postgreSQL:')
        cur.execute(sql, (ath_un, mm_export_link, ath_un))
        conn.commit()

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn is not None:
            conn.close()
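
Since an UPDATE that matches no rows is simply a no-op, the EXISTS guard inside the DO block is arguably redundant; a plain parameterized statement against the same table would behave the same (a sketch, not the project's code):

cur.execute("UPDATE athlete SET mm_export_link = %s WHERE ath_un = %s;",
            (mm_export_link, ath_un))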
Example No. 5
def check_host_record_exists(ath_un, db_name, db_host, encr_pass):
    conn_localhost = None
    db_host_diff = False

    sql_check_db_host_exists = """
    SELECT db_host FROM db_info WHERE db_name = %s;
    """

    try:
        # connect to the PostgreSQL server
        params = config(filename="encrypted_settings.ini",
                        section="postgresql",
                        encr_pass=encr_pass)
        postgres_db = params.get("database")
        postgres_un = params.get("user")
        postgres_pw = params.get("password")

        conn_localhost = psycopg2.connect(dbname=postgres_db,
                                          user=postgres_un,
                                          password=postgres_pw)
        conn_localhost.autocommit = True

        # create a cursor
        cur = conn_localhost.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print('Checking whether the db_host record already exists')

        cur.execute(sql_check_db_host_exists, (db_name, ))
        result = cur.fetchone()

        if result:
            if result[0] is not None:  #Previous record exists
                stored_db_host = result[0]
                if stored_db_host != db_host:  #Previous record does not match
                    db_host_diff = True
                else:
                    db_host_diff = False
            else:  #No previous record exists
                db_host_diff = False
        else:
            db_host_diff = False

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn_localhost is not None:
            conn_localhost.close()

    return db_host_diff
Example No. 6
def check_data_file_exists(data_file_path,ath_un,db_host,db_name,superuser_un,superuser_pw,encr_pass):
    conn = None
    data_file_exists = False

    sql_check_file_exists = """
    SELECT data_file_path FROM files WHERE athlete_id = (select id from athlete where ath_un=%s) and data_file_path =%s;
    """

    try:
         
        # connect to the PostgreSQL server
        with ProgressStdoutRedirection(ath_un):
            print('Connecting to the PostgreSQL server to check if the data_file already exists...')
        conn = psycopg2.connect(dbname=db_name, host=db_host, user=superuser_un, password=superuser_pw)

        # create a cursor
        cur = conn.cursor()

        # execute a statement      
        cur.execute(sql_check_file_exists,(ath_un,data_file_path))
        result = cur.fetchone()
        if not result:  
            data_file_exists = False
        else:
            data_file_exists = True
        conn.commit()

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(error)))

    finally:
        if conn is not None:
            conn.close()
            
    return data_file_exists
Example No. 7
def delete_all_db_data(ath_un, mfp_username, db_host, db_name, superuser_un,
                       superuser_pw, encr_pass):
    conn = None

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)

    sql = """
         SELECT truncate_schema('public');
         """
    try:

        # connect to the PostgreSQL server
        conn = psycopg2.connect(dbname=db_name,
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        with StdoutRedirection(ath_un):
            print('Deleting DB data for user: ' + ath_un + '....')
        cur.execute(sql)
        conn.commit()

        # close the communication with the PostgreSQL
        cur.close()

    except Exception as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))
    finally:
        if conn is not None:
            conn.close()
            with StdoutRedirection(ath_un):
                print('All DB data for user: ' + ath_un + ' deleted successfully.')
            with ProgressStdoutRedirection(ath_un):
                print('All DB data for user: ' + ath_un + ' deleted successfully.')
Example No. 8
def insert_last_synch_timestamp(ath_un, encr_pass, db_name):
    last_synch = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")


    sql_postgres_db = """     
    UPDATE db_info SET last_synch = %s where db_name= %s;
    """
    #Insert last_synch into postgres/db_info table
    try:
        params = config(filename="encrypted_settings.ini",
                        section="postgresql",
                        encr_pass=encr_pass)
        postgres_db = params.get("database")
        postgres_un = params.get("user")
        postgres_pw = params.get("password")

        conn_localhost = psycopg2.connect(dbname=postgres_db,
                                          user=postgres_un,
                                          password=postgres_pw)
        conn_localhost.autocommit = True

        # create a cursor
        cur_localhost = conn_localhost.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print(
                'Inserting last_synch timestamp into postgres/db_info table:')

        cur_localhost.execute(sql_postgres_db, (last_synch, db_name))

        # close the communication with the PostgreSQL
        cur_localhost.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn_localhost is not None:
            conn_localhost.close()
Example No. 9
def csv_match_columns2data(ath_un, csv_data):
    sniffer = csv.Sniffer()
    dialect = sniffer.sniff(csv_data.read())
    delimiter = dialect.delimiter
    csv_data.seek(0)  #Move pointer to the beginning
    headerCount = dataCount = 0
    for i, line in enumerate(csv_data):
        if i == 0:
            headerCount = line.count(delimiter) + 1
            dataCount = headerCount
        elif i == 1:
            dataCount = line.count(delimiter) + 1
            if headerCount != dataCount:
                with ProgressStdoutRedirection(ath_un):
                    print(
                        "Warning: Header and data size mismatch in the CSV data. Columns beyond header size will be removed."
                    )
        else:
            break
    return range(dataCount)
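
A quick, self-contained illustration of the csv.Sniffer delimiter detection used above (the sample data is made up):

import csv
import io

sample = io.StringIO("name;age\nalice;30\nbob;41\n")
dialect = csv.Sniffer().sniff(sample.read())
print(dialect.delimiter)  # ';'
sample.seek(0)            # rewind before re-reading, as in the function above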
Example No. 10
def cstm_user_insert(ath_un, usr_params_str, usr_params_int, table_name,
                     db_host, db_name, superuser_un, superuser_pw):
    conn = None
    cstm_dbx_link = usr_params_str[0]
    download_folder = usr_params_str[1]
    ui_table_name = usr_params_str[2]
    date_format = usr_params_str[3]
    time_standard = usr_params_str[4]
    datetime_clmn = usr_params_str[5]
    unique = usr_params_int

    ins_query_str = "ARRAY['{}','{}','{}','{}','{}','{}']".format(
        cstm_dbx_link, download_folder, ui_table_name, date_format,
        time_standard, datetime_clmn)

    sql_create_clmn = "ALTER TABLE athlete ADD COLUMN IF NOT EXISTS str_up_{0} VARCHAR[], ADD COLUMN IF NOT EXISTS int_up_{0} INT[] ;".format(
        table_name)
    sql_insert_usr_str_params = "DO $do$ BEGIN IF EXISTS (SELECT id FROM athlete WHERE ath_un = '{}') THEN UPDATE athlete SET str_up_{} = {} where ath_un= '{}';END IF;END $do$".format(
        ath_un, table_name, ins_query_str, ath_un)
    sql_insert_usr_int_params = "DO $do$ BEGIN IF EXISTS (SELECT id FROM athlete WHERE ath_un = '{}') THEN UPDATE athlete SET int_up_{} = ARRAY{} where ath_un= '{}';END IF;END $do$".format(
        ath_un, table_name, unique, ath_un)

    try:
        conn = psycopg2.connect(dbname=db_name,
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)
        cur = conn.cursor()
        with ProgressStdoutRedirection(ath_un):
            print('Inserting custom table user params into PostgreSQL:')
        cur.execute(sql_create_clmn)
        cur.execute(sql_insert_usr_str_params)
        cur.execute(sql_insert_usr_int_params)
        conn.commit()
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print((str(datetime.datetime.now()) + ' [' +
               sys._getframe().f_code.co_name + ']' +
               ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
               '  ' + str(error)))
    finally:
        if conn is not None:
            conn.close()
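
As an aside, psycopg2 adapts Python lists to SQL arrays natively, so the string-built ARRAY[...] literals above could also be passed as bound parameters; a sketch with a hypothetical column name and made-up values:

cur.execute("UPDATE athlete SET str_up_demo = %s WHERE ath_un = %s;",
            (['link', 'folder', 'tbl', 'fmt', 'utc', 'clmn'], ath_un))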
Example No. 11
def delete_all_files(output, ath_un):
    folder = os.path.join(output, ath_un)

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)

    for the_file in os.listdir(folder):
        file_path = os.path.join(folder, the_file)
        if os.path.isfile(file_path):
            os.unlink(file_path)
        elif os.path.isdir(file_path):
            shutil.rmtree(file_path)
    with StdoutRedirection(ath_un):
        print('All XML, JSON and FIT files for user: ' + ath_un + ' deleted successfully.')
    with ProgressStdoutRedirection(ath_un):
        print('All XML, JSON and FIT files for user: ' + ath_un + ' deleted successfully.')
Example No. 12
def data_file_path_insert(file_path,ath_un,db_host,db_name,superuser_un,superuser_pw,encr_pass):
    file2import = file_path
    athlete_id = ath_un

    conn = None

    sql = """

    INSERT INTO files (data_file_path,athlete_id)

    VALUES
    (%s,(select id from athlete where ath_un=%s))

    ON CONFLICT (data_file_path) DO NOTHING;
    """
    try:
        
        # connect to the PostgreSQL server
        with ProgressStdoutRedirection(ath_un):
            print('Connecting to the PostgreSQL server to insert data_file_path...')
        conn = psycopg2.connect(dbname=db_name, host=db_host, user=superuser_un, password=superuser_pw)

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        
        cur.execute(sql,(file2import,athlete_id))
        conn.commit()
        
        # close the communication with the PostgreSQL
        cur.close()
 
    except Exception as e:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))
    finally:
        if conn is not None:
            conn.close()
Example No. 13
def gc_original_record_insert(file_path,activity_id,ath_un,db_host,db_name,superuser_un,superuser_pw, encr_pass):
    start=time.time()
    file2import = file_path
    gc_activity_id = activity_id
    (activity_type, altitude, cadence, distance, enhanced_altitude, enhanced_speed,
     fractional_cadence, heart_rate, position_lat, position_long, speed, stance_time,
     stance_time_balance, step_length, timestamp, vertical_oscillation, vertical_ratio,
     accumulated_power, left_pedal_smoothness, left_torque_effectiveness, power,
     right_pedal_smoothness, right_torque_effectiveness, temperature, avg_speed,
     avg_swimming_cadence, event, event_group, event_type, length_type, message_index,
     start_time, swim_stroke, total_calories, total_elapsed_time, total_strokes,
     total_timer_time, est_core_temp) = [None] * 38
    hrv_record_list = (None, None, None, None)
    hrv_record_list_combined = []
    hrv_rmssd = None
    hrv_sdrr = None
    hrv_pnn50 = None
    hrv_pnn20 = None
    fitfile = FitFile(file2import)

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)
 
    conn = None
    # connect to the PostgreSQL server
    conn = psycopg2.connect(dbname=db_name, host=db_host, user=superuser_un,password=superuser_pw)
    # two-phase insert prepare
    conn.tpc_begin(conn.xid(42, 'transaction ID', ath_un))

    # Get all data messages that are of type record
    for record in fitfile.get_messages('record'):
        # Go through all the data entries in this record
        for record_data in record:
            if record_data.name == 'activity_type':
                activity_type = record_data.value
            if record_data.name == 'altitude':
                altitude = record_data.value
            if record_data.name == 'cadence':
                cadence = record_data.value
            if record_data.name == 'distance':
                distance = record_data.value
            if record_data.name == 'enhanced_altitude':
                enhanced_altitude = record_data.value     
            if record_data.name == 'enhanced_speed':
                enhanced_speed = record_data.value
            if record_data.name == 'fractional_cadence':
                fractional_cadence = record_data.value
            if record_data.name == 'heart_rate':
                heart_rate = record_data.value
            if record_data.name == 'position_lat':
                position_lat = record_data.value
            if record_data.name == 'position_long':
                position_long = record_data.value
            if record_data.name == 'speed':
                speed = record_data.value
            if record_data.name == 'stance_time':
                stance_time = record_data.value
            if record_data.name == 'stance_time_balance':
                stance_time_balance = record_data.value
            if record_data.name == 'step_length':
                step_length = record_data.value
            if record_data.name == 'timestamp':
                timestamp = record_data.value
            if record_data.name == 'vertical_oscillation':
                vertical_oscillation = record_data.value
            if record_data.name == 'vertical_ratio':
                vertical_ratio = record_data.value
            if record_data.name == 'accumulated_power':
                accumulated_power = record_data.value
            if record_data.name == 'left_pedal_smoothness':
                left_pedal_smoothness = record_data.value
            if record_data.name == 'left_torque_effectiveness':
                left_torque_effectiveness = record_data.value
            if record_data.name == 'power':
                power = record_data.value
            if record_data.name == 'right_pedal_smoothness':
                right_pedal_smoothness = record_data.value
            if record_data.name == 'right_torque_effectiveness':
                right_torque_effectiveness = record_data.value
            if record_data.name == 'temperature':
                temperature = record_data.value
            if record_data.name == 'Est Core Temp':
                est_core_temp = record_data.value

        sql = """

            INSERT INTO garmin_connect_original_record (activity_type,altitude,cadence,distance,enhanced_altitude,enhanced_speed,fractional_cadence,heart_rate,position_lat
            ,position_long,speed,stance_time,stance_time_balance,step_length,timestamp,vertical_oscillation,vertical_ratio,accumulated_power
            ,left_pedal_smoothness,left_torque_effectiveness,power,right_pedal_smoothness,right_torque_effectiveness,temperature,est_core_temp,lap_id)

            VALUES
            (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,(select id from garmin_connect_original_lap where timestamp >= %s and start_time < %s LIMIT 1))
            
            ON CONFLICT (timestamp,lap_id) DO NOTHING;


            """
        try:
            # create a cursor
            cur = conn.cursor()
            # execute a statement
            with StdoutRedirection(ath_un):
                print(('Inserting track record from session: ' + str(gc_activity_id) + ' with timestamp:' + str(timestamp)))
            cur.execute(sql,(activity_type,altitude,cadence,distance,enhanced_altitude,enhanced_speed,fractional_cadence,heart_rate,position_lat,position_long,speed,stance_time,
                            stance_time_balance,step_length,timestamp,vertical_oscillation,vertical_ratio,accumulated_power,left_pedal_smoothness,left_torque_effectiveness,
                            power,right_pedal_smoothness,right_torque_effectiveness,temperature,est_core_temp,str(timestamp),str(timestamp)))

            cur.close()       
        except  (Exception, psycopg2.DatabaseError) as error:
            with ErrorStdoutRedirection(ath_un):
                print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(error)))

    # Get all data messages that are of type hrv
    for record in fitfile.get_messages('hrv'):
        # Go through all the data entries in this record
        for record_data in record:
            if record_data.name == 'time':
                hrv_record_list = record_data.value
                for i in hrv_record_list:
                    if i is not None:
                        hrv_record = i
                        hrv_record_list_combined.append(hrv_record*1000)

                        sql_hrv = """
                        INSERT INTO garmin_connect_hrv_tracking(gc_activity_id,hrv)
                        VALUES
                        (%s,%s);
                        """
                        try:
                            # create a cursor
                            cur = conn.cursor()
                            # execute a statement
                            with StdoutRedirection(ath_un):
                                print(('Inserting hrv record from session: ' + str(gc_activity_id)))
                            cur.execute(sql_hrv,(gc_activity_id,hrv_record))
                            # close the communication with the PostgreSQL 
                            cur.close()              
                        except Exception as e:
                            with ErrorStdoutRedirection(ath_un):
                                print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))
                    else:
                        continue

    # two-phase insert commit or rollback
    try:
        conn.tpc_prepare()
    except  (Exception, psycopg2.DatabaseError) as error:
        conn.tpc_rollback()
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(error)))
    else:
        try:
            conn.tpc_commit()
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))

    #PG: Add Pool Swim specific data
    for record in fitfile.get_messages('sport'):  
        for record_data in record:
            sport = record_data.value
            if sport == 'Pool Swim':
                with StdoutRedirection(ath_un):
                    print(('Activity ' + str(gc_activity_id) + ' is a Pool Swim. Inserting additional data'))  
                
                for record in fitfile.get_messages('length'):

                    for record_data in record:
                        if record_data.name == 'avg_speed':
                            avg_speed = record_data.value  
                        if record_data.name == 'avg_swimming_cadence':
                            avg_swimming_cadence = record_data.value  
                        if record_data.name == 'event_group':
                            event_group = record_data.value  
                        if record_data.name == 'event_type':
                            event_type = record_data.value    
                        if record_data.name == 'length_type':
                            length_type = record_data.value    
                        if record_data.name == 'message_index':
                            message_index = record_data.value     
                        if record_data.name == 'start_time':
                            start_time = record_data.value      
                        if record_data.name == 'swim_stroke':
                            swim_stroke = record_data.value     
                        if record_data.name == 'timestamp':
                            timestamp = record_data.value      
                        if record_data.name == 'total_calories':
                            total_calories = record_data.value       
                        if record_data.name == 'total_elapsed_time':
                            total_elapsed_time = record_data.value      
                        if record_data.name == 'total_strokes':
                            total_strokes = record_data.value       
                        if record_data.name == 'total_timer_time':
                            total_timer_time = record_data.value

                    sql_swim = """

                    CREATE TEMPORARY TABLE temp_garmin_connect_original_record(avg_speed real,avg_swimming_cadence int,event varchar,event_group varchar,event_type varchar,length_type varchar,message_index int
                                                ,start_time varchar,swim_stroke varchar,total_calories int,total_elapsed_time real,total_strokes int,total_timer_time real, timestamp varchar) ON COMMIT DROP;

                    INSERT INTO temp_garmin_connect_original_record (avg_speed,avg_swimming_cadence,event,event_group,event_type,length_type,message_index
                                    ,start_time,swim_stroke,total_calories,total_elapsed_time,total_strokes,total_timer_time,timestamp)

                    VALUES

                    (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);

                    UPDATE garmin_connect_original_record
                    SET avg_speed = temp_garmin_connect_original_record.avg_speed,
                        avg_swimming_cadence = temp_garmin_connect_original_record.avg_swimming_cadence,
                        event = temp_garmin_connect_original_record.event,
                        event_group = temp_garmin_connect_original_record.event_group,
                        event_type = temp_garmin_connect_original_record.event_type,
                        length_type = temp_garmin_connect_original_record.length_type,
                        message_index = temp_garmin_connect_original_record.message_index,
                        start_time = temp_garmin_connect_original_record.start_time,
                        swim_stroke = temp_garmin_connect_original_record.swim_stroke,
                        total_calories = temp_garmin_connect_original_record.total_calories,
                        total_elapsed_time = temp_garmin_connect_original_record.total_elapsed_time,
                        total_strokes = temp_garmin_connect_original_record.total_strokes,
                        total_timer_time = temp_garmin_connect_original_record.total_timer_time
                    FROM temp_garmin_connect_original_record
                    WHERE temp_garmin_connect_original_record.timestamp = garmin_connect_original_record.timestamp and garmin_connect_original_record.gc_activity_id = %s;


                    """
                    try:
                        # create a cursor
                        cur = conn.cursor()
                        # execute a statement
                        with StdoutRedirection(ath_un):
                            print(('Inserting Pool swim data record from session: ' + str(gc_activity_id) + ' with timestamp:' + str(timestamp)))
                        cur.execute(sql_swim,(avg_speed,avg_swimming_cadence,event,event_group,event_type,length_type,message_index,start_time,swim_stroke,total_calories,total_elapsed_time,total_strokes,total_timer_time,timestamp,gc_activity_id))
                        conn.commit()
                        # close the communication with the PostgreSQL
                        cur.close()            
                    except Exception as e:
                        with ErrorStdoutRedirection(ath_un):
                            print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))
                    
    with StdoutRedirection(ath_un):
        print(('--- All record data for session: ' + str(gc_activity_id) + ' inserted successfully. ---'))
    with ProgressStdoutRedirection(ath_un):
        print(('--- All record data for session: ' + str(gc_activity_id) + ' inserted successfully. ---'))
    #Correct errors. Change all values with z-score > 2 to the median
    hrv_record_list_combined = np.array(hrv_record_list_combined)
    hrv_record_list_combined[np.abs(zscore(hrv_record_list_combined)) > 2] = np.median(hrv_record_list_combined)

    #Calculate the square root of the mean square of the differences of RR-intervals
    hrv_rmssd = np.sqrt(np.mean(np.square(np.diff(hrv_record_list_combined))))
    #Calculate the standard deviation of the RR-intervals
    hrv_sdrr = np.std(hrv_record_list_combined)
    #Calculate the number of pairs of successive RR-intervals that differ by more than 50/20 ms
    nn50 = np.sum(np.abs(np.diff(hrv_record_list_combined)) > 50)
    nn20 = np.sum(np.abs(np.diff(hrv_record_list_combined)) > 20)
    #Calculate the proportion of NN50/NN20 divided by the total number of RR-intervals.
    hrv_pnn50 = 100 * nn50 / len(hrv_record_list_combined)
    hrv_pnn20 = 100 * nn20 / len(hrv_record_list_combined)

    sql_hrv_stats = '''
    UPDATE garmin_connect_original_session
    SET hrv_rmssd = %s,
        hrv_sdrr = %s,
        hrv_pnn50 = %s,
        hrv_pnn20 = %s
    WHERE gc_activity_id = %s;
    '''
    
    try:
        # create a cursor
        cur = conn.cursor()
        # execute a statement
        with StdoutRedirection(ath_un):
            print(('Inserting hrv stats for session: ' + str(gc_activity_id)))
        cur.execute(sql_hrv_stats,(hrv_rmssd,hrv_sdrr,hrv_pnn50,hrv_pnn20,gc_activity_id))
        conn.commit()
        # close the communication with the PostgreSQL 
        cur.close()
            
    except Exception as e:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))

    # close the communication with the PostgreSQL
    if conn is not None:
        conn.close()

    end = time.time()
    with ProgressStdoutRedirection(ath_un):
        print('\nExecution time:')
        print(end - start)
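
The HRV statistics above follow the standard time-domain definitions; a self-contained sketch on made-up RR intervals in milliseconds:

import numpy as np

rr = np.array([812.0, 798.0, 805.0, 861.0, 790.0])  # hypothetical RR intervals (ms)
rmssd = np.sqrt(np.mean(np.square(np.diff(rr))))    # root mean square of successive differences
sdrr = np.std(rr)                                   # standard deviation of the RR intervals
pnn50 = 100 * np.sum(np.abs(np.diff(rr)) > 50) / len(rr)
pnn20 = 100 * np.sum(np.abs(np.diff(rr)) > 20) / len(rr)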
Example No. 14
def gc_user_insert(ath_un, gc_username, gc_password, db_host, db_name,
                   superuser_un, superuser_pw, encr_pass, save_pwd):

    if save_pwd:
        #Encrypt gc password.
        if gc_password is not None:
            encrypted_pwd = base64.b64encode(encrypt(gc_password, encr_pass))
            encrypted_pwd = encrypted_pwd.decode('utf-8')
        else:
            encrypted_pwd = None
    else:
        encrypted_pwd = None


    conn = None

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)

    sql = """
    DO
    $do$
    BEGIN
    IF

    EXISTS (SELECT id FROM athlete WHERE gc_email = %s) THEN

    UPDATE athlete SET gc_password = %s where gc_email= %s;

    ELSEIF

    EXISTS (SELECT id FROM athlete WHERE ath_un = %s) THEN

    UPDATE athlete SET gc_password = %s,gc_email = %s where ath_un= %s;


    END IF;
    
    END
    $do$

    """

    try:
        # connect to the PostgreSQL server
        conn = psycopg2.connect(dbname=db_name,
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print('Inserting GC User Data into postgreSQL:')
        cur.execute(
            sql,
            (gc_username, encrypted_pwd, gc_username, ath_un, encrypted_pwd,
             gc_username, ath_un))
        conn.commit()

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn is not None:
            conn.close()
Example No. 15
def dwnld_insert_oura_data(ath_un,db_host,db_name,superuser_un,superuser_pw,oura_refresh_token,start_date_dt,end_date_dt,save_pwd,encr_pass):

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)
    
    oura_params = config(filename="encrypted_settings.ini", section="oura",encr_pass=encr_pass)
    OURA_CLIENT_ID = str(oura_params.get("oura_client_id"))
    OURA_CLIENT_SECRET = str(oura_params.get("oura_client_secret"))
    OURA_TOKEN_URL = str(oura_params.get("oura_token_url"))
    
    #Refresh the access token
    payload = dict(grant_type='refresh_token', refresh_token=oura_refresh_token,client_id=OURA_CLIENT_ID,client_secret=OURA_CLIENT_SECRET)
    refresh = requests.post(OURA_TOKEN_URL, data=payload)
    response = refresh.json()
    access_token = response['access_token']
    refresh_token = response['refresh_token']

    if save_pwd:
        encrypted_refresh_token = base64.b64encode(encrypt(refresh_token, encr_pass))
        encrypted_refresh_token = encrypted_refresh_token.decode('utf-8')
    else:
        encrypted_refresh_token = None

    start_date = datetime.datetime.strftime(start_date_dt,"%Y-%m-%d")
    end_date = datetime.datetime.strftime(end_date_dt,"%Y-%m-%d")

    conn = psycopg2.connect(dbname=db_name, host=db_host, user=superuser_un,password=superuser_pw)
 
    sql_insert_oura_refresh_token = """

        DO
            $do$
                BEGIN
                    IF EXISTS (SELECT id FROM athlete WHERE ath_un = %s) THEN
                        UPDATE athlete SET oura_refresh_token = %s where ath_un= %s;
                    END IF;
                END
            $do$

    """

    sql_insert_sleep_summary = """

        INSERT INTO oura_sleep_daily_summary(athlete_id,summary_date,period_id,is_longest,timezone,bedtime_start,bedtime_end,score,score_total,score_disturbances,
        score_efficiency,score_latency,score_rem,score_deep,score_alignment,total,duration,awake,light,rem,deep,onset_latency,restless,efficiency,midpoint_time,hr_lowest,
        hr_average ,rmssd,breath_average ,temperature_delta ,bedtime_end_delta,midpoint_at_delta,bedtime_start_delta,temperature_deviation,temperature_trend_deviation)

        VALUES
        ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (summary_date,period_id) DO NOTHING;
        
    """

    sql_insert_activity_summary = """

        INSERT INTO oura_activity_daily_summary(athlete_id,summary_date,day_start,day_end,timezone,score,score_stay_active,score_move_every_hour,score_meet_daily_targets,
        score_training_frequency,score_training_volume,score_recovery_time,daily_movement,non_wear,rest,inactive,inactivity_alerts,low,medium,high,steps,cal_total,
        cal_active,met_min_inactive,met_min_low,met_min_medium,met_min_high,average_met,rest_mode_state,to_target_km,target_miles,total,to_target_miles,target_calories,target_km)

        VALUES
        ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (summary_date) DO NOTHING;
        
    """

    sql_insert_readiness_summary = """

        INSERT INTO oura_readiness_daily_summary(athlete_id,summary_date,period_id,score,score_previous_night,score_sleep_balance,score_previous_day,
        score_activity_balance,score_resting_hr,score_hrv_balance,score_recovery_index,score_temperature,rest_mode_state)

        VALUES
        ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (summary_date,period_id) DO NOTHING;
        
    """

    sql_insert_sleep_detail = """

        INSERT INTO oura_sleep_detail(oura_sleep_id,timestamp_gmt,hypnogram_5min,hr_5min,rmssd_5min)

        VALUES
        ((select id from oura_sleep_daily_summary where summary_date=%s and period_id=%s),%s,%s,%s,%s)

        ON CONFLICT (timestamp_gmt) DO NOTHING;
        
    """

    sql_insert_activity_detail = """

        INSERT INTO oura_activity_detail(oura_activity_id,timestamp_gmt,class_5min,met_1min)

        VALUES
        ((select id from oura_activity_daily_summary where summary_date=%s),%s,%s,%s)

        ON CONFLICT (timestamp_gmt) DO NOTHING;
        
    """

    sql_insert_utc_offset = """
        INSERT INTO gmt_local_time_difference (athlete_id,local_date,local_midnight_timestamp,gmt_midnight_timestamp,gmt_local_difference)

        VALUES
            ((select id from athlete where ath_un=%s),%s,%s,%s,%s)

        ON CONFLICT (local_date) DO NOTHING;
    """

    try:       
        cur = conn.cursor()
        cur.execute(sql_insert_oura_refresh_token,(ath_un,encrypted_refresh_token,ath_un))
        conn.commit()       
        cur.close()
    except Exception as e:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))


    #------------------- Retrieve Sleep data ---------------------------

    sleep_data = requests.get('https://api.ouraring.com/v1/sleep?'
                              'start={}&end={}&access_token={}'
                              .format(start_date, end_date, access_token))
    json_sleep = sleep_data.json()
    sleep_df = pd.DataFrame(json_sleep['sleep'])
    sleep_df = sleep_df.where(pd.notnull(sleep_df), None)

    for row in sleep_df.itertuples():
        awake = row.awake
        bedtime_end = row.bedtime_end
        #convert bedtime_end to utc
        bedtime_end = datetime.datetime.strptime((bedtime_end), "%Y-%m-%dT%H:%M:%S%z")
        bedtime_end = bedtime_end.astimezone(pytz.utc)
        bedtime_end_delta = row.bedtime_end_delta
        bedtime_start = row.bedtime_start
        #convert bedtime_start to utc
        bedtime_start = datetime.datetime.strptime((bedtime_start), "%Y-%m-%dT%H:%M:%S%z")
        bedtime_start = bedtime_start.astimezone(pytz.utc)
        bedtime_start_delta = row.bedtime_start_delta
        breath_average = row.breath_average
        deep = row.deep
        duration = row.duration
        efficiency = row.efficiency
        hr_5min = row.hr_5min
        hr_average = row.hr_average
        hr_lowest = row.hr_lowest
        hypnogram_5min = row.hypnogram_5min
        is_longest = row.is_longest
        light = row.light
        midpoint_at_delta = row.midpoint_at_delta
        midpoint_time = row.midpoint_time
        onset_latency = row.onset_latency
        period_id = row.period_id
        rem = row.rem
        restless = row.restless
        rmssd = row.rmssd
        rmssd_5min = row.rmssd_5min
        score = row.score
        score_alignment = row.score_alignment
        score_deep = row.score_deep
        score_disturbances = row.score_disturbances
        score_efficiency = row.score_efficiency
        score_latency = row.score_latency
        score_rem = row.score_rem
        score_total = row.score_total
        summary_date = row.summary_date
        temperature_delta = row.temperature_delta
        temperature_deviation = row.temperature_deviation
        temperature_trend_deviation = row.temperature_trend_deviation
        timezone = row.timezone
        total = row.total

        with StdoutRedirection(ath_un):
            print('Downloading Oura sleep data from: {}'.format(summary_date))
        with ProgressStdoutRedirection(ath_un):
            print('Downloading Oura sleep data from: {}'.format(summary_date))

        try:       
            cur = conn.cursor()
            cur.execute(sql_insert_sleep_summary,(ath_un,summary_date,period_id,is_longest,timezone,datetime.datetime.strftime(bedtime_start,"%Y-%m-%d %H:%M:%S"),
                       datetime.datetime.strftime(bedtime_end,"%Y-%m-%d %H:%M:%S"),score,score_total,score_disturbances,score_efficiency,score_latency,score_rem,score_deep,
                       score_alignment,total,duration,awake,light,rem,deep,onset_latency,restless,efficiency,midpoint_time,hr_lowest,hr_average ,rmssd,breath_average ,
                       temperature_delta,bedtime_end_delta,midpoint_at_delta,bedtime_start_delta,temperature_deviation,temperature_trend_deviation)
                       )
            conn.commit()       
            cur.close()
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))

        #Create and populate a list of 5min intervals starting from bedtime_start
        rmssd_ints = []
        #add first value (bedtime_start) to 5 min intervals list
        rmssd_ints.append(datetime.datetime.strftime(bedtime_start,"%Y-%m-%d %H:%M:%S"))
        rmssd_int = bedtime_start
        
        for i in range (len(rmssd_5min)-1):
            rmssd_int = rmssd_int + datetime.timedelta(seconds=300)
            rmssd_int_str = datetime.datetime.strftime(rmssd_int,"%Y-%m-%d %H:%M:%S")
            rmssd_ints.append(rmssd_int_str)
        
        #Create sleep detail dataframe
        sleep_detail_df = pd.DataFrame({'timestamp': pd.Series(rmssd_ints), 'hr_5min': pd.Series(hr_5min),'hypnogram_5min': pd.Series(list(hypnogram_5min)),'rmssd_5min': pd.Series(rmssd_5min)})
        sleep_detail_df = sleep_detail_df.where(pd.notnull(sleep_detail_df), None)
        
        for row in sleep_detail_df.itertuples():
            timestamp_row = row.timestamp
            hr_5min_row = row.hr_5min
            hypnogram_5min_row = row.hypnogram_5min
            rmssd_5min_row = row.rmssd_5min

            try:       
                cur = conn.cursor()
                cur.execute(sql_insert_sleep_detail,(summary_date,period_id,timestamp_row,hypnogram_5min_row,hr_5min_row,rmssd_5min_row))
                conn.commit()       
                cur.close()
            except Exception as e:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))



    #------------------- Retrieve Activity data ---------------------------
    
    
    activity_data = requests.get('https://api.ouraring.com/v1/activity?'
                              'start={}&end={}&access_token={}'
                              .format(start_date, end_date, access_token))
    json_activity = activity_data.json()
    activity_df = pd.DataFrame(json_activity['activity'])
    activity_df = activity_df.where(pd.notnull(activity_df), None)

    for row in activity_df.itertuples(): 
        average_met = row.average_met
        cal_active = row.cal_active
        cal_total = row.cal_total
        class_5min = row.class_5min
        daily_movement = row.daily_movement
        day_end = row.day_end
        #convert day_end to utc
        day_end = datetime.datetime.strptime((day_end), "%Y-%m-%dT%H:%M:%S%z")
        day_end = day_end.astimezone(pytz.utc)
        day_start = row.day_start
        #convert day_start to utc
        day_start = datetime.datetime.strptime((day_start), "%Y-%m-%dT%H:%M:%S%z")
        day_start = day_start.astimezone(pytz.utc)
        high = row.high
        inactive = row.inactive
        inactivity_alerts = row.inactivity_alerts
        low = row.low
        medium = row.medium
        met_1min = row.met_1min
        met_min_high = row.met_min_high
        met_min_inactive = row.met_min_inactive
        met_min_low = row.met_min_low
        met_min_medium = row.met_min_medium
        non_wear = row.non_wear
        rest = row.rest
        rest_mode_state = row.rest_mode_state
        try:
            score = row.score
            score_meet_daily_targets = row.score_meet_daily_targets
            score_move_every_hour = row.score_move_every_hour
            score_recovery_time = row.score_recovery_time
            score_stay_active = row.score_stay_active
            score_training_frequency = row.score_training_frequency
            score_training_volume = row.score_training_volume
        #If Oura rest mode is enabled, the activity scores are absent
        except AttributeError:
            score = score_meet_daily_targets = score_move_every_hour = score_recovery_time = \
                score_stay_active = score_training_frequency = score_training_volume = None
        steps = row.steps
        summary_date = row.summary_date
        target_calories = row.target_calories
        target_km = row.target_km
        target_miles = row.target_miles
        timezone = row.timezone
        to_target_km = row.to_target_km
        to_target_miles = row.to_target_miles
        total = row.total


        with StdoutRedirection(ath_un):
            print('Downloading Oura activity data from: {}'.format(summary_date))
        with ProgressStdoutRedirection(ath_un):
            print('Downloading Oura activity data from: {}'.format(summary_date))

        try:       
            cur = conn.cursor()
            cur.execute(sql_insert_activity_summary,(ath_un,summary_date,datetime.datetime.strftime(day_start,"%Y-%m-%d %H:%M:%S"),datetime.datetime.strftime(day_end,"%Y-%m-%d %H:%M:%S"),
                        timezone,score,score_stay_active,score_move_every_hour,score_meet_daily_targets,score_training_frequency,score_training_volume,score_recovery_time,daily_movement,
                        non_wear,rest,inactive,inactivity_alerts,low,medium,high,steps,cal_total,cal_active,met_min_inactive,met_min_low,met_min_medium,met_min_high,average_met,
                        rest_mode_state,to_target_km,target_miles,total,to_target_miles,target_calories,target_km)
                        )
            conn.commit()       
            cur.close()

            #Insert utc offset into gmt_local_time_difference table if the record not already present from gc
            gmt_local_difference = datetime.timedelta(minutes=timezone)
            local_midnight = datetime.datetime.strptime(summary_date, '%Y-%m-%d')
            gmt_midnight = local_midnight-gmt_local_difference
            local_date = local_midnight.date()

            cur = conn.cursor()
            cur.execute(sql_insert_utc_offset,(ath_un,local_date,local_midnight,gmt_midnight,gmt_local_difference))
            conn.commit()       
            cur.close()
        
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))
  
        #Create and populate a list of 1min intervals starting from day_start
        met_ints = []
        #add first value (day_start) to 1 min intervals list
        met_ints.append(datetime.datetime.strftime(day_start,"%Y-%m-%d %H:%M:%S"))
        met_int = day_start
        
        for i in range (len(met_1min)-1):
            met_int = met_int + datetime.timedelta(seconds=60)
            met_int_str = datetime.datetime.strftime(met_int,"%Y-%m-%d %H:%M:%S")
            met_ints.append(met_int_str)
        
        #Pad class_5min_list to match the size of 1min list's
        class_5min_list = list(class_5min)
        def insert_element_list(lst, x, n):
            i = n
            while i < len(lst):
                lst.insert(i, x)
                i+= n+1
            return lst
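        # For illustration (hypothetical call): insert_element_list(['a', 'b', 'c'], '0', 1)
        # returns ['a', '0', 'b', '0', 'c']; the loop below repeats the call with
        # n = 2, 3, 4, leaving four '0' entries after every element except the last.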
        
        #Add 4 new "0" elements after every original element
        n=1
        for i in range(4):
            insert_element_list(class_5min_list,'0',n)
            n=n+1

        activity_detail_df = pd.DataFrame({'timestamp': pd.Series(met_ints), 'met_1min': pd.Series(met_1min),'class_5min': pd.Series(class_5min_list)})
        activity_detail_df = activity_detail_df.where(pd.notnull(activity_detail_df), None)
        
        for row in activity_detail_df.itertuples():
            timestamp_row = row.timestamp
            met_1min_row = row.met_1min
            class_5min_row = row.class_5min

            try:       
                cur = conn.cursor()
                cur.execute(sql_insert_activity_detail,(summary_date,timestamp_row,class_5min_row,met_1min_row))
                conn.commit()       
                cur.close()
            except Exception as e:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))

    
    #------------------- Retrieve Readiness data ---------------------------
     
    readiness_data = requests.get('https://api.ouraring.com/v1/readiness?'
                              'start={}&end={}&access_token={}'
                              .format(start_date, end_date, access_token))
    json_readiness = readiness_data.json()
    readiness_df = pd.DataFrame(json_readiness['readiness'])
    readiness_df = readiness_df.where(pd.notnull(readiness_df), None)


    for row in readiness_df.itertuples(): 
        period_id = row.period_id
        rest_mode_state = row.rest_mode_state
        score = row.score
        score_activity_balance = row.score_activity_balance
        score_hrv_balance = row.score_hrv_balance
        score_previous_day = row.score_previous_day
        score_previous_night = row.score_previous_night 
        score_recovery_index = row.score_recovery_index
        score_resting_hr = row.score_resting_hr
        score_sleep_balance = row.score_sleep_balance 
        score_temperature = row.score_temperature
        summary_date = row.summary_date

        with StdoutRedirection(ath_un):
            print('Downloading Oura readiness data from: {}'.format(summary_date))
        with ProgressStdoutRedirection(ath_un):
            print('Downloading Oura readiness data from: {}'.format(summary_date))

        try:       
            cur = conn.cursor()
            cur.execute(sql_insert_readiness_summary,(ath_un,summary_date,period_id,score,score_previous_night,score_sleep_balance,score_previous_day,
                    score_activity_balance,score_resting_hr,score_hrv_balance,score_recovery_index,score_temperature,rest_mode_state)
                        )
            conn.commit()       
            cur.close()

        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))
Example No. 16
def gc_original_lap_insert(file_path,activity_id,ath_un, db_host,db_name,superuser_un,superuser_pw,encr_pass):
    file2import = file_path
    gc_activity_id = activity_id
    (avg_cadence, avg_combined_pedal_smoothness, avg_heart_rate, avg_left_pco,
     avg_left_pedal_smoothness, avg_left_torque_effectiveness, avg_power, avg_right_pco,
     avg_right_pedal_smoothness, avg_right_torque_effectiveness, avg_speed,
     end_position_lat, end_position_long, enhanced_avg_speed, enhanced_max_speed,
     event, event_group, event_type, intensity, lap_trigger, left_right_balance,
     max_cadence, max_heart_rate, max_power, max_speed, message_index, normalized_power,
     sport, stand_count, start_position_lat, start_position_long, start_time,
     time_standing, timestamp, total_ascent, total_calories, total_cycles, total_descent,
     total_distance, total_elapsed_time, total_fat_calories, total_timer_time,
     total_work) = [None] * 43
    avg_cadence_position = (None, None)
    avg_left_power_phase = (None, None, None, None)
    avg_left_power_phase_peak = (None, None, None, None)
    avg_power_position = (None, None)
    avg_right_power_phase = (None, None, None, None)
    avg_right_power_phase_peak = (None, None, None, None)
    max_cadence_position = (None, None)
    max_power_position = (None, None)
    
    fitfile = FitFile(file2import)

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    conn = None
         
    # connect to the PostgreSQL server
    conn = psycopg2.connect(dbname=db_name, host=db_host, user=superuser_un,password=superuser_pw)


    # Get all data messages that are of type record
    for record in fitfile.get_messages('lap'):
        # Go through all the data entries in this record
        for record_data in record:
            if record_data.name == 'avg_cadence':
                avg_cadence = record_data.value
            if record_data.name == 'avg_cadence_position':
                avg_cadence_position = record_data.value
            if record_data.name == 'avg_combined_pedal_smoothness':
                avg_combined_pedal_smoothness = record_data.value
            if record_data.name == 'avg_heart_rate':
                avg_heart_rate = record_data.value
            if record_data.name == 'avg_left_pco':
                avg_left_pco = record_data.value       
            if record_data.name == 'avg_left_pedal_smoothness':
                avg_left_pedal_smoothness = record_data.value
            if record_data.name == 'avg_left_power_phase':
                avg_left_power_phase = record_data.value
            if record_data.name == 'avg_left_power_phase_peak':
                avg_left_power_phase_peak = record_data.value
            if record_data.name == 'avg_left_torque_effectiveness':
                avg_left_torque_effectiveness = record_data.value
            if record_data.name == 'avg_power':
                avg_power = record_data.value
            if record_data.name == 'avg_power_position':
                avg_power_position = record_data.value
            if record_data.name == 'avg_right_pco':
                avg_right_pco = record_data.value
            if record_data.name == 'avg_right_pedal_smoothness':
                avg_right_pedal_smoothness = record_data.value
            if record_data.name == 'avg_right_power_phase':
                avg_right_power_phase = record_data.value   
            if record_data.name == 'avg_right_power_phase_peak':
                avg_right_power_phase_peak = record_data.value
            if record_data.name == 'avg_right_torque_effectiveness':
                avg_right_torque_effectiveness = record_data.value
            if record_data.name == 'avg_speed':
                avg_speed = record_data.value       
            if record_data.name == 'end_position_lat':
                end_position_lat = record_data.value           
            if record_data.name == 'end_position_long':
                end_position_long = record_data.value
            if record_data.name == 'enhanced_avg_speed':
                enhanced_avg_speed = record_data.value
            if record_data.name == 'enhanced_max_speed':
                enhanced_max_speed = record_data.value
            if record_data.name == 'event':
                event = record_data.value
            if record_data.name == 'event_group':
                event_group = record_data.value
            if record_data.name == 'event_type':
                event_type = record_data.value
            if record_data.name == 'intensity':
                intensity = record_data.value
            if record_data.name == 'lap_trigger':
                lap_trigger = record_data.value
            if record_data.name == 'left_right_balance':
                left_right_balance = record_data.value
            if record_data.name == 'max_cadence':
                max_cadence = record_data.value
            if record_data.name == 'max_cadence_position':
                max_cadence_position = record_data.value    
            if record_data.name == 'max_heart_rate':
                max_heart_rate = record_data.value        
            if record_data.name == 'max_power':
                max_power = record_data.value 
            if record_data.name == 'max_power_position':
                max_power_position = record_data.value     
            if record_data.name == 'max_speed':
                max_speed = record_data.value 
            if record_data.name == 'message_index':
                message_index = record_data.value  
            if record_data.name == 'normalized_power':
                normalized_power = record_data.value 
            if record_data.name == 'sport':
                sport = record_data.value   
            if record_data.name == 'stand_count':
                stand_count = record_data.value 
            if record_data.name == 'start_position_lat':
                start_position_lat = record_data.value   
            if record_data.name == 'start_position_long':
                start_position_long = record_data.value 
            if record_data.name == 'start_time':
                start_time = record_data.value
            if record_data.name == 'time_standing':
                time_standing = record_data.value 
            if record_data.name == 'timestamp':
                timestamp = record_data.value 
            if record_data.name == 'total_ascent':
                total_ascent = record_data.value
            if record_data.name == 'total_calories':
                total_calories = record_data.value
            if record_data.name == 'total_cycles':
                total_cycles = record_data.value
            if record_data.name == 'total_descent':
                total_descent = record_data.value
            if record_data.name == 'total_distance':
                total_distance = record_data.value
            if record_data.name == 'total_elapsed_time':
                total_elapsed_time = record_data.value 
            if record_data.name == 'total_fat_calories':
                total_fat_calories = record_data.value
            if record_data.name == 'total_timer_time':
                total_timer_time = record_data.value 
            if record_data.name == 'total_work':
                total_work = record_data.value   

        sql = """

            INSERT INTO garmin_connect_original_lap (avg_cadence,avg_cadence_position,avg_combined_pedal_smoothness,avg_heart_rate,avg_left_pco,avg_left_pedal_smoothness,avg_left_power_phase,
            avg_left_power_phase_peak,avg_left_torque_effectiveness,avg_power,avg_power_position,avg_right_pco,avg_right_pedal_smoothness,avg_right_power_phase,avg_right_power_phase_peak,
            avg_right_torque_effectiveness,avg_speed,end_position_lat,end_position_long,enhanced_avg_speed,enhanced_max_speed,event,event_group,event_type,intensity,lap_trigger,left_right_balance,
            max_cadence,max_cadence_position,max_heart_rate,max_power,max_power_position,max_speed,message_index,normalized_power,sport,stand_count,start_position_lat,start_position_long,
            start_time,time_standing,timestamp,total_ascent,total_calories,total_cycles,total_descent,total_distance,total_elapsed_time,total_fat_calories,total_timer_time,total_work,gc_activity_id)

            VALUES
            (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,
            %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

            ON CONFLICT (timestamp,gc_activity_id) DO NOTHING;
            """

        try:
            
            # create a cursor
            cur = conn.cursor()
            # execute a statement
            with StdoutRedirection(ath_un):
                print(('Inserting lap from session: ' + str(gc_activity_id) + ' with timestamp:' + str(timestamp)))
            with ProgressStdoutRedirection(ath_un):
                print(('Inserting lap from session: ' + str(gc_activity_id) + ' with timestamp:' + str(timestamp)))
            cur.execute(sql,(avg_cadence,list(avg_cadence_position),avg_combined_pedal_smoothness,avg_heart_rate,avg_left_pco,avg_left_pedal_smoothness,list(avg_left_power_phase),
                                list(avg_left_power_phase_peak),avg_left_torque_effectiveness,avg_power,list(avg_power_position),avg_right_pco,avg_right_pedal_smoothness,list(avg_right_power_phase),list(avg_right_power_phase_peak),
                                avg_right_torque_effectiveness,avg_speed,end_position_lat,end_position_long,enhanced_avg_speed,enhanced_max_speed,event,event_group,event_type,intensity,lap_trigger,left_right_balance,
                                max_cadence,list(max_cadence_position),max_heart_rate,max_power,list(max_power_position),max_speed,message_index,normalized_power,sport,stand_count,start_position_lat,start_position_long,
                                start_time,time_standing,timestamp,total_ascent,total_calories,total_cycles,total_descent,total_distance,total_elapsed_time,total_fat_calories,total_timer_time,total_work,gc_activity_id))
            conn.commit()       
            # close the communication with the PostgreSQL
            cur.close()
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))

    # close the communication with the PostgreSQL
    if conn is not None:
        conn.close()
                
    with StdoutRedirection(ath_un):
        print(('--- All lap data for session: ' + str(gc_activity_id) + ' inserted successfully. ---'))
    with ProgressStdoutRedirection(ath_un):
        print(('--- All lap data for session: ' + str(gc_activity_id) + ' inserted successfully. ---'))
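The long chain of if record_data.name == ... tests above can be collapsed with python-fitparse's DataMessage.get_values(), which returns a whole message as a dict. A hedged sketch of that alternative (the field list is abbreviated and the file path is a placeholder):

from fitparse import FitFile

LAP_FIELDS = ('avg_cadence', 'avg_heart_rate', 'avg_power', 'timestamp')  # abbreviated

fitfile = FitFile('activity.fit')  # placeholder path
for lap in fitfile.get_messages('lap'):
    values = lap.get_values()  # dict of field name -> parsed value
    row = {name: values.get(name) for name in LAP_FIELDS}  # absent fields become None
    # 'row' can then feed the same parameterized INSERT used above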
Exemplo n.º 17
0
def gc_dailysummary_insert(file_path, ath_un, db_host, db_name, superuser_un,
                           superuser_pw, encr_pass):
    """ Connect to the PostgreSQL database server """
    conn = None
    #Lists to store the data from xml
    startGMT = []
    endGMT = []
    activityLevelConstant = []
    steps = []
    primaryActivityLevel = []

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    # Amend the path for the postgres "pg_read_file()" function (keep only the part after 'temp/')
    text = file_path
    head, sep, tail = text.partition('temp/')
    pg_read_file = (tail, )

    xpath_sql = """
        INSERT INTO garmin_connect_daily_summary (athlete_id,start_gmt,end_gmt,activity_level_constant,steps,primary_activity_level)
        
        SELECT
        
        (select id from athlete where ath_un=%s),
        to_char(to_timestamp(unnest (xpath('//*[local-name()="item"]/*[local-name()="startGMT"]/text()', x))::text::text,'YYYY-MM-DD HH24:MI:SS'),'YYYY-MM-DD HH24:MI:SS') AS start_gmt,
        to_char(to_timestamp(unnest (xpath('//*[local-name()="item"]/*[local-name()="endGMT"]/text()', x))::text::text,'YYYY-MM-DD HH24:MI:SS'),'YYYY-MM-DD HH24:MI:SS') AS end_gmt,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="activityLevelConstant"]/text()', x))::text::bool AS activity_level_constant,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="steps"]/text()', x))::text::int AS steps,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="primaryActivityLevel"]/text()', x))::text::text AS primary_activity_level
        
        
        FROM UNNEST (xpath('//*[local-name()="root"]', pg_read_file(%s)::xml)) x

        ON CONFLICT (athlete_id,start_gmt) DO NOTHING;
                    
        """

    pd_df_sql = """
        INSERT INTO garmin_connect_daily_summary (athlete_id,start_gmt,end_gmt,activity_level_constant,steps,primary_activity_level)

        VALUES
            ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s)
        
        ON CONFLICT (athlete_id,start_gmt) DO NOTHING;

        """

    sql_time_diff = """
        INSERT INTO gmt_local_time_difference (athlete_id,local_date,local_midnight_timestamp,gmt_midnight_timestamp,gmt_local_difference)

        VALUES
            ((select id from athlete where ath_un=%s),%s,%s,%s,%s)

        ON CONFLICT (local_date) DO NOTHING;
        """

    sql_sleep_tracking = """
        INSERT INTO gc_original_wellness_sleep_tracking (timestamp_gmt,timestamp_local,activity_tracking_id,hr_tracking_id,stress_tracking_id)

        VALUES
            (%s,%s,(select id from gc_original_wellness_activity_tracking where timestamp=%s),(select id from gc_original_wellness_hr_tracking where timestamp=%s),(select id from gc_original_wellness_stress_tracking where timestamp=%s))

        ON CONFLICT (timestamp_gmt) DO NOTHING;
        """

    #Parse the XML document, and append the data to lists
    xml_document = parse(file_path)
    for item in xml_document.iterfind('item'):
        startGMT.append(item.findtext('startGMT'))
        endGMT.append(item.findtext('endGMT'))
        activityLevelConstant.append(item.findtext('activityLevelConstant'))
        steps.append(item.findtext('steps'))
        primaryActivityLevel.append(item.findtext('primaryActivityLevel'))

    #Create pandas dataframe and populate with data from lists
    df = pd.DataFrame({
        'startGMT': startGMT,
        'endGMT': endGMT,
        'activityLevelConstant': activityLevelConstant,
        'steps': steps,
        'primaryActivityLevel': primaryActivityLevel
    })
    #Convert startGMT and endGMT to datetime
    df['startGMT'] = pd.to_datetime(df['startGMT'])
    df['endGMT'] = pd.to_datetime(df['endGMT'])
    #Locate gmt midnight
    midnight_gmt_dt = df['startGMT'].iloc[0]
    #Retrieve date from the xml file name
    date_str = file_path[-22:-12]
    #Convert local date midnight to datetime
    midnight_local_dt = datetime.datetime.strptime(date_str, "%Y-%m-%d")
    #Get time difference (timedelta) by subtracting gmt from local
    local_gmt_time_diff = midnight_local_dt - midnight_gmt_dt

    try:

        # connect to the PostgreSQL server
        conn = psycopg2.connect(dbname=db_name,
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)
        conn.tpc_begin(conn.xid(42, 'transaction ID', 'connection 1'))

        ### Insert sleep tracking data to gc_original_wellness_sleep_tracking ###
        #Iterate through rows in pandas df
        for row in df.itertuples():
            row_startGMT = row.startGMT
            row_endGMT = row.endGMT
            row_activityLevelConstant = row.activityLevelConstant
            row_steps = row.steps
            row_primaryActivityLevel = row.primaryActivityLevel

            ### Insert daily summary data to garmin_connect_daily_summary ###
            # create a cursor
            cur = conn.cursor()
            # execute a statement
            with StdoutRedirection(ath_un):
                print('Inserting Daily Summary Data into PostgreSQL:')
            cur.execute(
                pd_df_sql,
                (ath_un, row_startGMT, row_endGMT, row_activityLevelConstant,
                 row_steps, row_primaryActivityLevel))
            # commit deferred; the two-phase commit below finalizes it
            # close the communication with the PostgreSQL
            cur.close()

            #If the activity level equals 'sleeping', extract start and end times (15min intervals)
            if row_primaryActivityLevel == 'sleeping':
                #Break the 15min intervals down into 1min steps and convert each to local time
                while (row_startGMT < row_endGMT):
                    row_startGMT = row_startGMT + datetime.timedelta(minutes=1)
                    row_startLocal = row_startGMT + local_gmt_time_diff
                    #Convert datetimes to strings
                    row_startGMT_str = datetime.datetime.strftime(
                        (row_startGMT), "%Y-%m-%d %H:%M:%S")
                    row_startLocal_str = datetime.datetime.strftime(
                        (row_startLocal), "%Y-%m-%d %H:%M:%S")
                    # create a cursor
                    cur = conn.cursor()
                    # execute a statement
                    with StdoutRedirection(ath_un):
                        print('Inserting Sleep Tracking Data into PostgreSQL:')
                    cur.execute(
                        sql_sleep_tracking,
                        (row_startGMT_str, row_startLocal_str,
                         row_startGMT_str, row_startGMT_str, row_startGMT_str))
                    # close the communication with the PostgreSQL
                    cur.close()

        ### Insert Local to GMT time difference into gmt_local_time_difference table ###
        # create a cursor
        cur = conn.cursor()
        # execute a statement
        with StdoutRedirection(ath_un):
            print('Inserting Local to GMT time difference into PostgreSQL:')
        cur.execute(sql_time_diff, (ath_un, date_str, midnight_local_dt,
                                    midnight_gmt_dt, local_gmt_time_diff))
        # close the communication with the PostgreSQL
        cur.close()
        with StdoutRedirection(ath_un):
            print('Local to GMT time difference Data Inserted Successfully')
        with ProgressStdoutRedirection(ath_un):
            print('Local to GMT time difference Data Inserted Successfully')
        try:
            conn.tpc_prepare()
        except (Exception, psycopg2.DatabaseError) as error:
            conn.tpc_rollback()
            with ErrorStdoutRedirection(ath_un):
                print(
                    (str(datetime.datetime.now()) + ' [' +
                     sys._getframe().f_code.co_name + ']' +
                     ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                     '  ' + str(error)))
        else:
            conn.tpc_commit()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))
    finally:
        if conn is not None:
            conn.close()
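The function above relies on psycopg2's two-phase commit so that the daily summary, sleep tracking and time-difference rows are committed atomically or not at all. The pattern in isolation, as a minimal sketch (the DSN, xid arguments and table are placeholders; the server must be configured with max_prepared_transactions > 0):

import psycopg2

conn = psycopg2.connect('dbname=test')  # placeholder DSN
conn.tpc_begin(conn.xid(42, 'transaction ID', 'connection 1'))
try:
    cur = conn.cursor()
    cur.execute('INSERT INTO t (v) VALUES (%s)', (1,))  # illustrative statement
    cur.close()
    conn.tpc_prepare()   # phase 1: persist the transaction on the server
except psycopg2.DatabaseError:
    conn.tpc_rollback()  # any failure aborts the whole batch
else:
    conn.tpc_commit()    # phase 2: make all inserts visible together
finally:
    conn.close()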
Exemplo n.º 18
0
def gc_original_wellness_insert(file_path, ath_un, db_host, db_name,
                                superuser_un, superuser_pw, encr_pass):
    start = time.time()
    file2import = file_path
    athlete_id = ath_un
    active_calories, active_time, activity_type, distance, duration_min, steps, timestamp, heart_rate, timestamp_16, intensity, stress_level_time, stress_level_value = [
        None
    ] * 12
    current_activity_type_intensity = (None)
    fitfile = FitFile(file2import)

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    messages = fitfile.get_messages()

    conn = None

    # connect to the PostgreSQL server
    conn = psycopg2.connect(dbname=db_name,
                            host=db_host,
                            user=superuser_un,
                            password=superuser_pw)
    #two-phase insert
    conn.tpc_begin(conn.xid(42, 'transaction ID', ath_un))

    # Get all data messages
    for message in fix_times(messages):
        # Go through all the data entries in this record
        for message_data in message:
            if message_data.name == 'active_calories':
                active_calories = message_data.value
            if message_data.name == 'active_time':
                active_time = message_data.value
            if message_data.name == 'activity_type':
                activity_type = message_data.value
            if message_data.name == 'distance':
                distance = message_data.value
            if message_data.name == 'duration_min':
                duration_min = message_data.value
            if message_data.name == 'steps':
                steps = message_data.value
            if message_data.name == 'timestamp':
                timestamp = message_data.value
            if message_data.name == 'heart_rate':
                heart_rate = message_data.value
            if message_data.name == 'timestamp_16':
                timestamp_16 = message_data.value
            if message_data.name == 'intensity':
                intensity = message_data.value
            if message_data.name == 'stress_level_time':
                stress_level_time = message_data.value
            if message_data.name == 'stress_level_value':
                stress_level_value = message_data.value
            if message_data.name == 'current_activity_type_intensity':
                current_activity_type_intensity = message_data.value

        sql_act_type_summary = """

            INSERT INTO gc_original_wellness_act_type_summary(active_calories, active_time, activity_type, distance, duration_min, steps, timestamp,athlete_id)

            VALUES
            (%s,%s,%s,%s,%s,%s,%s,(select id from athlete where ath_un=%s))
            
            ON CONFLICT (athlete_id,activity_type,timestamp) DO NOTHING;

            """
        sql_hr_tracking = """

            INSERT INTO gc_original_wellness_hr_tracking(heart_rate, timestamp_16, timestamp,athlete_id)

            VALUES
            (%s,%s,%s,(select id from athlete where ath_un=%s))
            
            ON CONFLICT (athlete_id,timestamp) DO NOTHING;

            """
        sql_activity_tracking = """

            INSERT INTO gc_original_wellness_activity_tracking(activity_type, current_activity_type_intensity,intensity,timestamp,athlete_id)

            VALUES
            (%s,%s,%s,%s,(select id from athlete where ath_un=%s))
            
            ON CONFLICT (athlete_id,timestamp) DO NOTHING;

            """

        sql_stress_tracking = """

            INSERT INTO gc_original_wellness_stress_tracking(stress_level_time, stress_level_value,timestamp,athlete_id)

            VALUES
            (%s,%s,%s,(select id from athlete where ath_un=%s))
            
            ON CONFLICT (athlete_id,timestamp) DO NOTHING;

            """
        try:
            # create a cursor
            cur = conn.cursor()
            if duration_min is not None:
                # execute a statement
                with StdoutRedirection(ath_un):
                    print(('Inserting Activity Type Summary record with timestamp: ' + str(timestamp)))
                cur.execute(
                    sql_act_type_summary,
                    (active_calories, active_time, activity_type, distance,
                     duration_min, steps, timestamp, athlete_id))
                # close the communication with the PostgreSQL
                cur.close()
                active_calories, active_time, activity_type, distance, duration_min, steps, timestamp, heart_rate, timestamp_16, intensity, stress_level_time, stress_level_value = [
                    None
                ] * 12
                current_activity_type_intensity = (None)

            if heart_rate is not None:
                # execute a statement
                with StdoutRedirection(ath_un):
                    print(('Inserting Heart Rate Tracking record with timestamp: ' + str(timestamp)))
                cur.execute(sql_hr_tracking,
                            (heart_rate, timestamp_16, timestamp, athlete_id))
                # close the communication with the PostgreSQL
                cur.close()
                active_calories, active_time, activity_type, distance, duration_min, steps, timestamp, heart_rate, timestamp_16, intensity, stress_level_time, stress_level_value = [
                    None
                ] * 12
                current_activity_type_intensity = (None)

            if intensity is not None:
                # execute a statement
                with StdoutRedirection(ath_un):
                    print(('Inserting Activity Tracking record with timestamp: ' + str(timestamp)))
                cur.execute(
                    sql_activity_tracking,
                    (activity_type, list(current_activity_type_intensity),
                     intensity, timestamp, athlete_id))
                # close the communication with the PostgreSQL
                cur.close()
                active_calories, active_time, activity_type, distance, duration_min, steps, timestamp, heart_rate, timestamp_16, intensity, stress_level_time, stress_level_value = [
                    None
                ] * 12
                current_activity_type_intensity = (None)

            if stress_level_value is not None:
                # execute a statement
                with StdoutRedirection(ath_un):
                    print(('Inserting Stress Tracking record with timestamp: ' + str(timestamp)))
                cur.execute(sql_stress_tracking,
                            (stress_level_time, stress_level_value, timestamp,
                             athlete_id))
                # close the communication with the PostgreSQL
                cur.close()
                active_calories, active_time, activity_type, distance, duration_min, steps, timestamp, heart_rate, timestamp_16, intensity, stress_level_time, stress_level_value = [
                    None
                ] * 12
                current_activity_type_intensity = (None)

        except (Exception, psycopg2.DatabaseError) as error:
            with ErrorStdoutRedirection(ath_un):
                print(
                    (str(datetime.datetime.now()) + ' [' +
                     sys._getframe().f_code.co_name + ']' +
                     ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                     '  ' + str(error)))

    #two-phase insert commit or rollback
    try:
        conn.tpc_prepare()
    except (Exception, psycopg2.DatabaseError) as error:
        conn.tpc_rollback()
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))
    else:
        try:
            conn.tpc_commit()
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print(
                    (str(datetime.datetime.now()) + ' [' +
                     sys._getframe().f_code.co_name + ']' +
                     ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                     '  ' + str(e)))

    # close the communication with the PostgreSQL
    if conn is not None:
        conn.close()

    with StdoutRedirection(ath_un):
        print('--- All wellness record data inserted successfully. ---')
    with ProgressStdoutRedirection(ath_un):
        print('--- All wellness record data inserted successfully. ---')

    end = time.time()
    with ProgressStdoutRedirection(ath_un):
        print('\nExecution_time:')
    with ProgressStdoutRedirection(ath_un):
        print((end - start))
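fix_times() is not shown in this listing, but FIT monitoring files store heart-rate samples with a 16-bit timestamp_16 field that carries only the low 16 bits of the FIT timestamp, so each sample has to be re-anchored to the last full timestamp seen in the file. A hedged sketch of that expansion, under the assumption that this is what fix_times performs:

import datetime

FIT_EPOCH = datetime.datetime(1989, 12, 31)  # FIT timestamps count seconds from here

def expand_timestamp_16(last_full_ts, timestamp_16):
    """last_full_ts: seconds since the FIT epoch, from the last full 'timestamp' field."""
    # Add the wrapped 16-bit difference; the & 0xFFFF mask handles rollover
    delta = (timestamp_16 - (last_full_ts & 0xFFFF)) & 0xFFFF
    return FIT_EPOCH + datetime.timedelta(seconds=last_full_ts + delta)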
Exemplo n.º 19
0
    def parse_dataset(dataset, conn, start_date_dt, end_date_dt, data_source):
        for row in dataset.itertuples():
            glucose_nmol = None
            glucose_nmol_avrg = None
            ketone_nmol = None
            utc_time_str = None
            if data_source == 'glimp_android':
                record_type = row._9  #Current(5) or 15min average(3)
                if record_type == 5:
                    glucose_nmol = round((float(row._6) / 18), 2)
                else:
                    glucose_nmol_avrg = round((float(row._6) / 18), 2)
                local_time = row._2
                epoch_time = row._3
                local_time_dt = datetime.datetime.strptime((local_time),
                                                           "%d/%m/%Y %H.%M.%S")
                local_time_str = datetime.datetime.strftime(
                    (local_time_dt), "%Y-%m-%d %H:%M:%S")
                utc_time_str = datetime.datetime.utcfromtimestamp(
                    epoch_time).strftime('%Y-%m-%d %H:%M:%S')
            else:
                ketone_nmol = row._7
                glucose_nmol = row._6
                glucose_nmol_avrg = row._5
                local_time = row._3
                local_time_dt = local_time.to_pydatetime(
                )  #Convert from pandas to python datetime
                local_time_str = datetime.datetime.strftime(
                    (local_time_dt), "%Y-%m-%d %H:%M:%S")
                local_date_str = datetime.datetime.strftime((local_time_dt),
                                                            "%Y-%m-%d")
                try:
                    # create a cursor
                    cur = conn.cursor()
                    # execute a statement
                    cur.execute(sql_local2gmt, (local_date_str, ))
                    result = cur.fetchone()
                    # close the communication with the PostgreSQL
                    conn.commit()
                    cur.close()
                    if result is not None:
                        utc_time_dt = local_time_dt - result[0]
                        utc_time_str = datetime.datetime.strftime(
                            (utc_time_dt), "%Y-%m-%d %H:%M:%S")
                except (Exception, psycopg2.DatabaseError) as error:
                    with ErrorStdoutRedirection(ath_un):
                        print((str(datetime.datetime.now()) + '  ' + str(error)))

            # Check that the record date falls within the download date range. This only works when
            # the csv is parsed from the top, with the most recent record first.
            if local_time_dt > end_date_dt + datetime.timedelta(days=1):
                continue
            if local_time_dt < start_date_dt:
                break

            with ProgressStdoutRedirection(ath_un):
                print(
                    ('Inserting BG record from Glimp/LibreView. Time: ' +
                     str(local_time_str) + '  Glucose: ' + str(glucose_nmol)))
            with StdoutRedirection(ath_un):
                print(
                    ('Inserting BG record from Glimp/LibreView. Time: ' +
                     str(local_time_str) + '  Glucose: ' + str(glucose_nmol)))

            try:
                # create a cursor
                cur = conn.cursor()
                # execute a statement
                cur.execute(
                    sql,
                    (ath_un, local_time_str, glucose_nmol, glucose_nmol_avrg,
                     ketone_nmol, data_source, utc_time_str))
                conn.commit()
                # close the communication with the PostgreSQL
                cur.close()

            except (Exception, psycopg2.DatabaseError) as error:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.datetime.now()) + '  ' + str(error)))
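The division by 18 above is the usual mg/dL to mmol/L conversion for blood glucose (despite the glucose_nmol variable names, the unit produced is mmol/L; 1 mmol/L of glucose is about 18.016 mg/dL). As a small helper sketch:

MGDL_PER_MMOLL = 18.016  # molar mass of glucose (180.16 g/mol) divided by 10

def mgdl_to_mmoll(mgdl):
    return round(mgdl / MGDL_PER_MMOLL, 2)

def mmoll_to_mgdl(mmoll):
    return round(mmoll * MGDL_PER_MMOLL)

print(mgdl_to_mmoll(99))  # ~5.5 mmol/L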
Exemplo n.º 20
0
def gc_original_session_insert(file_path, activity_id, ath_un, db_host,
                               db_name, superuser_un, superuser_pw, encr_pass):
    file2import = file_path
    gc_activity_id = activity_id
    athlete_id = ath_un
    (avg_cadence, avg_combined_pedal_smoothness, avg_heart_rate, avg_left_pco,
     avg_left_pedal_smoothness, avg_left_torque_effectiveness, avg_power,
     avg_right_pco, avg_right_pedal_smoothness, avg_right_torque_effectiveness,
     avg_speed, enhanced_avg_speed, enhanced_max_speed, event, event_group,
     event_type, first_lap_index, intensity_factor, left_right_balance,
     max_cadence, max_heart_rate, max_power, max_speed, message_index, nec_lat,
     nec_long, normalized_power, num_laps, sport, stand_count,
     start_position_lat, start_position_long, start_time, sub_sport, swc_lat,
     swc_long, threshold_power, time_standing, timestamp, total_ascent,
     total_calories, total_cycles, total_descent, total_distance,
     total_elapsed_time, total_fat_calories, total_timer_time, total_work,
     training_stress_score, trigger) = [None] * 50
    avg_cadence_position = (None, None)
    avg_left_power_phase = (None, None, None, None)
    avg_left_power_phase_peak = (None, None, None, None)
    avg_power_position = (None, None)
    avg_right_power_phase = (None, None, None, None)
    avg_right_power_phase_peak = (None, None, None, None)
    max_cadence_position = (None, None)
    max_power_position = (None, None)
    fitfile = FitFile(file2import)
    enhanced_altitude = []

    start_position_lat_degr = None
    start_position_long_degr = None
    end_time_gmt_str = None

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    conn = None

    # connect to the PostgreSQL server
    conn = psycopg2.connect(dbname=db_name,
                            host=db_host,
                            user=superuser_un,
                            password=superuser_pw)

    tz = tzwhere.tzwhere()

    # Get all data messages that are of type record
    try:
        for record in fitfile.get_messages('record'):
            # Go through all the data entries in this record
            for record_data in record:
                # Append altitude values to list
                if record_data.name == 'enhanced_altitude':
                    enhanced_altitude.append(record_data.value)
        #Calculate average altitude for better weather-data accuracy
        avg_altitude = int(mean(enhanced_altitude))
        if hasattr(fitfile, 'close'):
            fitfile.close()  # close the source file when this fitparse version supports it
    except Exception:
        avg_altitude = None

    fitfile = FitFile(file2import)
    # Get all data messages that are of type session
    for record in fitfile.get_messages('session'):
        # Go through all the data entries in this record
        for record_data in record:
            if record_data.name == 'avg_cadence':
                avg_cadence = record_data.value
            if record_data.name == 'avg_cadence_position':
                avg_cadence_position = record_data.value
            if record_data.name == 'avg_combined_pedal_smoothness':
                avg_combined_pedal_smoothness = record_data.value
            if record_data.name == 'avg_heart_rate':
                avg_heart_rate = record_data.value
            if record_data.name == 'avg_left_pco':
                avg_left_pco = record_data.value
            if record_data.name == 'avg_left_pedal_smoothness':
                avg_left_pedal_smoothness = record_data.value
            if record_data.name == 'avg_left_power_phase':
                avg_left_power_phase = record_data.value
            if record_data.name == 'avg_left_power_phase_peak':
                avg_left_power_phase_peak = record_data.value
            if record_data.name == 'avg_left_torque_effectiveness':
                avg_left_torque_effectiveness = record_data.value
            if record_data.name == 'avg_power':
                avg_power = record_data.value
            if record_data.name == 'avg_power_position':
                avg_power_position = record_data.value
            if record_data.name == 'avg_right_pco':
                avg_right_pco = record_data.value
            if record_data.name == 'avg_right_pedal_smoothness':
                avg_right_pedal_smoothness = record_data.value
            if record_data.name == 'avg_right_power_phase':
                avg_right_power_phase = record_data.value
            if record_data.name == 'avg_right_power_phase_peak':
                avg_right_power_phase_peak = record_data.value
            if record_data.name == 'avg_right_torque_effectiveness':
                avg_right_torque_effectiveness = record_data.value
            if record_data.name == 'avg_speed':
                avg_speed = record_data.value
            if record_data.name == 'enhanced_avg_speed':
                enhanced_avg_speed = record_data.value
            if record_data.name == 'enhanced_max_speed':
                enhanced_max_speed = record_data.value
            if record_data.name == 'event':
                event = record_data.value
            if record_data.name == 'event_group':
                event_group = record_data.value
            if record_data.name == 'event_type':
                event_type = record_data.value
            if record_data.name == 'first_lap_index':
                first_lap_index = record_data.value
            if record_data.name == 'intensity_factor':
                intensity_factor = record_data.value
            if record_data.name == 'left_right_balance':
                left_right_balance = record_data.value
            if record_data.name == 'max_cadence':
                max_cadence = record_data.value
            if record_data.name == 'max_cadence_position':
                max_cadence_position = record_data.value
            if record_data.name == 'max_heart_rate':
                max_heart_rate = record_data.value
            if record_data.name == 'max_power':
                max_power = record_data.value
            if record_data.name == 'max_power_position':
                max_power_position = record_data.value
            if record_data.name == 'max_speed':
                max_speed = record_data.value
            if record_data.name == 'message_index':
                message_index = record_data.value
            if record_data.name == 'nec_lat':
                nec_lat = record_data.value
            if record_data.name == 'nec_long':
                nec_long = record_data.value
            if record_data.name == 'normalized_power':
                normalized_power = record_data.value
            if record_data.name == 'num_laps':
                num_laps = record_data.value
            if record_data.name == 'sport':
                sport = record_data.value
            if record_data.name == 'stand_count':
                stand_count = record_data.value
            if record_data.name == 'start_position_lat':
                start_position_lat = record_data.value
                if start_position_lat is not None:
                    #Convert semicircles to degrees
                    start_position_lat_degr = start_position_lat * (180. /
                                                                    (2**31))
                else:
                    start_position_lat_degr = None
            if record_data.name == 'start_position_long':
                start_position_long = record_data.value
                if start_position_long is not None:
                    #Convert semicircles to degrees
                    start_position_long_degr = start_position_long * (180. /
                                                                      (2**31))
                else:
                    start_position_long_degr = None
            if record_data.name == 'sub_sport':
                sub_sport = record_data.value
            if record_data.name == 'swc_lat':
                swc_lat = record_data.value
            if record_data.name == 'swc_long':
                swc_long = record_data.value
            if record_data.name == 'threshold_power':
                threshold_power = record_data.value
            if record_data.name == 'time_standing':
                time_standing = record_data.value
            if record_data.name == 'timestamp':  #Activity end time
                timestamp = record_data.value
            if record_data.name == 'total_ascent':
                total_ascent = record_data.value
            if record_data.name == 'total_calories':
                total_calories = record_data.value
            if record_data.name == 'total_cycles':
                total_cycles = record_data.value
            if record_data.name == 'total_descent':
                total_descent = record_data.value
            if record_data.name == 'total_distance':
                total_distance = record_data.value
            if record_data.name == 'total_elapsed_time':
                total_elapsed_time = record_data.value
            if record_data.name == 'total_fat_calories':
                total_fat_calories = record_data.value
            if record_data.name == 'total_timer_time':
                total_timer_time = record_data.value
            if record_data.name == 'total_work':
                total_work = record_data.value
            if record_data.name == 'training_stress_score':
                training_stress_score = record_data.value
            if record_data.name == 'trigger':
                trigger = record_data.value

        try:
            #Get timezone from lat/long
            timezone = None
            local_time_str = None
            if start_position_lat_degr is not None and start_position_long_degr is not None:
                timezone = tz.tzNameAt(start_position_lat_degr,
                                       start_position_long_degr)
                local_tz = pytz.timezone(timezone)

                gmt_time_dt = datetime.datetime.strptime((str(timestamp)),
                                                         "%Y-%m-%d %H:%M:%S")
                #Calculate the activity start time: 'timestamp' marks the end, so subtract the elapsed time
                activity_duration_dt = datetime.timedelta(
                    0, int(total_elapsed_time))
                start_time_gmt_dt = gmt_time_dt - activity_duration_dt
                start_time_gmt_str = datetime.datetime.strftime(
                    (start_time_gmt_dt), "%Y-%m-%d %H:%M:%S")

                #Get local time from activity start time(gmt)
                local_dt = start_time_gmt_dt.replace(
                    tzinfo=pytz.utc).astimezone(local_tz)
                local_dt_norm = local_tz.normalize(local_dt)
                local_time_str = datetime.datetime.strftime(
                    (local_dt_norm), "%Y-%m-%d %H:%M:%S")

        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print(
                    (str(datetime.datetime.now()) + ' [' +
                     sys._getframe().f_code.co_name + ']' +
                     ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                     '  ' + str(e)))

        sql = """

            INSERT INTO garmin_connect_original_session (avg_cadence,avg_cadence_position,avg_combined_pedal_smoothness,avg_heart_rate,avg_left_pco,
            avg_left_pedal_smoothness,avg_left_power_phase,avg_left_power_phase_peak,avg_left_torque_effectiveness,avg_power,avg_power_position,avg_right_pco,
            avg_right_pedal_smoothness,avg_right_power_phase,avg_right_power_phase_peak,avg_right_torque_effectiveness,avg_speed,enhanced_avg_speed,
            enhanced_max_speed,event,event_group,event_type, first_lap_index,intensity_factor,left_right_balance,max_cadence,max_cadence_position, max_heart_rate,
            max_power,max_power_position,max_speed,message_index,nec_lat,nec_long,normalized_power,num_laps,sport,stand_count,start_position_lat,
            start_position_long,start_time,sub_sport,swc_lat,swc_long,threshold_power,time_standing,timestamp,total_ascent,total_calories,total_cycles,
            total_descent,total_distance,total_elapsed_time,total_fat_calories,total_timer_time,total_work,training_stress_score,trigger,gc_activity_id,athlete_id)

            VALUES
            (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,
            %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,(select id from athlete where ath_un=%s))

            ON CONFLICT (gc_activity_id) DO NOTHING;
            """

        sql_timezone = """
            INSERT INTO timezones(gc_activity_id,strava_activity_id,timestamp_local,timestamp_gmt,timezone,long_degr,lat_degr,alt_avrg,end_time_gmt)
            
            VALUES
            (%s,null,%s,%s,%s,%s,%s,%s,%s)

            ON CONFLICT (timestamp_gmt,long_degr,lat_degr) DO NOTHING;
            """

        sql_insert_utc_offset = """
            INSERT INTO gmt_local_time_difference (athlete_id,local_date,local_midnight_timestamp,gmt_midnight_timestamp,gmt_local_difference)

            VALUES
                ((select id from athlete where ath_un=%s),%s,%s,%s,%s)

            ON CONFLICT (local_date) DO NOTHING;
            """
        try:
            #Insert session data into garmin_connect_original_session table
            cur = conn.cursor()
            # execute a statement
            with StdoutRedirection(ath_un):
                print(('Inserting session: ' + str(gc_activity_id) +
                       ' with timestamp:' + str(timestamp)))
            with ProgressStdoutRedirection(ath_un):
                print(('Inserting session: ' + str(gc_activity_id) +
                       ' with timestamp:' + str(timestamp)))

            cur.execute(
                sql,
                (avg_cadence, list(avg_cadence_position),
                 avg_combined_pedal_smoothness, avg_heart_rate, avg_left_pco,
                 avg_left_pedal_smoothness, list(avg_left_power_phase),
                 list(avg_left_power_phase_peak),
                 avg_left_torque_effectiveness, avg_power,
                 list(avg_power_position), avg_right_pco,
                 avg_right_pedal_smoothness, list(avg_right_power_phase),
                 list(avg_right_power_phase_peak),
                 avg_right_torque_effectiveness, avg_speed, enhanced_avg_speed,
                 enhanced_max_speed, event, event_group, event_type,
                 first_lap_index,
                 intensity_factor, left_right_balance, max_cadence,
                 list(max_cadence_position), max_heart_rate, max_power,
                 list(max_power_position), max_speed, message_index, nec_lat,
                 nec_long, normalized_power, num_laps, sport, stand_count,
                 start_position_lat, start_position_long, start_time,
                 sub_sport, swc_lat, swc_long, threshold_power, time_standing,
                 timestamp, total_ascent, total_calories, total_cycles,
                 total_descent, total_distance, total_elapsed_time,
                 total_fat_calories, total_timer_time, total_work,
                 training_stress_score, trigger, gc_activity_id, athlete_id))
            conn.commit()
            # close the communication with the PostgreSQL
            cur.close()

            if timezone is not None:
                #Insert timezone and local time into timezones table
                cur = conn.cursor()
                # execute a statement
                with StdoutRedirection(ath_un):
                    print(('Inserting timezone info: ' + str(timezone) +
                           ' and local tz timestamp:' + str(local_time_str)))
                with ProgressStdoutRedirection(ath_un):
                    print(('Inserting timezone info: ' + str(timezone) +
                           ' and local tz timestamp:' + str(local_time_str)))
                cur.execute(
                    sql_timezone,
                    (gc_activity_id, local_time_str, start_time_gmt_str,
                     timezone, start_position_long_degr,
                     start_position_lat_degr, avg_altitude, timestamp))
                conn.commit()
                # close the communication with the PostgreSQL
                cur.close()

            #Insert utc offset into gmt_local_time_difference table if the record is not already present
            if timezone is not None:
                #get utc offset
                local_dt_norm = local_dt_norm.replace(tzinfo=None)
                utc_offset = local_dt_norm - start_time_gmt_dt
                #get local date
                local_date = local_dt_norm.date()
                #get local midnight
                local_midnight = local_dt_norm.replace(hour=0, minute=0, second=0)
                gmt_midnight = local_midnight - utc_offset
                cur = conn.cursor()
                cur.execute(
                    sql_insert_utc_offset,
                    (ath_un, local_date, local_midnight, gmt_midnight, utc_offset))
                conn.commit()
                cur.close()
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print(
                    (str(datetime.datetime.now()) + ' [' +
                     sys._getframe().f_code.co_name + ']' +
                     ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                     '  ' + str(e)))

    # close the communication with the PostgreSQL
    if conn is not None:
        conn.close()

    with StdoutRedirection(ath_un):
        print(('--- All summary data for session: ' + str(gc_activity_id) +
               ' inserted successfully. ---'))
    with ProgressStdoutRedirection(ath_un):
        print(('--- All summary data for session: ' + str(gc_activity_id) +
               ' inserted successfully. ---'))
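The coordinate handling above uses the standard FIT conversion from semicircles to degrees, degrees = semicircles * 180 / 2**31. As a tiny reference helper:

SEMICIRCLES_TO_DEGREES = 180.0 / 2 ** 31  # FIT stores positions as signed 32-bit semicircles

def semicircles_to_degrees(semicircles):
    if semicircles is None:
        return None
    return semicircles * SEMICIRCLES_TO_DEGREES

print(semicircles_to_degrees(501079518))  # ~42.0 degrees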
Exemplo n.º 21
0
def gc_wellness_insert(file_path, ath_un, db_host, db_name, superuser_un,
                       superuser_pw, encr_pass):
    conn = None

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    #Set of db field names; sorted, they become the headers of an empty dataframe that the xml dataframe is merged into
    db_fields_list = {
        'athlete_id', 'calendar_date', 'wellness_min_heart_rate',
        'wellness_total_steps_goal', 'sleep_sleep_duration',
        'wellness_floors_descended', 'wellness_floors_ascended',
        'wellness_bodybattery_drained', 'wellness_max_heart_rate',
        'wellness_total_distance', 'wellness_vigorous_intensity_minutes',
        'food_calories_consumed', 'wellness_user_intensity_minutes_goal',
        'wellness_common_active_calories', 'food_calories_remaining',
        'wellness_moderate_intensity_minutes', 'wellness_bmr_calories',
        'common_total_distance', 'wellness_active_calories',
        'wellness_total_calories', 'wellness_resting_heart_rate',
        'wellness_max_stress', 'wellness_average_steps',
        'wellness_max_avg_heart_rate', 'wellness_abnormalhr_alerts_count',
        'wellness_total_steps', 'wellness_average_stress',
        'common_total_calories', 'wellness_user_floors_ascended_goal',
        'wellness_min_avg_heart_rate', 'wellness_bodybattery_charged'
    }
    #Sort alphabetically
    db_fields_list = sorted(db_fields_list)
    db_df = pd.DataFrame(columns=db_fields_list)

    #List to store XML tags for xml_df column names
    column_list = []
    #List to store XML values in xml_df dataframe and used as params for SQL insert/update query
    xml_list = []

    xml_list.append(ath_un)
    column_list.append('athlete_id')

    #Parse the XML document, and append the data to column_list and xml_list lists.
    root = parse(file_path).getroot()
    for startDate in root.iter('statisticsStartDate'):
        date = startDate.text
        xml_list.append(date)
        column_list.append('calendar_date')
    for category in root.findall('.//metricsMap/*'):
        if category.tag != 'SLEEP_SLEEP_DURATION':
            column_list.append(category.tag.lower())
            for tag in root.iter(category.tag):
                for item in tag.iterfind('item'):
                    xml_list.append(item.findtext('value'))
        else:
            column_list.append(category.tag.lower())
            for tag in root.iter(category.tag):
                for item in tag.iterfind('item'):
                    if item.findtext('calendarDate') == date:
                        xml_list.append(item.findtext('value'))

    #Combine xml_list and column_list in a xml_df dataframe
    xml_df = pd.DataFrame(xml_list).T  # Write in DF and transpose it
    xml_df.columns = column_list  # Update column names
    xml_df = xml_df.reindex(sorted(xml_df.columns),
                            axis=1)  # Sort alphabetically
    #Combine the dataframes (DataFrame.append was removed in pandas 2.0)
    combined_df = pd.concat([db_df, xml_df])
    #Drop all columns that do not exist as DB fields
    combined_df = combined_df[db_fields_list]
    #Export all values to list
    df2list = combined_df.values.tolist()
    #Flatten the list
    df2list = [item for sublist in df2list for item in sublist]

    #Build a list of parameters to pass to sql query
    query_params = df2list + df2list
    query_params.append(date)

    # Amend the path for the postgres "pg_read_file()" function (keep only the part after 'temp/')
    text = file_path
    head, sep, tail = text.partition('temp/')
    pg_read_file = (tail, )

    list_sql = """
        INSERT INTO garmin_connect_wellness (athlete_id, calendar_date, common_total_calories, common_total_distance, food_calories_consumed, 
        food_calories_remaining, sleep_duration, wellness_abnormalhr_alerts_count, wellness_active_calories, wellness_average_steps, wellness_average_stress, 
        wellness_bmr_calories, wellness_body_battery_drained, wellness_bodybattery_charged, wellness_common_active_calories, wellness_floors_ascended, 
        wellness_floors_descended, wellness_max_avg_heart_rate, wellness_max_heart_rate, wellness_max_stress, wellness_min_avg_heart_rate, 
        wellness_min_heart_rate, wellness_moderate_intensity_minutes, wellness_resting_heart_rate, wellness_total_calories, wellness_total_distance, 
        wellness_total_steps, wellness_total_steps_goal, wellness_user_floors_ascended_goal, wellness_user_intensity_goal, wellness_vigorous_intensity_minutes)
        
        VALUES ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (calendar_date) DO UPDATE

        SET (athlete_id, calendar_date, common_total_calories, common_total_distance, food_calories_consumed, food_calories_remaining, sleep_duration, 
        wellness_abnormalhr_alerts_count, wellness_active_calories, wellness_average_steps, wellness_average_stress, wellness_bmr_calories, 
        wellness_body_battery_drained, wellness_bodybattery_charged, wellness_common_active_calories, wellness_floors_ascended, wellness_floors_descended, 
        wellness_max_avg_heart_rate, wellness_max_heart_rate, wellness_max_stress, wellness_min_avg_heart_rate, wellness_min_heart_rate, 
        wellness_moderate_intensity_minutes, wellness_resting_heart_rate, wellness_total_calories, wellness_total_distance, wellness_total_steps, 
        wellness_total_steps_goal, wellness_user_floors_ascended_goal, wellness_user_intensity_goal, wellness_vigorous_intensity_minutes)
        = ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        WHERE garmin_connect_wellness.calendar_date
        = %s;
        """

    try:
        # connect to the PostgreSQL server
        conn = psycopg2.connect(dbname=db_name,
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        with StdoutRedirection(ath_un):
            print('Inserting Wellness Data into PostgreSQL:')
        with ProgressStdoutRedirection(ath_un):
            print('Inserting Wellness Data into PostgreSQL:')
        cur.execute(list_sql, query_params)
        conn.commit()

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))
    finally:
        if conn is not None:
            conn.close()
            with StdoutRedirection(ath_un):
                print('Wellness Data Inserted Successfully')
            with ProgressStdoutRedirection(ath_un):
                print('Wellness Data Inserted Successfully')
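The list_sql upsert above is why query_params is built as df2list + df2list + [date]: the same values are bound twice, once for the INSERT and once for the DO UPDATE SET row, with the date bound a third time for the WHERE clause. The pattern reduced to its essentials (table and columns are illustrative; cur is assumed to be an open psycopg2 cursor):

upsert_sql = """
    INSERT INTO daily_metrics (calendar_date, steps)
    VALUES (%s, %s)
    ON CONFLICT (calendar_date) DO UPDATE
    SET (calendar_date, steps) = (%s, %s)
    WHERE daily_metrics.calendar_date = %s;
    """
row = ['2020-01-01', 10500]
# INSERT values + UPDATE values + WHERE value, mirroring df2list + df2list + [date]
cur.execute(upsert_sql, row + row + ['2020-01-01'])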
Exemplo n.º 22
0
def restore_db_schema(ath_un, db_host, db_name, superuser_un, superuser_pw):
    conn = None
    username = ath_un
    head, sep, tail = username.partition('@')
    db_username = head

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    #PG: Read SQL query to restore db schema from file
    sql_file = open('db_schema.sql', 'r')
    sql_restore_db_schema = sql_file.read()

    #PG: Read SQL query to create 1min data view from file
    sql_file_view_streams = open('db_create_view_streams.sql', 'r')
    sql_create_view_streams = sql_file_view_streams.read()

    #PG: Read SQL query to create 1day data view from file
    sql_file_view_summary = open('db_create_view_summary.sql', 'r')
    sql_create_view_summary = sql_file_view_summary.read()
    sql_grant_userpriv = "GRANT ALL PRIVILEGES ON DATABASE \"" + db_name + "\" to \"" + db_username + "\";"
    sql_grant_table_permissions = "GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"" + db_username + "\";"
    sql_revoke_public = "REVOKE ALL ON DATABASE \"" + db_name + "\" FROM PUBLIC;"
    sql_revoke_public_1 = "REVOKE ALL ON DATABASE postgres FROM PUBLIC;"

    try:

        # connect to the PostgreSQL server
        conn = psycopg2.connect(dbname=db_name,
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)
        conn.autocommit = True

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print('Restoring DB schema...')

        cur.execute(sql_restore_db_schema)
        cur.execute(sql_create_view_streams)
        cur.execute(sql_create_view_summary)
        cur.execute(sql_grant_userpriv)
        cur.execute(sql_grant_table_permissions)
        cur.execute(sql_revoke_public)
        cur.execute(sql_revoke_public_1)

        # close the communication with the PostgreSQL
        cur.close()
        sql_file.close()
        sql_file_view_streams.close()
        sql_file_view_summary.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn is not None:
            conn.close()
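The GRANT and REVOKE statements above splice identifiers into the SQL with string concatenation; a sketch of the same statements composed with psycopg2's sql module, which quotes identifiers safely (the database and role names are placeholders):

from psycopg2 import sql

db_name = 'example_db'        # placeholder
db_username = 'example_user'  # placeholder

# Identifiers cannot be passed as %s parameters; sql.Identifier quotes them correctly.
sql_grant_userpriv = sql.SQL("GRANT ALL PRIVILEGES ON DATABASE {} TO {};").format(
    sql.Identifier(db_name), sql.Identifier(db_username))
sql_revoke_public = sql.SQL("REVOKE ALL ON DATABASE {} FROM PUBLIC;").format(
    sql.Identifier(db_name))

# A psycopg2 cursor accepts the composed objects directly:
# cur.execute(sql_grant_userpriv)
# cur.execute(sql_revoke_public)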
Exemplo n.º 23
0
def cstm_data_insert(ath_un, usr_params_str, usr_params_int, db_host,
                     superuser_un, superuser_pw):
    cstm_dbx_link = usr_params_str[0]
    download_folder = usr_params_str[1]
    download_folder = os.path.join(download_folder, 'CSTM_Temp_CSV')
    archive_folder = os.path.join(download_folder, 'Archive')
    table_name = usr_params_str[2]
    date_format = usr_params_str[3]
    timezone = usr_params_str[4]
    datetime_clmn = usr_params_str[5]
    unique = usr_params_int
    dataframes = []

    # Download the csv file from dropbox using share link
    with StdoutRedirection(ath_un):
        print('Downloading custom csv files from: {}'.format(cstm_dbx_link))
    with ProgressStdoutRedirection(ath_un):
        print('Downloading custom csv files from: {}'.format(cstm_dbx_link))
    cstm_dbx_link = cstm_dbx_link[:-1] + '1'  # Replace the trailing 0 in the link with 1; it will then download as a .zip file.
    table_name = 'cstm_{}'.format(table_name)
    response = urllib.request.urlopen(cstm_dbx_link)
    sniffer = csv.Sniffer()
    db_name = str(str2md5(ath_un)) + '_Athlete_Data_DB'

    archive_params = config(filename="encrypted_settings.ini",
                            section="archive")
    preserve_files = str(archive_params.get("preserve_files"))

    #Insert usr_params to athlete table..
    cstm_user_insert(ath_un, usr_params_str, usr_params_int, table_name,
                     db_host, db_name, superuser_un, superuser_pw)

    conn = None
    # connect to the PostgreSQL server
    conn = psycopg2.connect(dbname=db_name,
                            host=db_host,
                            user=superuser_un,
                            password=superuser_pw)

    sql_select_athlete_id = """
        SELECT id FROM athlete WHERE ath_un = %s;
    """

    # Extract the downloaded file in to variable
    downloadFile = io.BytesIO()
    downloadFile.write(response.read())
    downloadFile.seek(0)  #Move pointer to the beginning

    # Create output directory (if it does not already exist).
    if not os.path.exists(download_folder):
        os.makedirs(download_folder)

    # Create Archive directory (if it does not already exist).
    if preserve_files == "true":
        if not os.path.exists(archive_folder):
            os.makedirs(archive_folder)

    #Insert the data from csv to dataframe
    if zipfile.is_zipfile(downloadFile):
        with zipfile.ZipFile(downloadFile, 'r') as z:
            #List the content of the zip file
            zipfile_content = z.namelist()
            #Iterate through list and only extract files
            for item in zipfile_content:
                z.extract(item, download_folder)
        downloadFile.close()
        for filename in os.listdir(download_folder):
            if os.path.isfile(os.path.join(download_folder, filename)):
                #Detect whether the csv file has a header.
                with open(os.path.join(download_folder, filename), 'r') as f:
                    has_header = sniffer.has_header(f.read())
                    if has_header:
                        f.seek(0)  #Move pointer to the beginning
                        data_count = csv_match_columns2data(ath_un, f)
                        df = pd.read_csv(os.path.join(download_folder,
                                                      filename),
                                         usecols=data_count)
                    else:
                        df = pd.read_csv(os.path.join(download_folder,
                                                      filename),
                                         header=None)
                dataframes.append(df)
                #Archive csv files
                if preserve_files == "false":
                    #Remove the csv file from download folder
                    os.remove(os.path.join(download_folder, filename))
                    with StdoutRedirection(ath_un):
                        print((
                            'The content of \"{}\" parsed and inserted into DB.'
                            .format(filename)))
                    with ProgressStdoutRedirection(ath_un):
                        print((
                            'The content of \"{}\" parsed and inserted into DB.'
                            .format(filename)))
                else:
                    #Move the csv to the archive folder and remove it from download
                    if os.path.exists(os.path.join(archive_folder, filename)):
                        #Replace the existing archived file with the updated one
                        os.remove(os.path.join(archive_folder, filename))
                    os.rename(os.path.join(download_folder, filename),
                              os.path.join(archive_folder, filename))
                    with StdoutRedirection(ath_un):
                        print((
                            'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                            .format(filename)))
                    with ProgressStdoutRedirection(ath_un):
                        print((
                            'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                            .format(filename)))
    else:
        filename = re.search(r'([^\/]+)\?.*$', cstm_dbx_link).group(
            1)  #Extract the filename from the link
        downloadFile.seek(0)  #Move pointer to the beginning
        #Detect whether the csv file has a header.
        wrapper = io.TextIOWrapper(downloadFile, encoding='utf-8')
        has_header = sniffer.has_header(wrapper.read())
        downloadFile.seek(0)  #Move pointer to the beginning
        if has_header:
            data_count = csv_match_columns2data(ath_un, wrapper)
            downloadFile.seek(0)  #Move pointer to the beginning
            df = pd.read_csv(downloadFile, usecols=data_count)
        else:
            df = pd.read_csv(downloadFile, header=None)
        #Archive downloaded File
        if preserve_files == "true":
            downloadFile.seek(0)  #Move pointer to the beginning
            with open(os.path.join(archive_folder, filename), 'wb') as f:
                f.write(downloadFile.read())
                with StdoutRedirection(ath_un):
                    print((
                        'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                        .format(filename)))
                with ProgressStdoutRedirection(ath_un):
                    print((
                        'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                        .format(filename)))
        else:
            with StdoutRedirection(ath_un):
                print(('The content of \"{}\" parsed and inserted into DB.'.
                       format(filename)))
            with ProgressStdoutRedirection(ath_un):
                print(('The content of \"{}\" parsed and inserted into DB.'.
                       format(filename)))
        wrapper.close()
        dataframes.append(df)
        downloadFile.close()

    #Remove duplicates
    dataframes = pd.concat(dataframes).drop_duplicates().reset_index(drop=True)

    #---- Parse and format datetime values ----
    cur = conn.cursor()
    #Iterate through values in df datetime column
    if datetime_clmn is not None:
        datetime_clmn = int(datetime_clmn)
        #set how to interpret the first value in an ambiguous 3-integer date
        #eu date format dd/mm/yyyy
        if date_format == "eu":
            first_val = "day"
        #us date format mm/dd/yyyy
        else:
            first_val = 'month'
        #add timestamp_gmt and timestamp_local columns
        datetime_clmn_gmt = "timestamp_gmt"
        datetime_clmn_local = "timestamp_local"
        dataframes[datetime_clmn_gmt] = np.nan
        dataframes[datetime_clmn_local] = np.nan

        item_row_nr = 0
        for item in dataframes[dataframes.columns[datetime_clmn]]:
            timestamps = datefinder.find_dates(str(item), first=first_val)
            for timestamp in timestamps:
                #If the timestamp is timezone aware
                if timestamp.tzinfo is not None:
                    timestamp_utc = timestamp.astimezone(pytz.UTC)
                    timestamp_utc = datetime.datetime.strftime(
                        timestamp_utc, "%Y-%m-%d %H:%M:%S.%f")
                    timestamp_local = datetime.datetime.strftime(
                        timestamp, "%Y-%m-%d %H:%M:%S.%f %Z")
                #If the timestamp is timezone naive
                else:
                    local_timezone = pytz.timezone(timezone)
                    timestamp_local_tz = local_timezone.localize(timestamp,
                                                                 is_dst=True)
                    timestamp_utc = timestamp_local_tz.astimezone(pytz.UTC)
                    timestamp_utc = datetime.datetime.strftime(
                        timestamp_utc, "%Y-%m-%d %H:%M:%S.%f")
                    timestamp_local = datetime.datetime.strftime(
                        timestamp_local_tz, "%Y-%m-%d %H:%M:%S.%f %Z")

            #insert timestamp_utc and timezone_local values to the newly created df columns
            row_index = dataframes.index[item_row_nr]
            dataframes.loc[row_index, datetime_clmn_gmt] = timestamp_utc
            dataframes.loc[row_index, datetime_clmn_local] = timestamp_local

            item_row_nr += 1

    cur.execute(sql_select_athlete_id, (ath_un, ))
    result = cur.fetchone()
    conn.commit()
    ath_un_id = result[0]
    dataframes['athlete_id'] = ath_un_id
    cur.close()

    #format column names to comply with postgres rules
    dataframes.rename(columns={i: format_column_name(i)
                               for i in dataframes.columns},
                      inplace=True)

    #Get list of unique column names from column numbers (supplied by user) and add athlete_id column to the list
    unique = [dataframes.columns[i] for i in unique]
    unique.append("athlete_id")

    #Check if the table already exists
    cur = conn.cursor()
    cur.execute(
        "SELECT EXISTS (SELECT FROM pg_tables WHERE tablename  = '{}');".
        format(table_name))
    result = cur.fetchone()
    conn.commit()
    if result[0] is True:
        with StdoutRedirection(ath_un):
            print(
                'The custom table {} already exists. Will now insert the new data'
                .format(table_name))
        with ProgressStdoutRedirection(ath_un):
            print(
                'The custom table {} already exists. Will now insert the new data'
                .format(table_name))
    else:
        with StdoutRedirection(ath_un):
            print('Creating custom table: {}'.format(table_name))
        with ProgressStdoutRedirection(ath_un):
            print('Creating custom table: {}'.format(table_name))
        cur.execute(gen_tbl_sql(ath_un, dataframes, table_name, unique))
        conn.commit()
        cur.close()

    insert_df_values(ath_un, conn, dataframes, table_name, unique)

    # close the communication with the PostgreSQL
    if conn is not None:
        conn.close()
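A self-contained sketch of the csv.Sniffer header-detection step used above, on a made-up sample; note the buffer has to be rewound after sniffing, exactly as in the function:

import csv
import io

sample = "date,steps\n2021-10-29,12345\n2021-10-30,9876\n"

f = io.StringIO(sample)
sniffer = csv.Sniffer()
has_header = sniffer.has_header(f.read())
f.seek(0)  # Move pointer to the beginning before parsing the data
print(has_header)  # True for this sample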
Exemplo n.º 24
0
def update_autosynch_prefrnc(ath_un, db_host, db_name, superuser_un,
                             superuser_pw, encrypted_superuser_pw,
                             enable_auto_synch, encr_pass):

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    #Do not save pw to db_info if no autosynch or db_host=localhost
    if enable_auto_synch != True or db_host == 'localhost':
        encrypted_superuser_pw = None

    conn = None

    sql = """

    UPDATE athlete SET auto_sync = %s where ath_un= %s;
            
    """

    sql_postgres_db = """
    
    UPDATE db_info SET db_auto_synch = %s,db_un = %s,db_pw = %s where db_name= %s;

    """
    #Insert user autosynch preference into user db athlete table
    try:

        # connect to the PostgreSQL server
        conn = psycopg2.connect(dbname=db_name,
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print('Inserting Auto_Synch preferences into postgreSQL:')
        cur.execute(sql, (enable_auto_synch, ath_un))
        conn.commit()

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn is not None:
            conn.close()

    #Insert user autosynch preference into postgres/db_info table
    try:
        params = config(filename="encrypted_settings.ini",
                        section="postgresql",
                        encr_pass=encr_pass)
        postgres_db = params.get("database")
        postgres_un = params.get("user")
        postgres_pw = params.get("password")

        conn_localhost = psycopg2.connect(dbname=postgres_db,
                                          user=postgres_un,
                                          password=postgres_pw)
        conn_localhost.autocommit = True

        # create a cursor
        cur_localhost = conn_localhost.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print(
                'Inserting Auto_Synch preferences into postgres/db_info table:')

        cur_localhost.execute(
            sql_postgres_db,
            (enable_auto_synch, superuser_un, encrypted_superuser_pw, db_name))

        # close the communication with the PostgreSQL
        cur_localhost.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn_localhost is not None:
            conn_localhost.close()
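psycopg2 adapts plain Python values itself, so the parameters for an UPDATE like the one above can be passed as a single flat tuple, with no extra tuple-wrapping; a minimal sketch with placeholder credentials and a made-up user:

import psycopg2

conn = psycopg2.connect(dbname='postgres', host='localhost',
                        user='postgres', password='secret')  # placeholders
cur = conn.cursor()
# Each %s receives one Python value; psycopg2 handles quoting and type adaptation.
cur.execute("UPDATE athlete SET auto_sync = %s WHERE ath_un = %s;",
            (True, 'user@example.com'))
conn.commit()
cur.close()
conn.close()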
Exemplo n.º 25
0
def gc_bodycomposition_insert(file_path, ath_un, db_host, db_name,
                              superuser_un, superuser_pw, encr_pass):
    """ Connect to the PostgreSQL database server """
    athlete_id = ath_un
    body_water = []
    muscle_mass_gm = []
    visceral_fat = []
    weight_gm = []
    bmi = []
    body_fat = []
    physique_rating = []
    timestamp = []
    calendar_date = []
    metabolic_age = []
    bone_mass_gm = []
    caloric_intake = []
    source_type = []
    conn = None

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    # PG Amend path for postgres "pg_read_file()" function.
    text = file_path
    head, sep, tail = text.partition('temp/')
    pg_read_file = (tail, )

    xpath_sql = """

        INSERT INTO garmin_connect_body_composition(athlete_id,body_water,muscle_mass_gm,visceral_fat,weight_gm,bmi,
                                                                                        body_fat,physique_rating,timestamp,calendar_date,metabolic_age,
                                                                                        bone_mass_gm,caloric_intake,source_type)

        SELECT

        (select id from athlete where ath_un=%s),
        unnest (xpath('//*[local-name()="item"]/*[local-name()="bodyWater"]/text()', x))::text::numeric AS body_water,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="muscleMass"]/text()', x))::text::int AS muscle_mass_gm,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="visceralFat"]/text()', x))::text::int AS visceral_fat,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="weight"]/text()', x))::text::numeric AS weight_gm,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="bmi"]/text()', x))::text::numeric AS bmi,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="bodyFat"]/text()', x))::text::numeric AS body_fat,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="physiqueRating"]/text()', x))::text::text AS physique_rating,
        to_char(to_timestamp(unnest (xpath('//*[local-name()="item"]/*[local-name()="samplePk"]/text()', x))::text::numeric/1000) AT TIME ZONE 'UTC','YYYY-MM-DD HH24:MI:SS')  AS timestamp,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="calendarDate"]/text()', x))::text::text AS calendar_date,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="metabolicAge"]/text()', x))::text::text AS metabolic_age,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="boneMass"]/text()', x))::text::int AS bone_mass_gm,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="caloricIntake"]/text()', x))::text::text AS caloric_intake,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="sourceType"]/text()', x))::text::text AS source_type
        
        
        FROM UNNEST (xpath('//*[local-name()="root"]', pg_read_file(%s)::xml)) x
                                        
        ON CONFLICT (athlete_id,timestamp) DO NOTHING;
                
        """

    pd_df_sql = """
        INSERT INTO garmin_connect_body_composition(athlete_id,body_water,muscle_mass_gm,visceral_fat,weight_gm,bmi,
                                                        body_fat,physique_rating,timestamp,calendar_date,metabolic_age,
                                                        bone_mass_gm,caloric_intake,source_type)

        VALUES ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
          
        ON CONFLICT (athlete_id,timestamp) DO NOTHING;
        """

    #Parse the XML document, and append the data to lists
    xml_document = parse(file_path)
    for item in xml_document.iterfind('item'):
        body_water.append(item.findtext('bodyWater'))
        muscle_mass_gm.append(item.findtext('muscleMass'))
        visceral_fat.append(item.findtext('visceralFat'))
        weight_gm.append(item.findtext('weight'))
        bmi.append(item.findtext('bmi'))
        body_fat.append(item.findtext('bodyFat'))
        physique_rating.append(item.findtext('physiqueRating'))
        timestamp.append(
            datetime.datetime.utcfromtimestamp(
                float(item.findtext('samplePk')) /
                1000).strftime('%Y-%m-%d %H:%M:%S'))
        calendar_date.append(item.findtext('calendarDate'))
        metabolic_age.append(item.findtext('metabolicAge'))
        bone_mass_gm.append(item.findtext('boneMass'))
        caloric_intake.append(item.findtext('caloricIntake'))
        source_type.append(item.findtext('sourceType'))

    #Get the data from lists to dataframe
    df = pd.DataFrame({
        'body_water': body_water,
        'muscle_mass_gm': muscle_mass_gm,
        'visceral_fat': visceral_fat,
        'weight_gm': weight_gm,
        'bmi': bmi,
        'body_fat': body_fat,
        'physique_rating': physique_rating,
        'timestamp': timestamp,
        'calendar_date': calendar_date,
        'metabolic_age': metabolic_age,
        'bone_mass_gm': bone_mass_gm,
        'caloric_intake': caloric_intake,
        'source_type': source_type
    })
    #Replace empty string with None. Required for SQL insert
    df.replace(r'', np.nan, inplace=True)
    df = df.where(pd.notnull(df), None)

    try:

        # connect to the PostgreSQL server
        conn = psycopg2.connect(dbname=db_name,
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)

        # create a cursor
        cur = conn.cursor()

        with StdoutRedirection(ath_un):
            print('Inserting Body composition Data into postgreSQL:')
        with ProgressStdoutRedirection(ath_un):
            print('Inserting Body composition Data into postgreSQL:')
        #Iterate through rows in pandas dataframe
        for row in df.itertuples():
            df_body_water = row.body_water
            df_muscle_mass_gm = row.muscle_mass_gm
            df_visceral_fat = row.visceral_fat
            df_weight_gm = row.weight_gm
            df_bmi = row.bmi
            df_body_fat = row.body_fat
            df_physique_rating = row.physique_rating
            df_timestamp = row.timestamp
            df_calendar_date = row.calendar_date
            df_metabolic_age = row.metabolic_age
            df_bone_mass_gm = row.bone_mass_gm
            df_caloric_intake = row.caloric_intake
            df_source_type = row.source_type

            cur.execute(
                pd_df_sql,
                (athlete_id, df_body_water, df_muscle_mass_gm, df_visceral_fat,
                 df_weight_gm, df_bmi, df_body_fat, df_physique_rating,
                 df_timestamp, df_calendar_date, df_metabolic_age,
                 df_bone_mass_gm, df_caloric_intake, df_source_type))

        ### Insert using Xpath method ###
        #cur.execute(xpath_sql,(athlete_id,pg_read_file))
        conn.commit()

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))
    finally:
        if conn is not None:
            conn.close()
            with StdoutRedirection(ath_un):
                print('Body composition Data Inserted Successfully')
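The parse/iterfind/findtext pattern used above can be exercised on its own; a minimal sketch with the standard-library ElementTree parser and a made-up document:

import io

import pandas as pd
from xml.etree.ElementTree import parse

xml_text = """<root>
  <item><weight>70500</weight><bmi>21.4</bmi></item>
  <item><weight>70200</weight><bmi>21.3</bmi></item>
</root>"""

doc = parse(io.StringIO(xml_text))
rows = [{'weight_gm': item.findtext('weight'), 'bmi': item.findtext('bmi')}
        for item in doc.iterfind('item')]
df = pd.DataFrame(rows)
print(df)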
Exemplo n.º 26
0
def get_weather(ath_un, db_host, db_name, superuser_un, superuser_pw, start_date, end_date, encr_pass):

    sql_timezones_select = '''
    SELECT timestamp_gmt,end_time_gmt,long_degr,lat_degr,alt_avrg from timezones 
    WHERE timestamp_gmt::date >= %s and timestamp_gmt::date <= %s 
    ORDER BY timestamp_gmt asc;
    '''

    pd_df_sql = """
    INSERT INTO weather(athlete_id,timestamp_gmt,temperature,dew_point,relative_humidity,precipitation,snow,wind_direction,wind_speed,wind_gust,sea_air_pressure,total_sunshine,condition_code)
    VALUES ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
    ON CONFLICT (athlete_id,timestamp_gmt) DO NOTHING;
    """

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    try:
        conn = psycopg2.connect(dbname=db_name, host=db_host, user=superuser_un,password=superuser_pw)
        conn.autocommit = True

        cur = conn.cursor()
        cur.execute(sql_timezones_select,(start_date,end_date))

        long_degr = None
        lat_degr = None
        alt_avrg = None
        row_start_time = None
        row_end_time = None
        row_start_time_dt = None
        row_end_time_dt = None
        start_time = None
        end_time = None
        #Earth radius in km
        R = 6373.0
        row_nr = 1
        combined_df = pd.DataFrame()

        def meteostat(lat_degr,long_degr,alt_avrg,start_time,end_time):
            #Retrieve nearest weather station
            stations = Stations()
            stations = stations.nearby(lat_degr, long_degr)
            station = stations.fetch(1)

            #Use Point to aggregate nearby weather stations data for better accuracy
            weather_point = Point(lat_degr, long_degr,alt_avrg)
            weather_data = Hourly(weather_point, start_time - timedelta(0,7200), end_time + timedelta(0,7200))
            weather_data = weather_data.fetch()
            #Use weather data from nearest station if Point returns an empty dataset
            if weather_data.empty:
                weather_data = Hourly(station, start_time - timedelta(0,7200), end_time + timedelta(0,7200))
                weather_data = weather_data.fetch()
            return weather_data
        
        for row in cur:
            #Retrieve activity start and end times and convert to datetime
            row_start_time = row[0]
            row_start_time_dt = datetime.strptime((str(row_start_time)), "%Y-%m-%d %H:%M:%S")
            row_end_time = row[1]
            row_end_time_dt = datetime.strptime((str(row_end_time)), "%Y-%m-%d %H:%M:%S")
            #Retrieve activity average altitude
            row_alt_avrg = row[4]

            #This is the first row,set initial values.
            if row_nr == 1:
                long_degr = row[2]
                lat_degr = row[3]
                start_time = row_start_time_dt
                end_time = row_end_time_dt

            #Calculate distance between subsequent activity locations
            lon1 = math.radians(long_degr)
            lat1 = math.radians(lat_degr)
            lon2 = math.radians(row[2])
            lat2 = math.radians(row[3])
            dlon = lon2 - lon1
            dlat = lat2 - lat1
            a = math.sin(dlat / 2)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2)**2
            c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
            distance = R * c

            #If subsequent activities within 30km update endtime and move onto next row
            if distance < 30:
                end_time = row_end_time_dt
                #start_time stays the same
                #move onto next row
            #If subsequent activities not within 30km, download weather data and reset end and start times   
            else:
                combined_df = pd.concat([combined_df, meteostat(lat_degr,long_degr,alt_avrg,start_time,end_time)])
                with StdoutRedirection(ath_un):
                    print(('Inserting weather data for period from: ' + str(start_time) + ' until: ' + str(end_time)))
                with ProgressStdoutRedirection(ath_un):
                    print(('Inserting weather data for period from: ' + str(start_time) + ' until: ' + str(end_time)))
                start_time = row_start_time_dt
                end_time = row_end_time_dt
                
            #Update variables with values from the current row and move onto the next one
            long_degr = row[2]
            lat_degr = row[3]
            alt_avrg = row[4]
            
            #Increment the row number variable
            row_nr += 1

        #There is one more block to be retrieved   
        combined_df = pd.concat([combined_df, meteostat(lat_degr,long_degr,alt_avrg,start_time,end_time)])
        with StdoutRedirection(ath_un):
            print(('Inserting weather data for period from: ' + str(start_time) + ' until: ' + str(end_time)))
        with ProgressStdoutRedirection(ath_un):
            print(('Inserting weather data for period from: ' + str(start_time) + ' until: ' + str(end_time)))
        cur.close()

        #Iterate through rows in the consolidated pandas df
        combined_df = combined_df.reset_index()
        for row in combined_df.itertuples():
            row_timeGMT = row.time
            row_temp = row.temp
            row_dwpt = row.dwpt
            row_rhum = row.rhum
            row_prcp = row.prcp
            row_snow = row.snow
            row_wdir = row.wdir
            row_wspd = row.wspd
            row_wpgt = row.wpgt
            row_pres = row.pres
            row_tsun = row.tsun
            row_coco = row.coco

            ### Insert weather data to weather ###
            # create a cursor
            cur = conn.cursor()
            # execute a statement
            cur.execute(pd_df_sql,(ath_un,row_timeGMT,row_temp,row_dwpt,row_rhum,row_prcp,row_snow,row_wdir,row_wspd,row_wpgt,row_pres,row_tsun,row_coco))
            conn.commit() 
            # close the communication with the PostgreSQL
            cur.close()
    except Exception as e:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))
    finally:
        if conn is not None:
            conn.close()
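The 30 km gate in get_weather() is the haversine great-circle formula; a standalone sketch, assuming coordinates in decimal degrees and the same Earth radius in km:

import math

def haversine_km(lat1_deg, lon1_deg, lat2_deg, lon2_deg, radius_km=6373.0):
    #Convert degrees to radians, then apply the haversine formula
    lat1, lon1, lat2, lon2 = map(math.radians,
                                 (lat1_deg, lon1_deg, lat2_deg, lon2_deg))
    dlat = lat2 - lat1
    dlon = lon2 - lon1
    a = math.sin(dlat / 2)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2)**2
    return radius_km * 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))

print(haversine_km(50.06, 19.94, 50.08, 14.44))  # Krakow to Prague, roughly 393 km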
Exemplo n.º 27
0
def create_user_db(ath_un, ath_pw, db_host, db_name, superuser_un,
                   superuser_pw, encrypted_superuser_pw, save_pwd, encr_pass):
    conn = None
    head, sep, tail = ath_un.partition('@')
    db_username = head

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    sql_get_lc_collate = "SHOW LC_COLLATE"
    sql_create_db = "CREATE DATABASE \"" + db_name + "\" WITH TEMPLATE = template0 ENCODING = 'UTF8' LC_COLLATE = %s LC_CTYPE = %s;"
    sql_create_dbuser = "CREATE USER \"" + db_username + "\" WITH PASSWORD '" + ath_pw + "';"
    sql_update_db_info_record = "UPDATE db_info SET db_name = %s, db_host = %s, db_un = %s, db_pw = %s, db_auto_synch = %s WHERE ath_un= %s; "

    try:
        conn = psycopg2.connect(dbname='postgres',
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)
        conn.autocommit = True

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print('Creating user database...')

        cur.execute(sql_get_lc_collate)
        lc_collate = cur.fetchone()
        cur.execute(sql_create_db, (lc_collate[0], lc_collate[0]))
        cur.execute(sql_create_dbuser)

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn is not None:
            conn.close()

    #Insert user db connection info into postgres/db_info table
    try:
        params = config(filename="encrypted_settings.ini",
                        section="postgresql",
                        encr_pass=encr_pass)
        postgres_db = params.get("database")
        postgres_un = params.get("user")
        postgres_pw = params.get("password")

        conn_localhost = psycopg2.connect(dbname=postgres_db,
                                          user=postgres_un,
                                          password=postgres_pw)
        conn_localhost.autocommit = True

        # create a cursor
        cur_localhost = conn_localhost.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print('Inserting DB connection info...')

        cur_localhost.execute(sql_update_db_info_record,
                              (db_name, db_host, superuser_un,
                               encrypted_superuser_pw, save_pwd, ath_un))

        # close the communication with the PostgreSQL
        cur_localhost.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn_localhost is not None:
            conn_localhost.close()
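CREATE DATABASE cannot run inside a transaction block, which is why create_user_db() sets conn.autocommit = True before issuing it; a minimal sketch with a placeholder database name and credentials:

import psycopg2
from psycopg2 import sql

conn = psycopg2.connect(dbname='postgres', host='localhost',
                        user='postgres', password='secret')  # placeholders
conn.autocommit = True  # CREATE DATABASE refuses to run inside a transaction
cur = conn.cursor()
cur.execute(sql.SQL("CREATE DATABASE {};").format(sql.Identifier('example_db')))
cur.close()
conn.close()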
Exemplo n.º 28
0
def dwnld_insert_strava_data(ath_un, db_host, db_name, superuser_un,
                             superuser_pw, strava_refresh_token, start_date_dt,
                             end_date_dt, save_pwd, encr_pass):

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    strava_params = config(filename="encrypted_settings.ini",
                           section="strava",
                           encr_pass=encr_pass)
    STRAVA_CLIENT_ID = str(strava_params.get("strava_client_id"))
    STRAVA_CLIENT_SECRET = str(strava_params.get("strava_client_secret"))
    STRAVA_TOKEN_URL = str(strava_params.get("strava_token_url"))

    sql_insert_strava_refresh_token = """

        DO
            $do$
                BEGIN
                    IF EXISTS (SELECT id FROM athlete WHERE ath_un = %s) THEN
                        UPDATE athlete SET strava_refresh_token = %s where ath_un= %s;
                    END IF;
                END
            $do$

    """
    sql_insert_activity_summary = """

        INSERT INTO strava_activity_summary(athlete_id,strava_activity_id,strava_athlete_id,name,distance,moving_time,elapsed_time,total_elevation_gain,type,
        workout_type,external_id,upload_id,start_date,start_date_local,timezone,utc_offset,start_latitude,start_longitude,end_latitude,end_longitude,location_city,
        location_state,location_country,map,summary_polyline,trainer,commute,manual,gear_id,average_speed,max_speed,average_cadence,average_temp,average_watts,
        weighted_average_watts,kilojoules,device_watts,average_heartrate,max_heartrate,max_watts,elev_high,elev_low,suffer_score)

        VALUES
        ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (start_date) DO NOTHING;
        
    """

    sql_insert_activity_streams = """

        INSERT INTO strava_activity_streams(activity_id,altitude,cadence,distance,grade_smooth,heartrate,latitude,longitude,moving,temp,time_gmt,velocity_smooth,watts)

        VALUES
        ((select id from strava_activity_summary where strava_activity_id=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (time_gmt) DO NOTHING;
    """

    sql_insert_utc_offset = """
        INSERT INTO gmt_local_time_difference (athlete_id,local_date,local_midnight_timestamp,gmt_midnight_timestamp,gmt_local_difference)

        VALUES
            ((select id from athlete where ath_un=%s),%s,%s,%s,%s)

        ON CONFLICT (local_date) DO NOTHING;
        """

    sql_timezone = """
        INSERT INTO timezones(gc_activity_id,strava_activity_id,timestamp_local,timestamp_gmt,timezone,long_degr,lat_degr,alt_avrg,end_time_gmt)
        
        VALUES
        (null,(select id from strava_activity_summary where strava_activity_id=%s),%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (timestamp_gmt,long_degr,lat_degr) DO NOTHING;
        """

    conn = psycopg2.connect(dbname=db_name,
                            host=db_host,
                            user=superuser_un,
                            password=superuser_pw)

    def refresh_oauth_tokens():
        #Refresh the access token
        payload = dict(grant_type='refresh_token',
                       refresh_token=strava_refresh_token,
                       client_id=STRAVA_CLIENT_ID,
                       client_secret=STRAVA_CLIENT_SECRET)
        refresh = requests.post(STRAVA_TOKEN_URL, data=payload)
        response = refresh.json()
        oauth_token = response['access_token']
        refresh_token = response['refresh_token']
        expires_at = response['expires_at']

        if save_pwd == True:
            encrypted_refresh_token = base64.b64encode(
                encrypt(refresh_token, encr_pass))
            encrypted_refresh_token = encrypted_refresh_token.decode('utf-8')
        else:
            encrypted_refresh_token = None

        try:
            cur = conn.cursor()
            cur.execute(sql_insert_strava_refresh_token,
                        (ath_un, encrypted_refresh_token, ath_un))
            conn.commit()
            cur.close()
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print(
                    (str(datetime.now()) + ' [' +
                     sys._getframe().f_code.co_name + ']' +
                     ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                     '  ' + str(e)))

        return oauth_token, expires_at

    oauth_token, expires_at = refresh_oauth_tokens()
    header = {'Authorization': 'Bearer {}'.format(oauth_token)}
    epoch_start_date = start_date_dt.timestamp()
    epoch_end_date = end_date_dt.timestamp()

    #Loop through all activities
    page = 1
    activities_url = "https://www.strava.com/api/v3/activities"

    while True:
        # Retry 3 times if request fails
        tries = 3
        for i in range(tries):
            try:
                #Check if the oauth token is still valid, and refresh if not.
                if int(datetime.now().timestamp()) > expires_at:
                    oauth_token, expires_at = refresh_oauth_tokens()
                    header = {'Authorization': 'Bearer {}'.format(oauth_token)}
                    with ProgressStdoutRedirection(ath_un):
                        print(
                            str(datetime.now()) +
                            ' The Strava oauth token expired and has been refreshed'
                        )
                # get page of 200 activities from Strava
                r = requests.get(
                    '{}?before={}&after={}&page={}&per_page=200'.format(
                        activities_url, epoch_end_date, epoch_start_date,
                        str(page)),
                    headers=header)
                sleep_sec = api_rate_limits(r)
            except Exception as e:
                if i < tries - 1:
                    time.sleep(10)
                    continue
                else:
                    with ErrorStdoutRedirection(ath_un):
                        print((str(datetime.now()) + ' [' +
                               sys._getframe().f_code.co_name + ']' +
                               ' Error on line {}'.format(
                                   sys.exc_info()[-1].tb_lineno) + '  ' +
                               str(e)))
                    raise
            break

        if sleep_sec:
            time.sleep(sleep_sec)

        # if no results then exit loop
        if (not r.json()):
            if conn is not None:
                conn.close()
                with StdoutRedirection(ath_un):
                    print('Strava Activities Inserted Successfully')
                with ProgressStdoutRedirection(ath_un):
                    print('Strava Activities Inserted Successfully')
            break

        #Do something with the response/data
        activity_data = json.loads(r.text)

        #assign activity values to variables
        for activity in activity_data:
            #Use dict.get() so missing keys simply yield None
            strava_activity_id = activity.get('id')
            if strava_activity_id is not None:
                #PG: Check whether the data for this activity have been inserted into the DB during one of the previous runs
                data_exist_for_activity = 'Strava activity {} data'.format(
                    strava_activity_id)
                data_exists = check_data_file_exists(data_exist_for_activity,
                                                     ath_un, db_host, db_name,
                                                     superuser_un,
                                                     superuser_pw, encr_pass)
                if data_exists == True:
                    with StdoutRedirection(ath_un):
                        print((
                            'Strava activity {} data already downloaded and inserted to DB. Skipping.'
                            .format(strava_activity_id)))
                    with ProgressStdoutRedirection(ath_un):
                        print((
                            'Strava activity {} data already downloaded and inserted to DB. Skipping.'
                            .format(strava_activity_id)))
                    continue
            strava_athlete_id = activity['athlete'].get('id')
            name = activity.get('name')
            distance = activity.get('distance')
            moving_time = activity.get('moving_time')
            elapsed_time = activity.get('elapsed_time')
            total_elevation_gain = activity.get('total_elevation_gain')
            type = activity.get('type')
            workout_type = activity.get('workout_type')
            external_id = activity.get('external_id')
            upload_id = activity.get('upload_id')
            start_date = activity.get('start_date')
            if start_date is not None:
                start_date_dt = datetime.strptime(start_date,
                                                  '%Y-%m-%dT%H:%M:%SZ')
                start_date = datetime.strftime(start_date_dt,
                                               '%Y-%m-%d %H:%M:%S')
            start_date_local = activity.get('start_date_local')
            if start_date_local is not None:
                start_date_local_dt = datetime.strptime(
                    start_date_local, '%Y-%m-%dT%H:%M:%SZ')
                start_date_local = datetime.strftime(start_date_local_dt,
                                                     '%Y-%m-%d %H:%M:%S')
            timezone = activity.get('timezone')
            utc_offset = activity.get('utc_offset')
            #Strava returns an empty list for latlng when an activity has no GPS data
            start_latlng = activity.get('start_latlng')
            if start_latlng:
                start_latitude = start_latlng[0]
                start_longitude = start_latlng[1]
            else:
                start_latitude = None
                start_longitude = None
            end_latlng = activity.get('end_latlng')
            if end_latlng:
                end_latitude = end_latlng[0]
                end_longitude = end_latlng[1]
            else:
                end_latitude = None
                end_longitude = None
            location_city = activity.get('location_city')
            location_state = activity.get('location_state')
            location_country = activity.get('location_country')
            map = activity['map'].get('id')
            summary_polyline = activity['map'].get('summary_polyline')
            trainer = activity.get('trainer')
            commute = activity.get('commute')
            manual = activity.get('manual')
            gear_id = activity.get('gear_id')
            average_speed = activity.get('average_speed')
            max_speed = activity.get('max_speed')
            average_cadence = activity.get('average_cadence')
            average_temp = activity.get('average_temp')
            average_watts = activity.get('average_watts')
            weighted_average_watts = activity.get('weighted_average_watts')
            kilojoules = activity.get('kilojoules')
            device_watts = activity.get('device_watts')
            average_heartrate = activity.get('average_heartrate')
            max_heartrate = activity.get('max_heartrate')
            max_watts = activity.get('max_watts')
            elev_high = activity.get('elev_high')
            elev_low = activity.get('elev_low')
            suffer_score = activity.get('suffer_score')

            with StdoutRedirection(ath_un):
                print('Downloading Strava activity {} from: {}'.format(
                    strava_activity_id, start_date_local))
            with ProgressStdoutRedirection(ath_un):
                print('Downloading Strava activity {} from: {}'.format(
                    strava_activity_id, start_date_local))

            #Insert activity summary data to DB
            try:
                cur = conn.cursor()
                cur.execute(
                    sql_insert_activity_summary,
                    (ath_un, strava_activity_id, strava_athlete_id, name,
                     distance, moving_time, elapsed_time, total_elevation_gain,
                     type, workout_type, external_id, upload_id, start_date,
                     start_date_local, timezone, utc_offset, start_latitude,
                     start_longitude, end_latitude, end_longitude,
                     location_city, location_state, location_country, map,
                     summary_polyline, trainer, commute, manual, gear_id,
                     average_speed, max_speed, average_cadence, average_temp,
                     average_watts, weighted_average_watts, kilojoules,
                     device_watts, average_heartrate, max_heartrate, max_watts,
                     elev_high, elev_low, suffer_score))
                conn.commit()
                cur.close()
                # Update the files table
                data_file_path_insert(data_exist_for_activity, ath_un, db_host,
                                      db_name, superuser_un, superuser_pw,
                                      encr_pass)

                #Insert utc offset into gmt_local_time_difference table if the record not already present from gc
                gmt_local_difference = timedelta(seconds=utc_offset)
                local_date = start_date_local_dt.date()
                local_midnight = start_date_local_dt.replace(hour=00,
                                                             minute=00,
                                                             second=00)
                gmt_midnight = local_midnight - gmt_local_difference

                cur = conn.cursor()
                cur.execute(sql_insert_utc_offset,
                            (ath_un, local_date, local_midnight, gmt_midnight,
                             gmt_local_difference))
                conn.commit()
                cur.close()

                #Insert timezone info into timezones table if the record not already present from gc
                if start_latitude is not None:
                    #Calculate average altitude for better weather data accuracy
                    avg_altitude = int((elev_high + elev_low) / 2)
                    #Drop (GMT+hh:mm) from timezone string
                    timezone = re.sub("[(].*[) ]", "", timezone)
                    #Calculate end_time_gmt_str
                    end_time_gmt_dt = start_date_dt + timedelta(
                        seconds=elapsed_time)
                    end_time_gmt_str = datetime.strftime((end_time_gmt_dt),
                                                         "%Y-%m-%d %H:%M:%S")
                    cur = conn.cursor()
                    with StdoutRedirection(ath_un):
                        print(('Inserting timezone info: ' + str(timezone) +
                               ' and local tz timestamp:' + str(local_date)))
                    with ProgressStdoutRedirection(ath_un):
                        print(('Inserting timezone info: ' + str(timezone) +
                               ' and local tz timestamp:' + str(local_date)))
                    cur.execute(
                        sql_timezone,
                        (strava_activity_id, start_date_local, start_date,
                         timezone, start_longitude, start_latitude,
                         avg_altitude, end_time_gmt_str))
                    conn.commit()
                    cur.close()

            except Exception as e:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.now()) + ' [' +
                           sys._getframe().f_code.co_name + ']' +
                           ' Error on line {}'.format(
                               sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))

            #Get all available streams for the activity
            types = 'time,distance,latlng,altitude,velocity_smooth,heartrate,cadence,watts,temp,moving,grade_smooth'
            df_columns = {
                'activity_id', 'time_gmt', 'distance', 'latitude', 'longitude',
                'altitude', 'velocity_smooth', 'heartrate', 'cadence', 'watts',
                'temp', 'moving', 'grade_smooth'
            }
            df_columns = sorted(df_columns)

            # Retry 3 times if request fails
            tries = 3
            for i in range(tries):
                try:
                    #Check if the oauth token is still valid, and refresh if not.
                    if int(datetime.now().timestamp()) > expires_at:
                        oauth_token, expires_at = refresh_oauth_tokens()
                        header = {
                            'Authorization': 'Bearer {}'.format(oauth_token)
                        }
                        with ProgressStdoutRedirection(ath_un):
                            print(
                                str(datetime.now()) +
                                ' The Strava oauth token expired and has been refreshed'
                            )
                    streams = requests.get(
                        "https://www.strava.com/api/v3/activities/{}/streams?keys={}"
                        .format(strava_activity_id, types),
                        headers=header)
                    sleep_sec = api_rate_limits(streams)
                except Exception as e:
                    if i < tries - 1:
                        time.sleep(10)
                        continue
                    else:
                        with ErrorStdoutRedirection(ath_un):
                            print((str(datetime.now()) + ' [' +
                                   sys._getframe().f_code.co_name + ']' +
                                   ' Error on line {}'.format(
                                       sys.exc_info()[-1].tb_lineno) + '  ' +
                                   str(e)))
                        raise
                break

            # Honour any back-off requested by api_rate_limits()
            if sleep_sec > 0:
                time.sleep(sleep_sec)

            # TODO: if sleep_sec > 6hrs request a new access_token
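            # NOTE (editor's sketch): api_rate_limits() is a project helper not
            # shown here. Assuming it inspects Strava's documented rate-limit
            # headers (X-RateLimit-Limit and X-RateLimit-Usage, each holding a
            # "15-minute,daily" pair), a minimal stand-in might look like:
            #
            #   def api_rate_limits(response):
            #       limits = response.headers.get('X-RateLimit-Limit', '')
            #       usage = response.headers.get('X-RateLimit-Usage', '')
            #       if not limits or not usage:
            #           return 0
            #       limit_15min = int(limits.split(',')[0])
            #       usage_15min = int(usage.split(',')[0])
            #       # Back off until the next 15-minute window when at the limit
            #       if usage_15min >= limit_15min:
            #           return 900 - (int(time.time()) % 900)
            #       return 0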

            # Parse the streams JSON returned by the API
            activity_streams = json.loads(streams.text)
            # Check whether activity_streams actually contains data
            if 'message' in activity_streams:
                if "Resource Not Found" in activity_streams['message']:
                    with ErrorStdoutRedirection(ath_un):
                        print(
                            str(datetime.now()) +
                            ' No streams retrieved for {}. Moving onto next activity.'
                            .format(strava_activity_id))
                    continue
                if "Rate Limit Exceeded" in activity_streams['message']:
                    with ErrorStdoutRedirection(ath_un):
                        print(
                            str(datetime.now()) +
                            ' Download rate limit exceeded. This attempt did not get caught by the api_rate_limits() function!'
                        )
                    # Usually due to a failed request on the very last attempt
                    # before the limit, followed by a successful retry. Too
                    # messy to untangle; move onto the next activity.
                    continue

            activity_streams_df = pd.DataFrame(columns=df_columns)

            for stream in activity_streams:
                if "time" in stream['type']:
                    activity_id_list = []
                    time_stream = stream['data']
                    for i in range(len(time_stream)):
                        time_stream[i] = start_date_dt + timedelta(
                            0, int(time_stream[i]))
                        time_stream[i] = datetime.strftime(
                            time_stream[i], '%Y-%m-%d %H:%M:%S')
                        activity_id_list.append(strava_activity_id)
                    activity_streams_df['activity_id'] = activity_id_list
                    activity_streams_df['time_gmt'] = time_stream

                if "distance" in stream['type']:
                    distance_stream = stream['data']
                    activity_streams_df['distance'] = distance_stream

                if "latlng" in stream['type']:
                    latlng_stream = stream['data']
                    latitude = []
                    longitude = []
                    if latlng_stream is not None:
                        for i in range(len(latlng_stream)):
                            latitude.append(latlng_stream[i][0])
                            longitude.append(latlng_stream[i][1])
                    activity_streams_df['latitude'] = latitude
                    activity_streams_df['longitude'] = longitude

                if "altitude" in stream['type']:
                    altitude_stream = stream['data']
                    activity_streams_df['altitude'] = altitude_stream

                if "velocity_smooth" in stream['type']:
                    velocity_smooth_stream = stream['data']
                    activity_streams_df[
                        'velocity_smooth'] = velocity_smooth_stream

                if "heartrate" in stream['type']:
                    heartrate_stream = stream['data']
                    activity_streams_df['heartrate'] = heartrate_stream

                if "cadence" in stream['type']:
                    cadence_stream = stream['data']
                    activity_streams_df['cadence'] = cadence_stream

                if "watts" in stream['type']:
                    watts_stream = stream['data']
                    activity_streams_df['watts'] = watts_stream

                if "temp" in stream['type']:
                    temp_stream = stream['data']
                    activity_streams_df['temp'] = temp_stream

                if "moving" in stream['type']:
                    moving_stream = stream['data']
                    activity_streams_df['moving'] = moving_stream

                if "grade_smooth" in stream['type']:
                    grade_smooth_stream = stream['data']
                    activity_streams_df['grade_smooth'] = grade_smooth_stream

            activity_streams_df = activity_streams_df.reindex(
                sorted(activity_streams_df.columns),
                axis=1)  # Sort df columns alphabetically
            activity_streams_df = activity_streams_df.where(
                pd.notnull(activity_streams_df), None)  # Replace NaN with None
            df2list = activity_streams_df.values.tolist()
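            # NOTE (editor's sketch): since each stream dict maps a 'type' to a
            # 'data' list of equal length, the column-by-column assignments
            # above could be collapsed, with latlng still needing a split:
            #
            #   stream_map = {s['type']: s['data'] for s in activity_streams}
            #   latlng = stream_map.pop('latlng', None)
            #   activity_streams_df = pd.DataFrame(stream_map)
            #   if latlng is not None:
            #       activity_streams_df['latitude'] = [p[0] for p in latlng]
            #       activity_streams_df['longitude'] = [p[1] for p in latlng]
            #
            # (the 'time' stream would still need the offset-to-timestamp
            # conversion shown above)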

            # Insert activity streams data into the DB row by row
            try:
                cur = conn.cursor()
                for sublist in df2list:
                    cur.execute(sql_insert_activity_streams, sublist)
                    conn.commit()
                cur.close()
            except Exception as e:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.now()) + ' [' +
                           sys._getframe().f_code.co_name + ']' +
                           ' Error on line {}'.format(
                               sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))
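            # NOTE (editor's sketch): the row-by-row loop above could be
            # batched with psycopg2.extras.execute_values, assuming
            # sql_insert_activity_streams were rewritten to use a single
            # "VALUES %s" placeholder:
            #
            #   import psycopg2.extras
            #   psycopg2.extras.execute_values(
            #       cur, sql_insert_activity_streams, df2list)
            #   conn.commit()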

        # increment page
        page += 1
Example No. 29
def user_tokens_insert(ath_un, db_host, db_name, superuser_un, superuser_pw,
                       dbx_auth_token, oura_refresh_token,
                       strava_refresh_token, encr_pass, save_pwd):

    if save_pwd:
        # Encrypt the Dropbox token
        if dbx_auth_token is not None:
            encrypted_dbx_auth_token = base64.b64encode(
                encrypt(dbx_auth_token, encr_pass))
            encrypted_dbx_auth_token = encrypted_dbx_auth_token.decode('utf-8')
        else:
            encrypted_dbx_auth_token = None
        # Encrypt the Oura token
        if oura_refresh_token is not None:
            encrypted_oura_refresh_token = base64.b64encode(
                encrypt(oura_refresh_token, encr_pass))
            encrypted_oura_refresh_token = encrypted_oura_refresh_token.decode(
                'utf-8')
        else:
            encrypted_oura_refresh_token = None
        # Encrypt the Strava token
        if strava_refresh_token is not None:
            encrypted_strava_refresh_token = base64.b64encode(
                encrypt(strava_refresh_token, encr_pass))
            encrypted_strava_refresh_token = encrypted_strava_refresh_token.decode(
                'utf-8')
        else:
            encrypted_strava_refresh_token = None
    else:
        encrypted_dbx_auth_token = None
        encrypted_oura_refresh_token = None
        encrypted_strava_refresh_token = None
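    # NOTE (editor's sketch): encrypt() is a project helper not shown here. A
    # hypothetical stand-in with the same call shape, deriving a Fernet key
    # from the passphrase, might look like:
    #
    #   from cryptography.fernet import Fernet
    #   from cryptography.hazmat.primitives import hashes
    #   from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
    #
    #   def encrypt(plaintext, passphrase, salt=b'change-me'):
    #       kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32,
    #                        salt=salt, iterations=390000)
    #       key = base64.urlsafe_b64encode(kdf.derive(passphrase.encode()))
    #       return Fernet(key).encrypt(plaintext.encode())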

    conn = None

    # Get the PID of the current process and record it in the PID file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    sql = """
    DO
    $do$
    BEGIN
    IF

    EXISTS (SELECT id FROM athlete WHERE ath_un = %s) THEN  

    UPDATE athlete SET ath_un = %s where ath_un = %s;
            
    ELSE
    INSERT INTO athlete (ath_un) VALUES 
    (%s);

    END IF;
    
    END
    $do$
    """

    sql_insert_dbx_auth_token = """
    DO
    $do$
    BEGIN
    IF
    EXISTS (SELECT id FROM athlete WHERE ath_un = %s) THEN
    UPDATE athlete SET dropbox_access_token = %s WHERE ath_un = %s;
    END IF;
    END
    $do$
    """

    sql_insert_oura_refresh_token = """
    DO
    $do$
    BEGIN
    IF
    EXISTS (SELECT id FROM athlete WHERE ath_un = %s) THEN
    UPDATE athlete SET oura_refresh_token = %s WHERE ath_un = %s;
    END IF;
    END
    $do$
    """

    sql_insert_strava_refresh_token = """
    DO
    $do$
    BEGIN
    IF
    EXISTS (SELECT id FROM athlete WHERE ath_un = %s) THEN
    UPDATE athlete SET strava_refresh_token = %s WHERE ath_un = %s;
    END IF;
    END
    $do$
    """

    try:

        # connect to the PostgreSQL server
        conn = psycopg2.connect(dbname=db_name,
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print('Inserting GC user data into PostgreSQL:')
        cur.execute(sql, (ath_un, ath_un, ath_un, ath_un))
        conn.commit()

        if dbx_auth_token is not None:
            with ProgressStdoutRedirection(ath_un):
                print(
                    'Inserting Dropbox user authentication token into PostgreSQL:'
                )
            cur.execute(sql_insert_dbx_auth_token,
                        (ath_un, encrypted_dbx_auth_token, ath_un))
            conn.commit()

        if oura_refresh_token is not None:
            with ProgressStdoutRedirection(ath_un):
                print('Inserting Oura refresh token into PostgreSQL:')
            cur.execute(sql_insert_oura_refresh_token,
                        (ath_un, encrypted_oura_refresh_token, ath_un))
            conn.commit()

        if strava_refresh_token is not None:
            with ProgressStdoutRedirection(ath_un):
                print('Inserting Strava refresh token into PostgreSQL:')
            cur.execute(sql_insert_strava_refresh_token,
                        (ath_un, encrypted_strava_refresh_token, ath_un))
            conn.commit()

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn is not None:
            conn.close()
Example No. 30
def backup_user_db(db_name, ath_un, output, dbx_auth_token, encr_pass):

    # Build a platform-independent backup file path
    backup_file_path = os.path.join(output, ath_un, 'DB_Backup.gz')

    # Get the PID of the current process and record it in the PID file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    # read connection parameters
    params = config(filename="encrypted_settings.ini",
                    section="postgresql",
                    encr_pass=encr_pass)
    superuser_un = params.get("user")
    superuser_pw = params.get("password")
    db_host = params.get("host")

    try:
        with gzip.open(backup_file_path, 'wb') as f:
            with StdoutRedirection(ath_un):
                print('Backing up DB, please wait....')
            with ProgressStdoutRedirection(ath_un):
                print('Backing up DB, please wait....')

            # Create .pgpass (host:port:database:user:password) in the user's
            # AppData or home directory so pg_dump can authenticate; the user
            # field must match the -U argument passed to pg_dump below
            pgpass_line = db_host + ':5432:*:' + superuser_un + ':' + superuser_pw
            pgpass_created = False
            if os.name == 'nt':
                appdata_dir = os.getenv('APPDATA')
                pgpass_dir = os.path.join(appdata_dir, "postgresql")
                pgpassfile = os.path.join(pgpass_dir, "pgpass.conf")
                if not os.path.exists(pgpass_dir):
                    os.makedirs(pgpass_dir)
                if not os.path.exists(pgpassfile):
                    with open(pgpassfile, 'w+') as f:
                        f.write(pgpass_line)
                    pgpass_created = True
            else:
                pgpassfile = os.path.expanduser('~') + '/.pgpass'
                if not os.path.exists(pgpassfile):
                    with open(pgpassfile, 'w+') as f:
                        f.write(pgpass_line)
                    os.chmod(pgpassfile, 0o600)
                    pgpass_created = True
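            # NOTE (editor's sketch): instead of materialising a .pgpass file,
            # the password could be handed to pg_dump for this one call via
            # the PGPASSWORD environment variable:
            #
            #   env = dict(os.environ, PGPASSWORD=superuser_pw)
            #   popen = subprocess.Popen(
            #       ['pg_dump', '-h', db_host, '-U', superuser_un, db_name],
            #       stdout=subprocess.PIPE, universal_newlines=True, env=env)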
            # execute pg_dump
            popen = subprocess.Popen(
                ['pg_dump', '-h', db_host, '-U', superuser_un, db_name],
                stdout=subprocess.PIPE,
                universal_newlines=True)
            for stdout_line in iter(popen.stdout.readline, ""):
                f.write(stdout_line.encode())
            popen.stdout.close()
            popen.wait()
            try:
                # Only remove the .pgpass file if this run created it
                if pgpass_created:
                    os.remove(pgpassfile)
            except Exception as e:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.datetime.now()) + ' [' +
                           sys._getframe().f_code.co_name + ']' +
                           ' Error on line {}'.format(
                               sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))
            with StdoutRedirection(ath_un):
                print(
                    'DB backup completed. The backup file will be uploaded to Dropbox now.'
                )
            with ProgressStdoutRedirection(ath_un):
                print(
                    'DB backup completed. The backup file will be uploaded to Dropbox now.'
                )
    except Exception as e:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(e)))

    today = datetime.datetime.today().strftime('%Y%m%d')
    download_folder_dbx = 'DB_Backup'
    file_name_to = today + '_DB_Backup.gz'

    dbx_file_exists = check_if_file_exists_in_dbx(file_name_to, dbx_auth_token,
                                                  download_folder_dbx,
                                                  encr_pass)
    if dbx_file_exists:
        with StdoutRedirection(ath_un):
            print((file_name_to + ' already exists in Dropbox, skipping.'))
        with ProgressStdoutRedirection(ath_un):
            print((file_name_to + ' already exists in Dropbox, skipping.'))
    else:
        try:
            with StdoutRedirection(ath_un):
                print(('Uploading ' + file_name_to + ' to Dropbox, please wait.'))
            download_files_to_dbx(backup_file_path, file_name_to,
                                  dbx_auth_token, download_folder_dbx,
                                  encr_pass)
            with StdoutRedirection(ath_un):
                print('DB backup file uploaded to Dropbox successfully')
            with ProgressStdoutRedirection(ath_un):
                print('DB backup file uploaded to Dropbox successfully')
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print(
                    (str(datetime.datetime.now()) + ' [' +
                     sys._getframe().f_code.co_name + ']' +
                     ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                     '  ' + str(e)))