Code Example #1
def check_host_record_exists(ath_un, db_name, db_host, encr_pass):
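    # Returns True when a db_info record exists for db_name and its stored
    # db_host differs from the supplied db_host; returns False otherwise.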
    conn_localhost = None
    db_host_diff = False

    sql_check_db_host_exists = """
    SELECT db_host FROM db_info WHERE db_name = %s;
    """

    try:
        # connect to the PostgreSQL server
        params = config(filename="encrypted_settings.ini",
                        section="postgresql",
                        encr_pass=encr_pass)
        postgres_db = params.get("database")
        postgres_un = params.get("user")
        postgres_pw = params.get("password")

        conn_localhost = psycopg2.connect(dbname=postgres_db,
                                          user=postgres_un,
                                          password=postgres_pw)
        conn_localhost.autocommit = True

        # create a cursor
        cur = conn_localhost.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print('Checking whether the db_host record already exists')

        cur.execute(sql_check_db_host_exists, (db_name, ))
        result = cur.fetchone()

        if result:
            if result[0] is not None:  #Previous record exists
                stored_db_host = result[0]
                if stored_db_host != db_host:  #Previous record does not match
                    db_host_diff = True
                else:
                    db_host_diff = False
            else:  #No previous record exists
                db_host_diff = False
        else:
            db_host_diff = False

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn_localhost is not None:
            conn_localhost.close()

    return db_host_diff
Code Example #2
def ath_auth_reset(ath_un, ath_pw, encr_pass):
    conn_localhost = None
    sql_update_usr_pwd = "UPDATE db_info SET ath_pw = %s where ath_un= %s;"

    ath_pw_hash = generate_password_hash(ath_pw)
    query_params = (ath_pw_hash, ath_un)

    try:
        # connect to the PostgreSQL server
        params = config(filename="encrypted_settings.ini",
                        section="postgresql",
                        encr_pass=encr_pass)
        postgres_db = params.get("database")
        superuser_un = params.get("user")
        superuser_pw = params.get("password")

        conn_localhost = psycopg2.connect(dbname=postgres_db,
                                          user=superuser_un,
                                          password=superuser_pw)
        conn_localhost.autocommit = True

        # create a cursor
        cur = conn_localhost.cursor()
        cur.execute(sql_update_usr_pwd, query_params)

        # Reset superset user password
        superset_params = config(filename="encrypted_settings.ini",
                                 section="superset",
                                 encr_pass=encr_pass)
        superset_used = str(superset_params.get("superset"))
        if superset_used == 'true':
            os.system(
                "superset fab reset-password  --username {} --password {}".
                format(ath_un, ath_pw))

    except Exception as error:
        print((str(datetime.datetime.now()) + ' [' +
               sys._getframe().f_code.co_name + ']' +
               ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
               '  ' + str(error)))
        return None
    finally:
        if conn_localhost is not None:
            conn_localhost.close()
Code Example #3
def send_email(encr_pass, subject, body, att_path=None, recipients=None):
    app_params = config(filename="encrypted_settings.ini",
                        section="app",
                        encr_pass=encr_pass)
    smtp_user = app_params.get("smtp_user")
    smtp_password = app_params.get("smtp_password")
    smtp_default_sender = app_params.get("smtp_default_sender")
    admin_email = app_params.get("admin_email")
    smtp_server = app_params.get("smtp_server")
    smtp_server_port = int(app_params.get("smtp_server_port"))

    # Create a multipart message and set headers
    message = MIMEMultipart()
    message["From"] = smtp_default_sender
    if recipients is not None:
        if isinstance(recipients, list):
            message["To"] = ", ".join(recipients)
        else:
            message["To"] = recipients
    else:
        recipients = admin_email
        message["To"] = recipients
    message["Subject"] = subject
    #message["Bcc"] = receiver_email  # Recommended for mass emails

    # Add body to email
    message.attach(MIMEText(body, "plain"))

    if att_path is not None:
        # Open attachment file in binary mode
        with open(att_path, "rb") as attachment:
            # Add file as application/octet-stream
            part = MIMEBase("application", "octet-stream")
            part.set_payload(attachment.read())

        # Encode file in ASCII characters to send by email
        encoders.encode_base64(part)

        filename = os.path.basename(os.path.normpath(att_path))

        # Add header as key/value pair to attachment part
        part.add_header("Content-Disposition",
                        "attachment; filename= %s" % filename)

        # Add attachment to message
        message.attach(part)
    # Convert message to string
    text = message.as_string()

    # Log in to server and send email
    server = smtplib.SMTP_SSL(smtp_server, smtp_server_port)
    server.login(smtp_user, smtp_password)
    server.sendmail(smtp_default_sender, recipients, text)
    server.quit()
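
# Hypothetical usage sketch (not part of the original module): the passphrase,
# subject, body, attachment path and recipient address below are illustration
# values only. When recipients is omitted, send_email falls back to the
# configured admin_email.
if __name__ == "__main__":
    demo_encr_pass = "example-passphrase"  # placeholder; normally supplied by the caller
    send_email(demo_encr_pass,
               subject="Nightly synch finished",
               body="All user databases were synchronised without errors.",
               att_path="/tmp/synch_report.log",
               recipients=["athlete@example.com"])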
Code Example #4
def check_if_file_exists_in_dbx(file_name, dbx_auth_token, folder, encr_pass):
    dbx_params = config(filename="encrypted_settings.ini",
                        section="dropbox",
                        encr_pass=encr_pass)
    APP_KEY = str(dbx_params.get("app_key"))
    APP_SECRET = str(dbx_params.get("app_secret"))
    access_token = dbx_auth_token
    checkIfFileExists = CheckIfFileExists(access_token, APP_KEY, APP_SECRET)
    file_name_to = file_name
    file_path_to = '/' + folder + '/' + file_name_to

    file_exists = checkIfFileExists.check_if_file_exists(file_path_to)

    return file_exists
Code Example #5
def download_files_to_dbx(file_path_from, file_name, dbx_auth_token, folder,
                          encr_pass):
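    # Note: despite the "download" in its name, this function uploads the local
    # file at file_path_from to the given Dropbox folder via TransferData.upload_file.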
    dbx_params = config(filename="encrypted_settings.ini",
                        section="dropbox",
                        encr_pass=encr_pass)
    APP_KEY = str(dbx_params.get("app_key"))
    APP_SECRET = str(dbx_params.get("app_secret"))
    access_token = dbx_auth_token
    transferData = TransferData(access_token, APP_KEY, APP_SECRET)
    file_name_to = file_name
    file_path_to = '/' + folder + '/' + file_name_to

    file_from = file_path_from
    file_to = file_path_to

    transferData.upload_file(file_from, file_to)
Code Example #6
def start_autosynch_loop(encr_pass=None):
    #Retrieve encr_pass from the .temp file when running on Unix; otherwise encr_pass is passed in as an argument
    if os.name != 'nt':
        # Daemonize: os.fork() returns 0 in the child and the child's PID in the
        # parent, so the parent exits and the child keeps running in the background.
        if os.fork():
            sys.exit()
        time.sleep(1)
        print(
            'The autosynch process has been started and will continue running in the background...'
        )

        path_params = config(filename="encrypted_settings.ini", section="path")
        TEMP_FILE_PATH = str(path_params.get("temp_file_path"))

        #Retrieve passphrase from the .temp file
        with open(TEMP_FILE_PATH, "r") as f:
            passphrase_input = f.read()

        encr_pass = passphrase_input

    while True:
        interval = 3600  #number of seconds to wait between runs
        task_start = time.time()
        # Retrieve list of credentials for user DB's with last synch older than n (specified in .ini)
        retrieve_decrypt_creds(get_databases_list(encr_pass), encr_pass)
        task_end = time.time()

        previous_task_duration = task_end - task_start
        #Previous run time exceeds interval, start next run immediately.
        if previous_task_duration > interval:
            with ConsolidatedProgressStdoutRedirection():
                print((str(datetime.now()) + '\n---- Last run took ' +
                       str(previous_task_duration) +
                       ' seconds, which exceeds the ' + str(interval) +
                       ' seconds interval. Starting the next run now.----\n'))
            retrieve_decrypt_creds(get_databases_list(encr_pass), encr_pass)
        #Previous run time is less than the interval. Sleep for the time difference.
        else:
            with ConsolidatedProgressStdoutRedirection():
                print((str('\n---- Previous run finished at:' +
                           str(datetime.now())) + ', and took ' +
                       str(previous_task_duration) +
                       ' seconds to complete. Next run will start in ' +
                       str(interval - previous_task_duration) +
                       ' seconds.----\n'))
            time.sleep((task_start + interval) - task_end)
Code Example #7
def check_user_exists(ath_un, encr_pass):

    conn_localhost = None

    sql_check_user_exists = """
    SELECT ath_un FROM db_info WHERE ath_un = %s;
    """

    try:
        params = config(filename="encrypted_settings.ini",
                        section="postgresql",
                        encr_pass=encr_pass)
        postgres_db = params.get("database")
        postgres_un = params.get("user")
        postgres_pw = params.get("password")

        conn_localhost = psycopg2.connect(dbname=postgres_db,
                                          user=postgres_un,
                                          password=postgres_pw)
        conn_localhost.autocommit = True

        # create a cursor
        cur_localhost = conn_localhost.cursor()

        # execute a statement
        cur_localhost.execute(sql_check_user_exists, (ath_un, ))

        result = cur_localhost.fetchone()
        if result is not None:  #User record exists
            user_exists = True
        else:
            user_exists = False

        # close the communication with the PostgreSQL
        cur_localhost.close()
        return user_exists
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn_localhost is not None:
            conn_localhost.close()
Code Example #8
def insert_last_synch_timestamp(ath_un, encr_pass, db_name):
    last_synch = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    conn_localhost = None

    sql_postgres_db = """     
    UPDATE db_info SET last_synch = %s where db_name= %s;
    """
    #Insert last_synch into postgres/db_info table
    try:
        params = config(filename="encrypted_settings.ini",
                        section="postgresql",
                        encr_pass=encr_pass)
        postgres_db = params.get("database")
        postgres_un = params.get("user")
        postgres_pw = params.get("password")

        conn_localhost = psycopg2.connect(dbname=postgres_db,
                                          user=postgres_un,
                                          password=postgres_pw)
        conn_localhost.autocommit = True

        # create a cursor
        cur_localhost = conn_localhost.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print(
                'Inserting last_synch timestamp into postgres/db_info table:')

        cur_localhost.execute(sql_postgres_db, (last_synch, db_name))

        # close the communication with the PostgreSQL
        cur_localhost.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn_localhost is not None:
            conn_localhost.close()
Code Example #9
def ath_auth_login(ath_un, ath_pw, encr_pass):
    conn_localhost = None
    sql_select_usr_pwd = "SELECT ath_pw FROM db_info WHERE ath_un = %s;"

    query_params = (ath_un, )

    try:
        # connect to the PostgreSQL server
        params = config(filename="encrypted_settings.ini",
                        section="postgresql",
                        encr_pass=encr_pass)
        postgres_db = params.get("database")
        superuser_un = params.get("user")
        superuser_pw = params.get("password")

        conn_localhost = psycopg2.connect(dbname=postgres_db,
                                          user=superuser_un,
                                          password=superuser_pw)
        conn_localhost.autocommit = True

        # create a cursor
        cur = conn_localhost.cursor()
        cur.execute(sql_select_usr_pwd, query_params)
        result = cur.fetchone()
        cur.close()
        if result is not None and check_password_hash(result[0], ath_pw):
            return ath_un
        else:
            return None
    except Exception as error:
        print((str(datetime.datetime.now()) + ' [' +
               sys._getframe().f_code.co_name + ']' +
               ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
               '  ' + str(error)))
        return None
    finally:
        if conn_localhost is not None:
            conn_localhost.close()
Code Example #10
import os
import datetime
import Crypto.Random
from Crypto.Cipher import AES
from db_encrypt import generate_key, pad_text, unpad_text, str2md5
import base64

#----Crypto Variables----
# salt size in bytes
SALT_SIZE = 16
# number of iterations in the key generation
NUMBER_OF_ITERATIONS = 2000  # PG:Consider increasing number of iterations
# the size multiple required for AES
AES_MULTIPLE = 16

path_params = config(filename="encrypted_settings.ini", section="path")
PID_FILE_DIR = path_params.get("pid_file_dir")


def encrypt(plaintext, password):
    salt = Crypto.Random.get_random_bytes(SALT_SIZE)
    #iv = Crypto.Random.get_random_bytes(AES.block_size)
    key = generate_key(password, salt, NUMBER_OF_ITERATIONS)
    cipher = AES.new(key, AES.MODE_ECB)  #PG: Consider changing to MODE_CBC
    padded_plaintext = pad_text(plaintext, AES_MULTIPLE)
    ciphertext = cipher.encrypt(padded_plaintext)
    ciphertext_with_salt = salt + ciphertext
    return ciphertext_with_salt
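

# A minimal decrypt counterpart, sketched as an assumption (it is not shown in the
# original module): it relies on the salt that encrypt() prepends to the ciphertext
# and on the generate_key/unpad_text helpers imported from db_encrypt above. The
# single-argument unpad_text signature is assumed to mirror pad_text.
def decrypt(ciphertext_with_salt, password):
    salt = ciphertext_with_salt[:SALT_SIZE]
    ciphertext = ciphertext_with_salt[SALT_SIZE:]
    key = generate_key(password, salt, NUMBER_OF_ITERATIONS)
    cipher = AES.new(key, AES.MODE_ECB)  # must match the mode used in encrypt()
    padded_plaintext = cipher.decrypt(ciphertext)
    return unpad_text(padded_plaintext)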


Code Example #11
@processify
def dwnld_insert_oura_data(ath_un,db_host,db_name,superuser_un,superuser_pw,oura_refresh_token,start_date_dt,end_date_dt,save_pwd,encr_pass):

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)
    
    oura_params = config(filename="encrypted_settings.ini", section="oura",encr_pass=encr_pass)
    OURA_CLIENT_ID = str(oura_params.get("oura_client_id"))
    OURA_CLIENT_SECRET = str(oura_params.get("oura_client_secret"))
    OURA_TOKEN_URL = str(oura_params.get("oura_token_url"))
    
    #Refresh the access token
    payload = dict(grant_type='refresh_token', refresh_token=oura_refresh_token,client_id=OURA_CLIENT_ID,client_secret=OURA_CLIENT_SECRET)
    refresh = requests.post(OURA_TOKEN_URL, data=payload)
    response = refresh.json()
    access_token = response['access_token']
    refresh_token = response['refresh_token']

    if save_pwd == True:
        encrypted_refresh_token = base64.b64encode(encrypt(refresh_token, encr_pass))
        encrypted_refresh_token = encrypted_refresh_token.decode('utf-8')
    else:
        encrypted_refresh_token = None

    start_date = datetime.datetime.strftime(start_date_dt,"%Y-%m-%d")
    end_date = datetime.datetime.strftime(end_date_dt,"%Y-%m-%d")

    conn = psycopg2.connect(dbname=db_name, host=db_host, user=superuser_un,password=superuser_pw)
 
    sql_insert_oura_refresh_token = """

        DO
            $do$
                BEGIN
                    IF EXISTS (SELECT id FROM athlete WHERE ath_un = %s) THEN
                        UPDATE athlete SET oura_refresh_token = %s where ath_un= %s;
                    END IF;
                END
            $do$

    """

    sql_insert_sleep_summary = """

        INSERT INTO oura_sleep_daily_summary(athlete_id,summary_date,period_id,is_longest,timezone,bedtime_start,bedtime_end,score,score_total,score_disturbances,
        score_efficiency,score_latency,score_rem,score_deep,score_alignment,total,duration,awake,light,rem,deep,onset_latency,restless,efficiency,midpoint_time,hr_lowest,
        hr_average ,rmssd,breath_average ,temperature_delta ,bedtime_end_delta,midpoint_at_delta,bedtime_start_delta,temperature_deviation,temperature_trend_deviation)

        VALUES
        ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (summary_date,period_id) DO NOTHING;
        
    """

    sql_insert_activity_summary = """

        INSERT INTO oura_activity_daily_summary(athlete_id,summary_date,day_start,day_end,timezone,score,score_stay_active,score_move_every_hour,score_meet_daily_targets,
        score_training_frequency,score_training_volume,score_recovery_time,daily_movement,non_wear,rest,inactive,inactivity_alerts,low,medium,high,steps,cal_total,
        cal_active,met_min_inactive,met_min_low,met_min_medium,met_min_high,average_met,rest_mode_state,to_target_km,target_miles,total,to_target_miles,target_calories,target_km)

        VALUES
        ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (summary_date) DO NOTHING;
        
    """

    sql_insert_readiness_summary = """

        INSERT INTO oura_readiness_daily_summary(athlete_id,summary_date,period_id,score,score_previous_night,score_sleep_balance,score_previous_day,
        score_activity_balance,score_resting_hr,score_hrv_balance,score_recovery_index,score_temperature,rest_mode_state)

        VALUES
        ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (summary_date,period_id) DO NOTHING;
        
    """

    sql_insert_sleep_detail = """

        INSERT INTO oura_sleep_detail(oura_sleep_id,timestamp_gmt,hypnogram_5min,hr_5min,rmssd_5min)

        VALUES
        ((select id from oura_sleep_daily_summary where summary_date=%s and period_id=%s),%s,%s,%s,%s)

        ON CONFLICT (timestamp_gmt) DO NOTHING;
        
    """

    sql_insert_activity_detail = """

        INSERT INTO oura_activity_detail(oura_activity_id,timestamp_gmt,class_5min,met_1min)

        VALUES
        ((select id from oura_activity_daily_summary where summary_date=%s),%s,%s,%s)

        ON CONFLICT (timestamp_gmt) DO NOTHING;
        
    """

    sql_insert_utc_offset = """
        INSERT INTO gmt_local_time_difference (athlete_id,local_date,local_midnight_timestamp,gmt_midnight_timestamp,gmt_local_difference)

        VALUES
            ((select id from athlete where ath_un=%s),%s,%s,%s,%s)

        ON CONFLICT (local_date) DO NOTHING;
    """

    try:       
        cur = conn.cursor()
        cur.execute(sql_insert_oura_refresh_token,(ath_un,encrypted_refresh_token,ath_un))
        conn.commit()       
        cur.close()
    except Exception as e:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))


    #------------------- Retrieve Sleep data ---------------------------

    sleep_data = requests.get('https://api.ouraring.com/v1/sleep?'
                              'start={}&end={}&access_token={}'
                              .format(start_date, end_date, access_token))
    json_sleep = sleep_data.json()
    sleep_df = pd.DataFrame(json_sleep['sleep'])
    ####sleep_df = sleep_df.fillna(0)
    sleep_df = sleep_df.where(pd.notnull(sleep_df), None)

    for row in sleep_df.itertuples():
        awake = row.awake
        bedtime_end = row.bedtime_end
        #convert bedtime_end to utc
        bedtime_end = datetime.datetime.strptime((bedtime_end), "%Y-%m-%dT%H:%M:%S%z")
        bedtime_end = bedtime_end.astimezone(pytz.utc)
        bedtime_end_delta = row.bedtime_end_delta
        bedtime_start = row.bedtime_start
        #convert bedtime_start to utc
        bedtime_start = datetime.datetime.strptime((bedtime_start), "%Y-%m-%dT%H:%M:%S%z")
        bedtime_start = bedtime_start.astimezone(pytz.utc)
        bedtime_start_delta = row.bedtime_start_delta
        breath_average = row.breath_average
        deep = row.deep
        duration = row.duration
        efficiency = row.efficiency
        hr_5min = row.hr_5min
        hr_average = row.hr_average
        hr_lowest = row.hr_lowest
        hypnogram_5min = row.hypnogram_5min
        is_longest = row.is_longest
        light = row.light
        midpoint_at_delta = row.midpoint_at_delta
        midpoint_time = row.midpoint_time
        onset_latency = row.onset_latency
        period_id = row.period_id
        rem = row.rem
        restless = row.restless
        rmssd = row.rmssd
        rmssd_5min = row.rmssd_5min
        score = row.score
        score_alignment = row.score_alignment
        score_deep = row.score_deep
        score_disturbances = row.score_disturbances
        score_efficiency = row.score_efficiency
        score_latency = row.score_latency
        score_rem = row.score_rem
        score_total = row.score_total
        summary_date = row.summary_date
        temperature_delta = row.temperature_delta
        temperature_deviation = row.temperature_deviation
        temperature_trend_deviation = row.temperature_trend_deviation
        timezone = row.timezone
        total = row.total

        with StdoutRedirection(ath_un):
            print('Downloading Oura sleep data from: {}'.format(summary_date))
        with ProgressStdoutRedirection(ath_un):
            print('Downloading Oura sleep data from: {}'.format(summary_date))

        try:       
            cur = conn.cursor()
            cur.execute(sql_insert_sleep_summary,(ath_un,summary_date,period_id,is_longest,timezone,datetime.datetime.strftime(bedtime_start,"%Y-%m-%d %H:%M:%S"),
                       datetime.datetime.strftime(bedtime_end,"%Y-%m-%d %H:%M:%S"),score,score_total,score_disturbances,score_efficiency,score_latency,score_rem,score_deep,
                       score_alignment,total,duration,awake,light,rem,deep,onset_latency,restless,efficiency,midpoint_time,hr_lowest,hr_average ,rmssd,breath_average ,
                       temperature_delta,bedtime_end_delta,midpoint_at_delta,bedtime_start_delta,temperature_deviation,temperature_trend_deviation)
                       )
            conn.commit()       
            cur.close()
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))

        #Create and populate a list of 5min intervals starting from bedtime_start
        rmssd_ints = []
        #add first value (bedtime_start) to 5 min intervals list
        rmssd_ints.append(datetime.datetime.strftime(bedtime_start,"%Y-%m-%d %H:%M:%S"))
        rmssd_int = bedtime_start
        
        for i in range (len(rmssd_5min)-1):
            rmssd_int = rmssd_int + datetime.timedelta(seconds=300)
            rmssd_int_str = datetime.datetime.strftime(rmssd_int,"%Y-%m-%d %H:%M:%S")
            rmssd_ints.append(rmssd_int_str)
        
        #Create sleep detail dataframe
        sleep_detail_df = pd.DataFrame({'timestamp': pd.Series(rmssd_ints), 'hr_5min': pd.Series(hr_5min),'hypnogram_5min': pd.Series(list(hypnogram_5min)),'rmssd_5min': pd.Series(rmssd_5min)})
        ####sleep_detail_df = sleep_detail_df.fillna(0)
        sleep_detail_df = sleep_detail_df.where(pd.notnull(sleep_detail_df), None)
        
        for row in sleep_detail_df.itertuples():
            timestamp_row = row.timestamp
            hr_5min_row = row.hr_5min
            hypnogram_5min_row = row.hypnogram_5min
            rmssd_5min_row = row.rmssd_5min

            try:       
                cur = conn.cursor()
                cur.execute(sql_insert_sleep_detail,(summary_date,period_id,timestamp_row,hypnogram_5min_row,hr_5min_row,rmssd_5min_row))
                conn.commit()       
                cur.close()
            except Exception as e:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))



    #------------------- Retrieve Activity data ---------------------------
    
    
    activity_data = requests.get('https://api.ouraring.com/v1/activity?'
                              'start={}&end={}&access_token={}'
                              .format(start_date, end_date, access_token))
    json_activity = activity_data.json()
    activity_df = pd.DataFrame(json_activity['activity'])
    ####activity_df = activity_df.fillna(0)
    activity_df = activity_df.where(pd.notnull(activity_df), None)

    for row in activity_df.itertuples(): 
        average_met = row.average_met
        cal_active = row.cal_active
        cal_total = row.cal_total
        class_5min = row.class_5min
        daily_movement = row.daily_movement
        day_end = row.day_end
        #convert day_end to utc
        day_end = datetime.datetime.strptime((day_end), "%Y-%m-%dT%H:%M:%S%z")
        day_end = day_end.astimezone(pytz.utc)
        day_start = row.day_start
        #convert day_start to utc
        day_start = datetime.datetime.strptime((day_start), "%Y-%m-%dT%H:%M:%S%z")
        day_start = day_start.astimezone(pytz.utc)
        high = row.high
        inactive = row.inactive
        inactivity_alerts = row.inactivity_alerts
        low = row.low
        medium = row.medium
        met_1min = row.met_1min
        met_min_high = row.met_min_high
        met_min_inactive = row.met_min_inactive
        met_min_low = row.met_min_low
        met_min_medium = row.met_min_medium
        non_wear = row.non_wear
        rest = row.rest
        rest_mode_state = row.rest_mode_state
        try:
            score = row.score
            score_meet_daily_targets = row.score_meet_daily_targets
            score_move_every_hour = row.score_move_every_hour
            score_recovery_time = row.score_recovery_time
            score_stay_active = row.score_stay_active
            score_training_frequency = row.score_training_frequency
            score_training_volume = row.score_training_volume
        #If Oura rest mode is enabled these score fields are absent from the response
        except AttributeError:
            score=score_meet_daily_targets=score_move_every_hour=score_recovery_time=score_stay_active=score_training_frequency=score_training_volume=None
        steps = row.steps
        summary_date = row.summary_date
        target_calories = row.target_calories
        target_km = row.target_km
        target_miles = row.target_miles
        timezone = row.timezone
        to_target_km = row.to_target_km
        to_target_miles = row.to_target_miles
        total = row.total


        with StdoutRedirection(ath_un):
            print('Downloading Oura activity data from: {}'.format(summary_date))
        with ProgressStdoutRedirection(ath_un):
            print('Downloading Oura activity data from: {}'.format(summary_date))

        try:       
            cur = conn.cursor()
            cur.execute(sql_insert_activity_summary,(ath_un,summary_date,datetime.datetime.strftime(day_start,"%Y-%m-%d %H:%M:%S"),datetime.datetime.strftime(day_end,"%Y-%m-%d %H:%M:%S"),
                        timezone,score,score_stay_active,score_move_every_hour,score_meet_daily_targets,score_training_frequency,score_training_volume,score_recovery_time,daily_movement,
                        non_wear,rest,inactive,inactivity_alerts,low,medium,high,steps,cal_total,cal_active,met_min_inactive,met_min_low,met_min_medium,met_min_high,average_met,
                        rest_mode_state,to_target_km,target_miles,total,to_target_miles,target_calories,target_km)
                        )
            conn.commit()       
            cur.close()

            #Insert utc offset into gmt_local_time_difference table if the record not already present from gc
            gmt_local_difference = datetime.timedelta(minutes=timezone)
            local_midnight = datetime.datetime.strptime(summary_date, '%Y-%m-%d')
            gmt_midnight = local_midnight-gmt_local_difference
            local_date = local_midnight.date()

            cur = conn.cursor()
            cur.execute(sql_insert_utc_offset,(ath_un,local_date,local_midnight,gmt_midnight,gmt_local_difference))
            conn.commit()       
            cur.close()
        
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))
  
        #Create and populate a list of 1min intervals starting from day_start
        met_ints = []
        #add first value (day_start) to 1 min intervals list
        met_ints.append(datetime.datetime.strftime(day_start,"%Y-%m-%d %H:%M:%S"))
        met_int = day_start
        
        for i in range (len(met_1min)-1):
            met_int = met_int + datetime.timedelta(seconds=60)
            met_int_str = datetime.datetime.strftime(met_int,"%Y-%m-%d %H:%M:%S")
            met_ints.append(met_int_str)
        
        #Pad class_5min_list to match the length of the 1min lists
        class_5min_list = list(class_5min)
        def insert_element_list(lst, x, n):
            i = n
            while i < len(lst):
                lst.insert(i, x)
                i+= n+1
            return lst
        
        #Add 4 new "0" elements after every original element
        n=1
        for i in range(4):
            insert_element_list(class_5min_list,'0',n)
            n=n+1
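        # After the four passes above every class value except the last is followed
        # by four '0' placeholders, so the 5-minute classes roughly line up with the
        # 1-minute MET samples when combined into the dataframe below.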

        activity_detail_df = pd.DataFrame({'timestamp': pd.Series(met_ints), 'met_1min': pd.Series(met_1min),'class_5min': pd.Series(class_5min_list)})
        ####activity_detail_df = activity_detail_df.fillna(0)
        activity_detail_df = activity_detail_df.where(pd.notnull(activity_detail_df), None)
        
        for row in activity_detail_df.itertuples():
            timestamp_row = row.timestamp
            met_1min_row = row.met_1min
            class_5min_row = row.class_5min

            try:       
                cur = conn.cursor()
                cur.execute(sql_insert_activity_detail,(summary_date,timestamp_row,class_5min_row,met_1min_row))
                conn.commit()       
                cur.close()
            except Exception as e:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))

    
    #------------------- Retrieve Readiness data ---------------------------
     
    readiness_data = requests.get('https://api.ouraring.com/v1/readiness?'
                              'start={}&end={}&access_token={}'
                              .format(start_date, end_date, access_token))
    json_readiness = readiness_data.json()
    readiness_df = pd.DataFrame(json_readiness['readiness'])
    ####readiness_df = readiness_df.fillna(0)
    readiness_df = readiness_df.where(pd.notnull(readiness_df), None)


    for row in readiness_df.itertuples(): 
        period_id = row.period_id
        rest_mode_state = row.rest_mode_state
        score = row.score
        score_activity_balance = row.score_activity_balance
        score_hrv_balance = row.score_hrv_balance
        score_previous_day = row.score_previous_day
        score_previous_night = row.score_previous_night 
        score_recovery_index = row.score_recovery_index
        score_resting_hr = row.score_resting_hr
        score_sleep_balance = row.score_sleep_balance 
        score_temperature = row.score_temperature
        summary_date = row.summary_date

        with StdoutRedirection(ath_un):
            print('Downloading Oura readiness data from: {}'.format(summary_date))
        with ProgressStdoutRedirection(ath_un):
            print('Downloading Oura readiness data from: {}'.format(summary_date))

        try:       
            cur = conn.cursor()
            cur.execute(sql_insert_readiness_summary,(ath_un,summary_date,period_id,score,score_previous_night,score_sleep_balance,score_previous_day,
                    score_activity_balance,score_resting_hr,score_hrv_balance,score_recovery_index,score_temperature,rest_mode_state)
                        )
            conn.commit()       
            cur.close()

        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))
Code Example #12
def mm_data_insert(output, start_date, end_date, ath_un, encr_pass,
                   mm_dbx_link, save_pwd, db_host, db_name, superuser_un,
                   superuser_pw):
    # Before executing the script go to your Dropbox Apps/Mind Monitor folder.
    # Right click on the folder, select the 'Share' option and copy the generated link.
    # Assign the link string to the 'mm_dbx_link' parameter.
    user_output = os.path.join(output, ath_un)
    download_folder = os.path.join(user_output, 'MM_Historical_EEG')
    archive_folder = os.path.join(download_folder, 'Archive')

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)

    path_params = config(filename="encrypted_settings.ini", section="archive")
    preserve_files = str(path_params.get("preserve_files"))

    if save_pwd == True:
        encrypted_link = base64.b64encode(encrypt(mm_dbx_link, encr_pass))
        encrypted_link = encrypted_link.decode('utf-8')
    else:
        encrypted_link = None

    #PG: insert MM export link into database
    mm_user_insert(encrypted_link, ath_un, db_host, db_name, superuser_un,
                   superuser_pw, encr_pass)

    # Download the MM export csv file from dropbox using share link
    mm_dbx_link = mm_dbx_link[:-1] + '1'  # Replace the 0 at the end of the link with 1. Will download as a .zip file.
    response = urllib.request.urlopen(mm_dbx_link)

    # Extract the downloaded folder in to variable
    compressedFile = io.BytesIO()
    compressedFile.write(response.read())
    compressedFile.seek(0)

    #Check whether the date variables are of string or datetime type
    if type(start_date) == str:  #Script run as standalone
        start_date_dt = datetime.datetime.strptime(start_date, "%Y-%m-%d")
    else:  #Script run from flask server (variable passed as datetime type)
        start_date_dt = start_date
    if type(end_date) == str:  #Script run as standalone
        end_date_dt = datetime.datetime.strptime(end_date, "%Y-%m-%d")
    else:  #Script run from flask server (variable passed as datetime type)
        end_date_dt = end_date

    # Create output directory (if it does not already exist).
    if not os.path.exists(download_folder):
        os.makedirs(download_folder)

    # Create Archive directory (if it does not already exist).
    if preserve_files == "true":
        if not os.path.exists(archive_folder):
            os.makedirs(archive_folder)

    with zipfile.ZipFile(compressedFile, 'r') as z:
        #List the content of the zip file
        zipfile_content = z.namelist()
        #Iterate through list and only extract files within date range
        for item in zipfile_content:
            #Extract date from filename
            item_date = item[12:22]
            if item_date != '':
                item_date_dt = datetime.datetime.strptime(
                    item_date, "%Y-%m-%d")
            else:
                continue
            # Check if the file date is within the download Date range.
            if item_date_dt > end_date_dt + datetime.timedelta(
                    days=1) or item_date_dt < start_date_dt:
                continue
            else:
                #PG: Check whether the data from this file "file_path" have been inserted into to DB during one of the previous runs
                data_file_exists = check_data_file_exists(
                    os.path.join(download_folder, item), ath_un, db_host,
                    db_name, superuser_un, superuser_pw, encr_pass)
                if data_file_exists == True:
                    with StdoutRedirection(ath_un):
                        print(('{} already exists in {}. Skipping.'.format(
                            item, download_folder)))
                    with ProgressStdoutRedirection(ath_un):
                        print(('{} already exists in {}. Skipping.'.format(
                            item, download_folder)))
                    continue
                z.extract(item, download_folder)
                #Insert filepath into the db
                try:
                    data_file_path_insert(os.path.join(download_folder,
                                                       item), ath_un, db_host,
                                          db_name, superuser_un, superuser_pw,
                                          encr_pass)
                except Exception as e:
                    with ErrorStdoutRedirection(ath_un):
                        print((str(datetime.datetime.now()) + ' [' +
                               sys._getframe().f_code.co_name + ']' +
                               ' Error on line {}'.format(
                                   sys.exc_info()[-1].tb_lineno) + '  ' +
                               str(e)))

    for filename in os.listdir(download_folder):
        if filename.endswith(".zip"):
            with zipfile.ZipFile(os.path.join(download_folder, filename),
                                 'r') as z:
                z.extractall(download_folder)
            os.remove(os.path.join(download_folder, filename))
            with StdoutRedirection(ath_un):
                print((
                    'The content of \"{}\" extracted into csv and the original removed'
                    .format(filename)))
            with ProgressStdoutRedirection(ath_un):
                print((
                    'The content of \"{}\" extracted into csv and the original removed'
                    .format(filename)))

    db_name = str(str2md5(ath_un)) + '_Athlete_Data_DB'

    conn = None

    # connect to the PostgreSQL server
    conn = psycopg2.connect(dbname=db_name,
                            host=db_host,
                            user=superuser_un,
                            password=superuser_pw)

    sql = """

        INSERT INTO mind_monitor_eeg(timestamp,delta_tp9,
        delta_af7,delta_af8,delta_tp10,athlete_id,theta_tp9,theta_af7,theta_af8,theta_tp10,alpha_tp9,alpha_af7,alpha_af8,alpha_tp10,beta_tp9,beta_af7,beta_af8,
        beta_tp10,gamma_tp9,gamma_af7,gamma_af8,gamma_tp10,raw_tp9,raw_af7,raw_af8,raw_tp10,aux_right,accelerometer_x,accelerometer_y,accelerometer_z,
        gyro_x,gyro_y,gyro_z,head_band_on,hsi_tp9,hsi_af7,hsi_af8,hsi_tp10,battery,elements,timestamp_gmt)

        VALUES
        (%s,%s,%s,%s,%s,(select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
        ,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (athlete_id,timestamp) DO NOTHING;
        
    """

    sql_local2gmt = """

        SELECT gmt_local_difference FROM gmt_local_time_difference WHERE local_date = %s;

    """

    for filename in os.listdir(download_folder):
        if filename.endswith(".csv"):
            # Read/Parse the .csv file and load the data into pandas dataset
            with StdoutRedirection(ath_un):
                print(
                    ('Parsing and inserting the content of \"{}\" into the DB'.
                     format(filename)))
            with ProgressStdoutRedirection(ath_un):
                print(
                    ('Parsing and inserting the content of \"{}\" into the DB'.
                     format(filename)))
            dataset = pd.read_csv(os.path.join(download_folder, filename),
                                  sep=",",
                                  header=None)
            dataset.drop(dataset.index[:1], inplace=True)  #Drop the first row

            for row in dataset.itertuples():
                local_time = row._1
                local_time_dt = datetime.datetime.strptime(
                    (local_time), "%Y-%m-%d %H:%M:%S.%f")
                local_time_str = datetime.datetime.strftime(
                    local_time_dt, "%Y-%m-%d %H:%M:%S.%f")
                local_date_str = datetime.datetime.strftime((local_time_dt),
                                                            "%Y-%m-%d")
                utc_time_str = None
                delta_tp9 = row._2
                delta_af7 = row._3
                delta_af8 = row._4
                delta_tp10 = row._5
                theta_tp9 = row._6
                theta_af7 = row._7
                theta_af8 = row._8
                theta_tp10 = row._9
                alpha_tp9 = row._10
                alpha_af7 = row._11
                alpha_af8 = row._12
                alpha_tp10 = row._13
                beta_tp9 = row._14
                beta_af7 = row._15
                beta_af8 = row._16
                beta_tp10 = row._17
                gamma_tp9 = row._18
                gamma_af7 = row._19
                gamma_af8 = row._20
                gamma_tp10 = row._21
                raw_tp9 = row._22
                raw_af7 = row._23
                raw_af8 = row._24
                raw_tp10 = row._25
                aux_right = row._26
                accelerometer_x = row._27
                accelerometer_y = row._28
                accelerometer_z = row._29
                gyro_x = row._30
                gyro_y = row._31
                gyro_z = row._32
                head_band_on = row._33
                hsi_tp9 = row._34
                hsi_af7 = row._35
                hsi_af8 = row._36
                hsi_tp10 = row._37
                battery = row._38
                elements = row._39

                # Check if the Record Date is within the download Date range.
                if local_time_dt > end_date_dt + datetime.timedelta(
                        days=1) or local_time_dt < start_date_dt:
                    break
                else:
                    try:
                        # create a cursor
                        cur = conn.cursor()
                        # execute a statement
                        cur.execute(sql_local2gmt, (local_date_str, ))
                        result = cur.fetchone()
                        # close the communication with the PostgreSQL
                        conn.commit()
                        cur.close()
                        if result is not None:
                            utc_time_dt = local_time_dt - result[0]
                            utc_time_str = datetime.datetime.strftime(
                                (utc_time_dt), "%Y-%m-%d %H:%M:%S.%f")
                    except (Exception, psycopg2.DatabaseError) as error:
                        with ErrorStdoutRedirection(ath_un):
                            print((str(datetime.datetime.now()) + '  ' + str(error)))
                    try:
                        # create a cursor
                        cur = conn.cursor()
                        # execute a statement
                        cur.execute(
                            sql,
                            (local_time_str, delta_tp9, delta_af7, delta_af8,
                             delta_tp10, ath_un, theta_tp9, theta_af7,
                             theta_af8, theta_tp10, alpha_tp9, alpha_af7,
                             alpha_af8, alpha_tp10, beta_tp9, beta_af7,
                             beta_af8, beta_tp10, gamma_tp9, gamma_af7,
                             gamma_af8, gamma_tp10, raw_tp9, raw_af7, raw_af8,
                             raw_tp10, aux_right, accelerometer_x,
                             accelerometer_y, accelerometer_z, gyro_x, gyro_y,
                             gyro_z, head_band_on, hsi_tp9, hsi_af7, hsi_af8,
                             hsi_tp10, battery, elements, utc_time_str))
                        conn.commit()
                        # close the communication with the PostgreSQL
                        cur.close()

                    except (Exception, psycopg2.DatabaseError) as error:
                        with ErrorStdoutRedirection(ath_un):
                            print((str(datetime.datetime.now()) + '  ' + str(error)))

            if preserve_files == "false":
                #Remove the csv file from download folder
                os.remove(os.path.join(download_folder, filename))
                with StdoutRedirection(ath_un):
                    print((
                        'The content of \"{}\" parsed and inserted into DB and the original csv file removed'
                        .format(filename)))
                with ProgressStdoutRedirection(ath_un):
                    print((
                        'The content of \"{}\" parsed and inserted into DB and the original csv file removed'
                        .format(filename)))

            else:
                #Move the csv to archive folder
                if not os.path.exists(os.path.join(archive_folder, filename)):
                    os.rename(os.path.join(download_folder, filename),
                              os.path.join(archive_folder, filename))
                    with StdoutRedirection(ath_un):
                        print((
                            'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                            .format(filename)))
                    with ProgressStdoutRedirection(ath_un):
                        print((
                            'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                            .format(filename)))
                else:
                    #Remove the csv file from download folder
                    os.remove(os.path.join(download_folder, filename))

    # close the communication with the PostgreSQL
    if conn is not None:
        conn.close()
Code Example #13
def ath_auth_register(ath_un, ath_pw, encr_pass):
    conn_localhost = None
    sql_create_info_db_table = """
    CREATE TABLE IF NOT EXISTS public.db_info
        (id serial NOT NULL,
        ath_un character varying(300) COLLATE pg_catalog."default",
        ath_pw character varying(300) COLLATE pg_catalog."default",
        db_name character varying(300) COLLATE pg_catalog."default",
        db_host character varying(300) COLLATE pg_catalog."default",
        db_un character varying(300) COLLATE pg_catalog."default",
        db_pw character varying(300) COLLATE pg_catalog."default",
        last_synch character varying(300) COLLATE pg_catalog."default",
        db_auto_synch boolean,
        CONSTRAINT db_info_pkey PRIMARY KEY (id),
        CONSTRAINT db_info_unique UNIQUE (db_name))
    WITH (OIDS = FALSE)
    TABLESPACE pg_default;
    ALTER TABLE public.db_info OWNER to postgres;
    """
    sql_insert_db_info_record = "INSERT INTO db_info (ath_un,ath_pw,db_name) VALUES (%s,%s,%s);"

    ath_pw_hash = generate_password_hash(ath_pw)

    db_name = str(str2md5(ath_un)) + '_Athlete_Data_DB'
    query_params = (ath_un, ath_pw_hash, db_name)

    try:
        # connect to the PostgreSQL server
        params = config(filename="encrypted_settings.ini",
                        section="postgresql",
                        encr_pass=encr_pass)
        postgres_db = params.get("database")
        postgres_un = params.get("user")
        postgres_pw = params.get("password")

        conn_localhost = psycopg2.connect(dbname=postgres_db,
                                          user=postgres_un,
                                          password=postgres_pw)
        conn_localhost.autocommit = True

        # create a cursor
        cur = conn_localhost.cursor()

        cur.execute(sql_create_info_db_table)
        cur.execute(sql_insert_db_info_record, query_params)

        # close the communication with the PostgreSQL
        cur.close()

        # Add the user to superset fab
        superset_params = config(filename="encrypted_settings.ini",
                                 section="superset",
                                 encr_pass=encr_pass)
        superset_used = str(superset_params.get("superset"))
        if superset_used == 'true':
            head, sep, tail = ath_un.partition('@')
            usr_name = head
            os.system(
                "superset fab create-user --role ath_role1 --username {} --firstname {} --lastname {} --email {} --password {}"
                .format(ath_un, usr_name, usr_name, ath_un, ath_pw))

    except (Exception, psycopg2.IntegrityError) as error:
        print((str(datetime.datetime.now()) + ' [' +
               sys._getframe().f_code.co_name + ']' +
               ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
               '  ' + str(error)))
        return None
    finally:
        if conn_localhost is not None:
            conn_localhost.close()
    return ath_un
Code Example #14
def create_sample_db(encr_pass):
    conn = None

    try:
        # read connection parameters
        params = config(filename="encrypted_settings.ini",
                        section="postgresql",
                        encr_pass=encr_pass)

        sample_db_host = params.get("sample_db_host")
        sample_db_port = params.get("sample_db_port")
        if sample_db_port == "":
            sample_db_port = "5432"
        db_name = params.get("sample_db")
        superuser_un = params.get("user")
        superuser_pw = params.get("password")
        ro_user = params.get("ro_user")
        ro_password = params.get("ro_password")

        sql_get_lc_collate = "SHOW LC_COLLATE"
        sql_check_db_exists = "select exists(SELECT datname FROM pg_catalog.pg_database WHERE datname = %s);"
        sql_create_db = "CREATE DATABASE \"" + db_name + "\" WITH TEMPLATE = template0 ENCODING = 'UTF8' LC_COLLATE = %s LC_CTYPE = %s;"
        sql_create_ro_role = "CREATE ROLE " + ro_user + " WITH LOGIN PASSWORD \'" + ro_password + "\' NOSUPERUSER INHERIT NOCREATEDB NOCREATEROLE NOREPLICATION VALID UNTIL \'infinity\';"
        sql_grant_permissions_1 = "GRANT CONNECT ON DATABASE " + db_name + " TO " + ro_user + ";"
        sql_grant_permissions_2 = "GRANT USAGE ON SCHEMA public TO " + ro_user + ";"
        sql_grant_permissions_3 = "GRANT SELECT ON ALL TABLES IN SCHEMA public TO " + ro_user + ";"
        sql_grant_permissions_4 = "GRANT SELECT ON ALL SEQUENCES IN SCHEMA public TO " + ro_user + ";"
        sql_revoke_public = "REVOKE ALL ON DATABASE \"" + db_name + "\" FROM PUBLIC;"
        sql_revoke_public_1 = "REVOKE ALL ON DATABASE postgres FROM PUBLIC;"

        # connect to the PostgreSQL server postgres db to check whether sample db exists and create it if doesnt.
        conn = psycopg2.connect(dbname="postgres",
                                user=superuser_un,
                                password=superuser_pw,
                                host=sample_db_host,
                                port=sample_db_port)
        conn.autocommit = True

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        with ConsolidatedProgressStdoutRedirection():
            print('Checking whether the sample DB already exists')

        cur.execute(sql_check_db_exists, (db_name, ))
        result = cur.fetchone()
        if result[0] is True:
            db_exists = True
            with ConsolidatedProgressStdoutRedirection():
                print('Sample DB already exists, exiting...')
        else:
            db_exists = False

        if db_exists == False:
            with ConsolidatedProgressStdoutRedirection():
                print("Creating sample DB...")
            cur.execute(sql_get_lc_collate)
            lc_collate = cur.fetchone()
            cur.execute(sql_create_db, (lc_collate[0], lc_collate[0]))
            cur.close()
            conn.close()
            #PG: Read SQL query to restore db schema from file
            with open('db_schema.sql', 'r') as sql_file:
                sql_restore_db_schema = sql_file.read()
            try:
                # connect to the PostgreSQL server (sample_db)
                conn = psycopg2.connect(dbname=db_name,
                                        user=superuser_un,
                                        password=superuser_pw,
                                        host=sample_db_host,
                                        port=sample_db_port)
                conn.autocommit = True
                # create a cursor
                cur = conn.cursor()
                with ConsolidatedProgressStdoutRedirection():
                    print("Restoring sample DB schema...")
                cur.execute(sql_restore_db_schema)
                cur.close()
                cur = conn.cursor()
                with ConsolidatedProgressStdoutRedirection():
                    print(
                        "Creating sample db RO user and granting RO permissions..."
                    )
                cur.execute(sql_create_ro_role)
                cur.close()
                cur = conn.cursor()
                cur.execute(sql_grant_permissions_1)
                cur.execute(sql_grant_permissions_2)
                cur.execute(sql_grant_permissions_3)
                cur.execute(sql_grant_permissions_4)
                cur.execute(sql_revoke_public)
                cur.execute(sql_revoke_public_1)
                cur.close()
                conn.close()
            except (Exception, psycopg2.DatabaseError) as error:
                with ConsolidatedProgressStdoutRedirection():
                    print((str(datetime.datetime.now()) + ' [' +
                           sys._getframe().f_code.co_name + ']' +
                           ' Error on line {}'.format(
                               sys.exc_info()[-1].tb_lineno) + '  ' +
                           str(error)))
        else:
            cur.close()
            conn.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ConsolidatedProgressStdoutRedirection():
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn is not None:
            conn.close()
Code Example #15
def get_databases_list(encr_pass):
    conn = None
    databases = []

    sql_get_databases = """
    SELECT
        datname
    FROM
       pg_database 
    WHERE 
       datname like '%_Athlete_Data_DB';
    """
    # Retrieve list of databases with last synch older than n
    sql_get_databases_db_info = """
    SELECT
        db_name
    FROM
       db_info 
    WHERE 
       db_auto_synch = 'true' AND last_synch::timestamp < %s::timestamp;
    """

    try:
        # read connection parameters
        params = config(filename="encrypted_settings.ini",
                        section="postgresql",
                        encr_pass=encr_pass)
        autosynch_params = config(filename="encrypted_settings.ini",
                                  section="autosynch",
                                  encr_pass=encr_pass)

        postgres_db = params.get("database")
        postgres_un = params.get("user")
        postgres_pw = params.get("password")

        interval = int(
            autosynch_params.get("interval")
        )  #The amount of time in seconds to wait before attempting next synch
        time_now = datetime.datetime.now()
        now_less_interval_dt = time_now - datetime.timedelta(seconds=interval)
        now_less_interval_str = now_less_interval_dt.strftime(
            "%Y-%m-%d %H:%M:%S")

        conn = psycopg2.connect(dbname=postgres_db,
                                user=postgres_un,
                                password=postgres_pw)

        # connect to the PostgreSQL server
        with ConsolidatedProgressStdoutRedirection():
            print(
                'Connecting to the PostgreSQL server to get list of databases...'
            )

        # create a cursor
        cur = conn.cursor()
        # Retrieve list of databases with last synch older than n
        cur.execute(sql_get_databases_db_info, (now_less_interval_str, ))
        conn.commit()
        databases = cur.fetchall()

    except (Exception, psycopg2.DatabaseError) as error:
        with ConsolidatedProgressStdoutRedirection():
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn is not None:
            conn.close()

    return databases
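A minimal usage sketch, not part of the original source: the tuples returned by get_databases_list() can be unpacked to drive a per-database auto-synch loop. The synch_db() call below is a hypothetical placeholder, and encr_pass is assumed to hold the settings passphrase.

# Hypothetical auto-synch driver; synch_db() and encr_pass are assumptions.
for (db_name,) in get_databases_list(encr_pass):
    print('Database due for auto-synch: {}'.format(db_name))
    # synch_db(db_name)  # hypothetical per-database synch routine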
Code Example #16
def glimp_data_insert(output, start_date, end_date, ath_un, encr_pass,
                      glimp_dbx_link, save_pwd, db_host, db_name, superuser_un,
                      superuser_pw):
    # Before executing the script, go to your Dropbox Apps/Glimp folder and locate the GlicemiaMisurazioni.csv.gz file.
    # Right-click on the file, select the 'Share' option and copy the generated link.
    # Assign the link string to the 'glimp_dbx_link' parameter.
    glimp_export_csv = 'GlicemiaMisurazioni.csv'
    glimp_export_csv_gz = 'GlicemiaMisurazioni.csv.gz'
    user_output = os.path.join(output, ath_un)
    download_folder = os.path.join(user_output, 'CGM_Historical_BG')
    archive_folder = os.path.join(download_folder, 'Archive')
    glimp_file_path = os.path.join(download_folder, glimp_export_csv)
    glimp_file_path_gz = os.path.join(download_folder, glimp_export_csv_gz)

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)

    path_params = config(filename="encrypted_settings.ini", section="archive")
    preserve_files = str(path_params.get("preserve_files"))

    if save_pwd == True:
        encrypted_link = base64.b64encode(encrypt(glimp_dbx_link, encr_pass))
        encrypted_link = encrypted_link.decode('utf-8')
    else:
        encrypted_link = None

    #Check if the date variables are of string or datetime type
    if type(start_date) == str:  #Script runs as standalone
        start_date_dt = datetime.datetime.strptime(start_date, "%Y-%m-%d")
    else:  #Script runs from flask server (variable passed as datetime type)
        start_date_dt = start_date
    if type(end_date) == str:  #Script runs as standalone
        end_date_dt = datetime.datetime.strptime(end_date, "%Y-%m-%d")
    else:  #Script runs from flask server (variable passed as datetime type)
        end_date_dt = end_date

    # Download the glimp export csv file from dropbox using share link
    glimp_dbx_link = glimp_dbx_link[:-1] + '1'  # Replace the 0 at the end of the link with 1.
    response = urllib.request.urlopen(glimp_dbx_link)
    # Extract the downloaded file
    compressedFile = io.BytesIO()
    compressedFile.write(response.read())
    compressedFile.seek(0)

    # Create output directory (if it does not already exist).
    if not os.path.exists(download_folder):
        os.makedirs(download_folder)

    # Create Archive directory (if it does not already exist).
    if preserve_files == "true":
        if not os.path.exists(archive_folder):
            os.makedirs(archive_folder)

    with zipfile.ZipFile(compressedFile, 'r') as z:
        #List the content of the zip file
        zipfile_content = z.namelist()
        #Iterate through list and only extract files matching the name
        for item in zipfile_content:
            if item == glimp_export_csv_gz:  #The File is generated by Glimp app
                z.extract(item, download_folder)
                # Write the extracted content to a new csv file
                glimp_export = gzip.GzipFile(glimp_file_path_gz, mode='rb')
                with open(glimp_file_path, 'wb') as f:
                    f.write(glimp_export.read())
                glimp_export.close()
                #os.remove(os.path.join(download_folder,item)) #Delete the GZ file after content extracted
            else:  #The file was manually downloaded from LibreView
                z.extract(item, download_folder)
    db_name = str(str2md5(ath_un)) + '_Athlete_Data_DB'

    conn = None

    # connect to the PostgreSQL server
    conn = psycopg2.connect(dbname=db_name,
                            host=db_host,
                            user=superuser_un,
                            password=superuser_pw)

    sql = """

        INSERT INTO diasend_cgm(athlete_id,timestamp,glucose_nmol_l,glucose_nmol_l_15min_avrg,ketone_nmol_l,data_source,timestamp_gmt)

        VALUES
        ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s)

        ON CONFLICT (athlete_id,timestamp,data_source) DO NOTHING;
        
        """

    sql_local2gmt = """

        SELECT gmt_local_difference FROM gmt_local_time_difference WHERE local_date = %s;

    """

    def parse_dataset(dataset, conn, start_date_dt, end_date_dt, data_source):
        for row in dataset.itertuples():
            glucose_nmol = None
            glucose_nmol_avrg = None
            ketone_nmol = None
            utc_time_str = None
            if data_source == 'glimp_android':
                record_type = row._9  #Current(5) or 15min average(3)
                if record_type == 5:
                    glucose_nmol = round((float(row._6) / 18), 2)
                else:
                    glucose_nmol_avrg = round((float(row._6) / 18), 2)
                local_time = row._2
                epoch_time = row._3
                local_time_dt = datetime.datetime.strptime((local_time),
                                                           "%d/%m/%Y %H.%M.%S")
                local_time_str = datetime.datetime.strftime(
                    (local_time_dt), "%Y-%m-%d %H:%M:%S")
                utc_time_str = datetime.datetime.utcfromtimestamp(
                    epoch_time).strftime('%Y-%m-%d %H:%M:%S')
            else:
                ketone_nmol = row._7
                glucose_nmol = row._6
                glucose_nmol_avrg = row._5
                local_time = row._3
                local_time_dt = local_time.to_pydatetime(
                )  #Convert from pandas to python datetime
                local_time_str = datetime.datetime.strftime(
                    (local_time_dt), "%Y-%m-%d %H:%M:%S")
                local_date_str = datetime.datetime.strftime((local_time_dt),
                                                            "%Y-%m-%d")
                try:
                    # create a cursor
                    cur = conn.cursor()
                    # execute a statement
                    cur.execute(sql_local2gmt, (local_date_str, ))
                    result = cur.fetchone()
                    # close the communication with the PostgreSQL
                    conn.commit()
                    cur.close()
                    if result is not None:
                        utc_time_dt = local_time_dt - result[0]
                        utc_time_str = datetime.datetime.strftime(
                            (utc_time_dt), "%Y-%m-%d %H:%M:%S")
                except (Exception, psycopg2.DatabaseError) as error:
                    with ErrorStdoutRedirection(ath_un):
                        print((str(datetime.datetime.now()) + '  ' + str(error)))

            # Check whether the record date falls within the download date range. This only works if the csv is parsed from the top,
            # with the most recent record at the top.
            if local_time_dt > end_date_dt + datetime.timedelta(days=1):
                continue
            if local_time_dt < start_date_dt:
                break

            with ProgressStdoutRedirection(ath_un):
                print(
                    ('Inserting BG record from Glimp/LibreView. Time: ' +
                     str(local_time_str) + '  Glucose: ' + str(glucose_nmol)))
            with StdoutRedirection(ath_un):
                print(
                    ('Inserting BG record from Glimp/LibreView. Time: ' +
                     str(local_time_str) + '  Glucose: ' + str(glucose_nmol)))

            try:
                # create a cursor
                cur = conn.cursor()
                # execute a statement
                cur.execute(
                    sql,
                    (ath_un, local_time_str, glucose_nmol, glucose_nmol_avrg,
                     ketone_nmol, data_source, utc_time_str))
                conn.commit()
                # close the communication with the PostgreSQL
                cur.close()

            except (Exception, psycopg2.DatabaseError) as error:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.datetime.now()) + '  ' + str(error)))

    # Iterate through files in the download folder
    for item in os.listdir(download_folder):
        #The file is an export from Glimp app
        if item == glimp_export_csv:
            #Insert Glimp export link into database
            glimp_user_insert(encrypted_link, ath_un, db_host, db_name,
                              superuser_un, superuser_pw, encr_pass)
            data_source = 'glimp_android'
            # Read/Parse the .csv file
            dataset = pd.read_csv(glimp_file_path,
                                  sep=";",
                                  encoding='UTF-16 LE',
                                  header=None)
            parse_dataset(dataset, conn, start_date_dt, end_date_dt,
                          data_source)
            if preserve_files == "false":
                #Remove the csv file from download folder
                os.remove(os.path.join(download_folder, item))
            else:
                #Move the csv to archive folder
                if not os.path.exists(os.path.join(archive_folder, item)):
                    os.rename(os.path.join(download_folder, item),
                              os.path.join(archive_folder, item))
                else:
                    os.remove(os.path.join(download_folder, item))
        #The file is an export from LibreView
        elif re.match(
                r"^[a-zA-Z0-9]+_glucose_[0-9|-]+.csv",
                item):  #Search for pattern FirstLast_glucose_dd-mm-yyyy.csv
            #Insert LibreView export link into database
            libreview_user_insert(encrypted_link, ath_un, db_host, db_name,
                                  superuser_un, superuser_pw, encr_pass)
            data_source = 'libre_view'
            dataset = pd.read_csv(os.path.join(download_folder, item),
                                  sep=",",
                                  skiprows=1)
            dataset = dataset.iloc[:, [
                0, 1, 2, 3, 4, 5, 15
            ]]  # Keep first 6 columns and column 16 and drop everything else
            dataset['Device Timestamp'] = pd.to_datetime(
                dataset['Device Timestamp'], format="%d-%m-%Y %H:%M")
            dataset = dataset.sort_values(by='Device Timestamp',
                                          ascending=False)
            dataset = dataset.dropna(
                subset=[
                    'Historic Glucose mmol/L', 'Scan Glucose mmol/L',
                    'Ketone mmol/L'
                ],
                thresh=1)  #Drop row if NaN in all three columns
            dataset = dataset.replace({np.nan:
                                       None})  #Replace NaN values with None
            parse_dataset(dataset, conn, start_date_dt, end_date_dt,
                          data_source)
            if preserve_files == "false":
                #Remove the csv file from download folder
                os.remove(os.path.join(download_folder, item))
            else:
                #Move the csv to archive folder
                if not os.path.exists(os.path.join(archive_folder, item)):
                    os.rename(os.path.join(download_folder, item),
                              os.path.join(archive_folder, item))
                else:
                    os.remove(os.path.join(download_folder, item))
        else:
            continue

    # close the communication with the PostgreSQL
    if conn is not None:
        conn.close()
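A minimal invocation sketch for glimp_data_insert(); every literal value below is an illustrative placeholder rather than a value from the original project.

# Hypothetical call; all argument values are placeholders.
glimp_data_insert(output='/data/exports',
                  start_date='2021-01-01',
                  end_date='2021-01-31',
                  ath_un='athlete@example.com',
                  encr_pass='passphrase',
                  glimp_dbx_link='https://www.dropbox.com/s/abc123/GlicemiaMisurazioni.csv.gz?dl=0',
                  save_pwd=True,
                  db_host='localhost',
                  db_name=None,  # recomputed inside the function from ath_un
                  superuser_un='postgres',
                  superuser_pw='postgres')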
Code Example #17
def dwnld_insert_strava_data(ath_un, db_host, db_name, superuser_un,
                             superuser_pw, strava_refresh_token, start_date_dt,
                             end_date_dt, save_pwd, encr_pass):

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)

    strava_params = config(filename="encrypted_settings.ini",
                           section="strava",
                           encr_pass=encr_pass)
    STRAVA_CLIENT_ID = str(strava_params.get("strava_client_id"))
    STRAVA_CLIENT_SECRET = str(strava_params.get("strava_client_secret"))
    STRAVA_TOKEN_URL = str(strava_params.get("strava_token_url"))

    sql_insert_strava_refresh_token = """

        DO
            $do$
                BEGIN
                    IF EXISTS (SELECT id FROM athlete WHERE ath_un = %s) THEN
                        UPDATE athlete SET strava_refresh_token = %s where ath_un= %s;
                    END IF;
                END
            $do$

    """
    sql_insert_activity_summary = """

        INSERT INTO strava_activity_summary(athlete_id,strava_activity_id,strava_athlete_id,name,distance,moving_time,elapsed_time,total_elevation_gain,type,
        workout_type,external_id,upload_id,start_date,start_date_local,timezone,utc_offset,start_latitude,start_longitude,end_latitude,end_longitude,location_city,
        location_state,location_country,map,summary_polyline,trainer,commute,manual,gear_id,average_speed,max_speed,average_cadence,average_temp,average_watts,
        weighted_average_watts,kilojoules,device_watts,average_heartrate,max_heartrate,max_watts,elev_high,elev_low,suffer_score)

        VALUES
        ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (start_date) DO NOTHING;
        
    """

    sql_insert_activity_streams = """

        INSERT INTO strava_activity_streams(activity_id,altitude,cadence,distance,grade_smooth,heartrate,latitude,longitude,moving,temp,time_gmt,velocity_smooth,watts)

        VALUES
        ((select id from strava_activity_summary where strava_activity_id=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (time_gmt) DO NOTHING;
    """

    sql_insert_utc_offset = """
        INSERT INTO gmt_local_time_difference (athlete_id,local_date,local_midnight_timestamp,gmt_midnight_timestamp,gmt_local_difference)

        VALUES
            ((select id from athlete where ath_un=%s),%s,%s,%s,%s)

        ON CONFLICT (local_date) DO NOTHING;
        """

    sql_timezone = """
        INSERT INTO timezones(gc_activity_id,strava_activity_id,timestamp_local,timestamp_gmt,timezone,long_degr,lat_degr,alt_avrg,end_time_gmt)
        
        VALUES
        (null,(select id from strava_activity_summary where strava_activity_id=%s),%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (timestamp_gmt,long_degr,lat_degr) DO NOTHING;
        """

    conn = psycopg2.connect(dbname=db_name,
                            host=db_host,
                            user=superuser_un,
                            password=superuser_pw)

    def refresh_oauth_tokens():
        #Refresh the access token
        payload = dict(grant_type='refresh_token',
                       refresh_token=strava_refresh_token,
                       client_id=STRAVA_CLIENT_ID,
                       client_secret=STRAVA_CLIENT_SECRET)
        refresh = requests.post(STRAVA_TOKEN_URL, data=payload)
        response = refresh.json()
        oauth_token = response['access_token']
        refresh_token = response['refresh_token']
        expires_at = response['expires_at']

        if save_pwd == True:
            encrypted_refresh_token = base64.b64encode(
                encrypt(refresh_token, encr_pass))
            encrypted_refresh_token = encrypted_refresh_token.decode('utf-8')
        else:
            encrypted_refresh_token = None

        try:
            cur = conn.cursor()
            cur.execute(sql_insert_strava_refresh_token,
                        (ath_un, encrypted_refresh_token, ath_un))
            conn.commit()
            cur.close()
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print(
                    (str(datetime.now()) + ' [' +
                     sys._getframe().f_code.co_name + ']' +
                     ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                     '  ' + str(e)))

        return oauth_token, expires_at

    oauth_token, expires_at = refresh_oauth_tokens()
    header = {'Authorization': 'Bearer {}'.format(oauth_token)}
    epoch_start_date = start_date_dt.timestamp()
    epoch_end_date = end_date_dt.timestamp()

    #Loop through all activities
    page = 1
    activities_url = "https://www.strava.com/api/v3/activities"

    while True:
        # Retry 3 times if request fails
        tries = 3
        for i in range(tries):
            try:
                #Check if the oauth token is still valid, and refresh if not.
                if int(datetime.now().timestamp()) > expires_at:
                    oauth_token, expires_at = refresh_oauth_tokens()
                    header = {'Authorization': 'Bearer {}'.format(oauth_token)}
                    with ProgressStdoutRedirection(ath_un):
                        print(
                            str(datetime.now()) +
                            'The Strava oauth token expired and has been refreshed'
                        )
                # get page of 200 activities from Strava
                r = requests.get(
                    '{}?before={}&after={}&page={}&per_page=200'.format(
                        activities_url, epoch_end_date, epoch_start_date,
                        str(page)),
                    headers=header)
                sleep_sec = api_rate_limits(r)
            except Exception as e:
                if i < tries - 1:
                    time.sleep(10)
                    continue
                else:
                    with ErrorStdoutRedirection(ath_un):
                        print((str(datetime.now()) + ' [' +
                               sys._getframe().f_code.co_name + ']' +
                               ' Error on line {}'.format(
                                   sys.exc_info()[-1].tb_lineno) + '  ' +
                               str(e)))
                    raise
            break

        if sleep_sec != 0:
            time.sleep(sleep_sec)

        # if no results then exit loop
        if (not r.json()):
            if conn is not None:
                conn.close()
                with StdoutRedirection(ath_un):
                    print('Strava Activities Inserted Successfully')
                with ProgressStdoutRedirection(ath_un):
                    print('Strava Activities Inserted Successfully')
            break

        #Do something with the response/data
        activity_data = json.loads(r.text)

        #assign activity values to variables
        for activity in activity_data:
            if "id" in activity:
                strava_activity_id = activity['id']
                #PG: Check whether the data for this activity have been inserted into the DB during one of the previous runs
                data_exist_for_activity = 'Strava activity {} data'.format(
                    strava_activity_id)
                data_exists = check_data_file_exists(data_exist_for_activity,
                                                     ath_un, db_host, db_name,
                                                     superuser_un,
                                                     superuser_pw, encr_pass)
                if data_exists == True:
                    with StdoutRedirection(ath_un):
                        print((
                            'Strava activity {} data already downloaded and inserted to DB. Skipping.'
                            .format(strava_activity_id)))
                    with ProgressStdoutRedirection(ath_un):
                        print((
                            'Strava activity {} data already downloaded and inserted to DB. Skipping'
                            .format(strava_activity_id)))
                    continue
            else:
                strava_activity_id = None
            if "id" in activity['athlete']:
                strava_athlete_id = activity['athlete']['id']
            else:
                strava_athlete_id = None
            if "name" in activity:
                name = activity['name']
            else:
                name = None
            if "distance" in activity:
                distance = activity['distance']
            else:
                distance = None
            if "moving_time" in activity:
                moving_time = activity['moving_time']
            else:
                moving_time = None
            if "elapsed_time" in activity:
                elapsed_time = activity['elapsed_time']
            else:
                elapsed_time = None
            if "total_elevation_gain" in activity:
                total_elevation_gain = activity['total_elevation_gain']
            else:
                total_elevation_gain = None
            if "type" in activity:
                type = activity['type']
            else:
                type = None
            if "workout_type" in activity:
                workout_type = activity['workout_type']
            else:
                workout_type = None
            if "external_id" in activity:
                external_id = activity['external_id']
            else:
                external_id = None
            if "upload_id" in activity:
                upload_id = activity['upload_id']
            else:
                upload_id = None
            if "start_date" in activity:
                start_date = activity['start_date']
                start_date_dt = datetime.strptime(start_date,
                                                  '%Y-%m-%dT%H:%M:%SZ')
                start_date = datetime.strftime(start_date_dt,
                                               '%Y-%m-%d %H:%M:%S')
            else:
                start_date = None
            if "start_date_local" in activity:
                start_date_local = activity['start_date_local']
                start_date_local_dt = datetime.strptime(
                    start_date_local, '%Y-%m-%dT%H:%M:%SZ')
                start_date_local = datetime.strftime(start_date_local_dt,
                                                     '%Y-%m-%d %H:%M:%S')
            else:
                start_date_local = None
            if "timezone" in activity:
                timezone = activity['timezone']
            else:
                timezone = None
            if "utc_offset" in activity:
                utc_offset = activity['utc_offset']
            else:
                utc_offset = None
            if "start_latlng" in activity:
                start_latlng = activity['start_latlng']
                #if start_latlng is not None: #Changed 2021/10/29
                if start_latlng:
                    start_latitude = start_latlng[0]
                    start_longitude = start_latlng[1]
                else:
                    start_latitude = None
                    start_longitude = None
            else:
                start_latitude = None
                start_longitude = None
            if "end_latlng" in activity:
                end_latlng = activity['end_latlng']
                #if end_latlng is not None: #Changed 2021/10/29
                if end_latlng:
                    end_latitude = end_latlng[0]
                    end_longitude = end_latlng[1]
                else:
                    end_latitude = None
                    end_longitude = None
            else:
                end_latitude = None
                end_longitude = None
            if "location_city" in activity:
                location_city = activity['location_city']
            else:
                location_city = None
            if "location_state" in activity:
                location_state = activity['location_state']
            else:
                location_state = None
            if "location_country" in activity:
                location_country = activity['location_country']
            else:
                location_country = None
            if "id" in activity['map']:
                map = activity['map']['id']
            else:
                map = None
            if "summary_polyline" in activity['map']:
                summary_polyline = activity['map']['summary_polyline']
            else:
                summary_polyline = None
            if "trainer" in activity:
                trainer = activity['trainer']
            else:
                trainer = None
            if "commute" in activity:
                commute = activity['commute']
            else:
                commute = None
            if "manual" in activity:
                manual = activity['manual']
            else:
                manual = None
            if "gear_id" in activity:
                gear_id = activity['gear_id']
            else:
                gear_id = None
            if "average_speed" in activity:
                average_speed = activity['average_speed']
            else:
                average_speed = None
            if "max_speed" in activity:
                max_speed = activity['max_speed']
            else:
                max_speed = None
            if "average_cadence" in activity:
                average_cadence = activity['average_cadence']
            else:
                average_cadence = None
            if "average_temp" in activity:
                average_temp = activity['average_temp']
            else:
                average_temp = None
            if "average_watts" in activity:
                average_watts = activity['average_watts']
            else:
                average_watts = None
            if "weighted_average_watts" in activity:
                weighted_average_watts = activity['weighted_average_watts']
            else:
                weighted_average_watts = None
            if "kilojoules" in activity:
                kilojoules = activity['kilojoules']
            else:
                kilojoules = None
            if "device_watts" in activity:
                device_watts = activity['device_watts']
            else:
                device_watts = None
            if "average_heartrate" in activity:
                average_heartrate = activity['average_heartrate']
            else:
                average_heartrate = None
            if "max_heartrate" in activity:
                max_heartrate = activity['max_heartrate']
            else:
                max_heartrate = None
            if "max_watts" in activity:
                max_watts = activity['max_watts']
            else:
                max_watts = None
            if "elev_high" in activity:
                elev_high = activity['elev_high']
            else:
                elev_high = None
            if "elev_low" in activity:
                elev_low = activity['elev_low']
            else:
                elev_low = None
            if "suffer_score" in activity:
                suffer_score = activity['suffer_score']
            else:
                suffer_score = None

            with StdoutRedirection(ath_un):
                print('Downloading Strava activity {} from: {}'.format(
                    strava_activity_id, start_date_local))
            with ProgressStdoutRedirection(ath_un):
                print('Downloading Strava activity {} from: {}'.format(
                    strava_activity_id, start_date_local))

            #Insert activity summary data to DB
            try:
                cur = conn.cursor()
                cur.execute(
                    sql_insert_activity_summary,
                    (ath_un, strava_activity_id, strava_athlete_id, name,
                     distance, moving_time, elapsed_time, total_elevation_gain,
                     type, workout_type, external_id, upload_id, start_date,
                     start_date_local, timezone, utc_offset, start_latitude,
                     start_longitude, end_latitude, end_longitude,
                     location_city, location_state, location_country, map,
                     summary_polyline, trainer, commute, manual, gear_id,
                     average_speed, max_speed, average_cadence, average_temp,
                     average_watts, weighted_average_watts, kilojoules,
                     device_watts, average_heartrate, max_heartrate, max_watts,
                     elev_high, elev_low, suffer_score))
                conn.commit()
                cur.close()
                # Update the files table
                data_file_path_insert(data_exist_for_activity, ath_un, db_host,
                                      db_name, superuser_un, superuser_pw,
                                      encr_pass)

                #Insert utc offset into gmt_local_time_difference table if the record not already present from gc
                gmt_local_difference = timedelta(seconds=utc_offset)
                local_date = start_date_local_dt.date()
                local_midnight = start_date_local_dt.replace(hour=00,
                                                             minute=00,
                                                             second=00)
                gmt_midnight = local_midnight - gmt_local_difference

                cur = conn.cursor()
                cur.execute(sql_insert_utc_offset,
                            (ath_un, local_date, local_midnight, gmt_midnight,
                             gmt_local_difference))
                conn.commit()
                cur.close()

                #Insert timezone info into timezones table if the record not already present from gc
                if start_latitude is not None:
                    #Calculate average altitude for better weather data accuracy
                    avg_altitude = int((elev_high + elev_low) / 2)
                    #Drop (GMT+hh:mm) from timezone string
                    timezone = re.sub("[(].*[) ]", "", timezone)
                    #Calculate end_time_gmt_str
                    end_time_gmt_dt = start_date_dt + timedelta(
                        seconds=elapsed_time)
                    end_time_gmt_str = datetime.strftime((end_time_gmt_dt),
                                                         "%Y-%m-%d %H:%M:%S")
                    cur = conn.cursor()
                    with StdoutRedirection(ath_un):
                        print(('Inserting timezone info: ' + str(timezone) +
                               ' and local tz timestamp:' + str(local_date)))
                    with ProgressStdoutRedirection(ath_un):
                        print(('Inserting timezone info: ' + str(timezone) +
                               ' and local tz timestamp:' + str(local_date)))
                    cur.execute(
                        sql_timezone,
                        (strava_activity_id, start_date_local, start_date,
                         timezone, start_longitude, start_latitude,
                         avg_altitude, end_time_gmt_str))
                    conn.commit()
                    cur.close()

            except Exception as e:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.now()) + ' [' +
                           sys._getframe().f_code.co_name + ']' +
                           ' Error on line {}'.format(
                               sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))

            #Get all available streams for the activity
            types = 'time,distance,latlng,altitude,velocity_smooth,heartrate,cadence,watts,temp,moving,grade_smooth'
            df_columns = {
                'activity_id', 'time_gmt', 'distance', 'latitude', 'longitude',
                'altitude', 'velocity_smooth', 'heartrate', 'cadence', 'watts',
                'temp', 'moving', 'grade_smooth'
            }
            df_columns = sorted(df_columns)

            # Retry 3 times if request fails
            tries = 3
            for i in range(tries):
                try:
                    #Check if the oauth token is still valid, and refresh if not.
                    if int(datetime.now().timestamp()) > expires_at:
                        oauth_token, expires_at = refresh_oauth_tokens()
                        header = {
                            'Authorization': 'Bearer {}'.format(oauth_token)
                        }
                        with ProgressStdoutRedirection(ath_un):
                            print(
                                str(datetime.now()) +
                                'The Strava oauth token expired and has been refreshed'
                            )
                    streams = requests.get(
                        "https://www.strava.com/api/v3/activities/{}/streams?keys={}"
                        .format(strava_activity_id, types),
                        headers=header)
                    sleep_sec = api_rate_limits(streams)
                except Exception as e:
                    if i < tries - 1:
                        time.sleep(10)
                        continue
                    else:
                        with ErrorStdoutRedirection(ath_un):
                            print((str(datetime.now()) + ' [' +
                                   sys._getframe().f_code.co_name + ']' +
                                   ' Error on line {}'.format(
                                       sys.exc_info()[-1].tb_lineno) + '  ' +
                                   str(e)))
                        raise
                break

            if sleep_sec != 0:
                time.sleep(sleep_sec)

            #TODO: if sleep_sec > 6hrs request a new access_token

            #Do something with the response/data

            activity_streams = json.loads(streams.text)
            #Check if there is data in activity_streams
            if 'message' in activity_streams:
                if "Resource Not Found" in activity_streams['message']:
                    with ErrorStdoutRedirection(ath_un):
                        print(
                            str(datetime.now()) +
                            ' No streams retrieved for {}. Moving onto next activity.'
                            .format(strava_activity_id))
                    continue
                if "Rate Limit Exceeded" in activity_streams['message']:
                    with ErrorStdoutRedirection(ath_un):
                        print(
                            str(datetime.now()) +
                            ' Download Rate limit exceeded. This attempt did not get caught by api_rate_limits() function!'
                        )
                    #Usually due to a failed request on the very last attempt before the limit, and a subsequent successful retry. Too messy; move onto the next activity and see what happens.
                    continue
            else:
                pass

            activity_streams_df = pd.DataFrame(columns=df_columns)

            for stream in activity_streams:
                if "time" in stream['type']:
                    activity_id_list = []
                    time_stream = stream['data']
                    for i in range(len(time_stream)):
                        time_stream[i] = start_date_dt + timedelta(
                            0, int(time_stream[i]))
                        time_stream[i] = datetime.strftime(
                            time_stream[i], '%Y-%m-%d %H:%M:%S')
                        activity_id_list.append(strava_activity_id)
                    activity_streams_df['activity_id'] = activity_id_list
                    activity_streams_df['time_gmt'] = time_stream

                if "distance" in stream['type']:
                    distance_stream = stream['data']
                    activity_streams_df['distance'] = distance_stream

                if "latlng" in stream['type']:
                    latlng_stream = stream['data']
                    latitude = []
                    longitude = []
                    if latlng_stream is not None:
                        for i in range(len(latlng_stream)):
                            latitude.append(latlng_stream[i][0])
                            longitude.append(latlng_stream[i][1])
                    activity_streams_df['latitude'] = latitude
                    activity_streams_df['longitude'] = longitude

                if "altitude" in stream['type']:
                    altitude_stream = stream['data']
                    activity_streams_df['altitude'] = altitude_stream

                if "velocity_smooth" in stream['type']:
                    velocity_smooth_stream = stream['data']
                    activity_streams_df[
                        'velocity_smooth'] = velocity_smooth_stream

                if "heartrate" in stream['type']:
                    heartrate_stream = stream['data']
                    activity_streams_df['heartrate'] = heartrate_stream

                if "cadence" in stream['type']:
                    cadence_stream = stream['data']
                    activity_streams_df['cadence'] = cadence_stream

                if "watts" in stream['type']:
                    watts_stream = stream['data']
                    activity_streams_df['watts'] = watts_stream

                if "temp" in stream['type']:
                    temp_stream = stream['data']
                    activity_streams_df['temp'] = temp_stream

                if "moving" in stream['type']:
                    moving_stream = stream['data']
                    activity_streams_df['moving'] = moving_stream

                if "grade_smooth" in stream['type']:
                    grade_smooth_stream = stream['data']
                    activity_streams_df['grade_smooth'] = grade_smooth_stream

            activity_streams_df = activity_streams_df.reindex(
                sorted(activity_streams_df.columns),
                axis=1)  # Sort df alphabetically
            activity_streams_df = activity_streams_df.where(
                pd.notnull(activity_streams_df), None)  # Replace NaN with None
            df2list = activity_streams_df.values.tolist()

            #Insert activity streams data to DB
            try:
                cur = conn.cursor()
                for sublist in df2list:
                    cur.execute(sql_insert_activity_streams, (sublist))
                    conn.commit()
                cur.close()
            except Exception as e:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.now()) + ' [' +
                           sys._getframe().f_code.co_name + ']' +
                           ' Error on line {}'.format(
                               sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))

        # increment page
        page += 1
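The Strava code above calls an api_rate_limits() helper that is not shown here. A rough sketch of what such a helper could look like, assuming Strava's X-RateLimit-Limit and X-RateLimit-Usage response headers; the thresholds and wait times are assumptions, not the project's implementation.

# Illustrative sketch only; header names, thresholds and wait times are assumptions.
def api_rate_limits_sketch(response):
    limits = response.headers.get('X-RateLimit-Limit', '600,30000').split(',')
    usage = response.headers.get('X-RateLimit-Usage', '0,0').split(',')
    fifteen_min_limit, daily_limit = int(limits[0]), int(limits[1])
    fifteen_min_usage, daily_usage = int(usage[0]), int(usage[1])
    if daily_usage >= daily_limit:
        return 24 * 60 * 60  # crude: wait out the daily window
    if fifteen_min_usage >= fifteen_min_limit:
        return 15 * 60  # wait for the next 15-minute window
    return 0  # within limits, no need to sleep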
Code Example #18
def cstm_data_insert(ath_un, usr_params_str, usr_params_int, db_host,
                     superuser_un, superuser_pw):
    cstm_dbx_link = usr_params_str[0]
    download_folder = usr_params_str[1]
    download_folder = os.path.join(download_folder, 'CSTM_Temp_CSV')
    archive_folder = os.path.join(download_folder, 'Archive')
    table_name = usr_params_str[2]
    date_format = usr_params_str[3]
    timezone = usr_params_str[4]
    datetime_clmn = usr_params_str[5]
    unique = usr_params_int
    dataframes = []

    # Download the csv file from dropbox using share link
    with StdoutRedirection(ath_un):
        print('Downloading custom csv files from: {}'.format(cstm_dbx_link))
    with ProgressStdoutRedirection(ath_un):
        print('Downloading custom csv files from: {}'.format(cstm_dbx_link))
    cstm_dbx_link = cstm_dbx_link[:-1] + '1'  # Replace the 0 at the end of the link with 1.Will download as .zip file.
    table_name = 'cstm_{}'.format(table_name)
    response = urllib.request.urlopen(cstm_dbx_link)
    sniffer = csv.Sniffer()
    db_name = str(str2md5(ath_un)) + '_Athlete_Data_DB'

    archive_params = config(filename="encrypted_settings.ini",
                            section="archive")
    preserve_files = str(archive_params.get("preserve_files"))

    #Insert usr_params to athlete table..
    cstm_user_insert(ath_un, usr_params_str, usr_params_int, table_name,
                     db_host, db_name, superuser_un, superuser_pw)

    conn = None
    # connect to the PostgreSQL server
    conn = psycopg2.connect(dbname=db_name,
                            host=db_host,
                            user=superuser_un,
                            password=superuser_pw)

    sql_select_athlete_id = """
        SELECT id FROM athlete WHERE ath_un = %s;
    """

    # Extract the downloaded file in to variable
    downloadFile = io.BytesIO()
    downloadFile.write(response.read())
    downloadFile.seek(0)  #Move pointer to the beginning

    # Create output directory (if it does not already exist).
    if not os.path.exists(download_folder):
        os.makedirs(download_folder)

    # Create Archive directory (if it does not already exist).
    if preserve_files == "true":
        if not os.path.exists(archive_folder):
            os.makedirs(archive_folder)

    #Insert the data from csv to dataframe
    if zipfile.is_zipfile(downloadFile):
        with zipfile.ZipFile(downloadFile, 'r') as z:
            #List the content of the zip file
            zipfile_content = z.namelist()
            #Iterate through list and only extract files
            for item in zipfile_content:
                z.extract(item, download_folder)
        downloadFile.close()
        for filename in os.listdir(download_folder):
            if os.path.isfile(os.path.join(download_folder, filename)):
                #Detect whether the csv file has a header.
                with open(os.path.join(download_folder, filename), 'r') as f:
                    has_header = sniffer.has_header(f.read())
                    if has_header:
                        f.seek(0)  #Move pointer to the beginning
                        data_count = csv_match_columns2data(ath_un, f)
                        df = pd.read_csv(os.path.join(download_folder,
                                                      filename),
                                         usecols=data_count)
                    else:
                        df = pd.read_csv(os.path.join(download_folder,
                                                      filename),
                                         header=None)
                dataframes.append(df)
                f.close()
                #Archive csv files
                if preserve_files == "false":
                    #Remove the csv file from download folder
                    os.remove(os.path.join(download_folder, filename))
                    with StdoutRedirection(ath_un):
                        print((
                            'The content of \"{}\" parsed and inserted into DB.'
                            .format(filename)))
                    with ProgressStdoutRedirection(ath_un):
                        print((
                            'The content of \"{}\" parsed and inserted into DB.'
                            .format(filename)))
                else:
                    #Move the csv to archive folder and remove from download
                    if os.path.exists(os.path.join(archive_folder, filename)):
                        #Replace the existing file with the updated one
                        os.remove(os.path.join(archive_folder, filename))
                        os.rename(os.path.join(download_folder, filename),
                                  os.path.join(archive_folder, filename))
                        with StdoutRedirection(ath_un):
                            print((
                                'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                                .format(filename)))
                        with ProgressStdoutRedirection(ath_un):
                            print((
                                'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                                .format(filename)))
                    else:
                        #Remove the csv file from download folder
                        os.remove(os.path.join(download_folder, filename))
    else:
        filename = re.search(r'([^\/]+)\?.*$', cstm_dbx_link).group(
            1)  #Extract the filename from the link
        downloadFile.seek(0)  #Move pointer to the beginning
        #Detect whether the csv file has a header.
        wrapper = io.TextIOWrapper(downloadFile, encoding='utf-8')
        has_header = sniffer.has_header(wrapper.read())
        downloadFile.seek(0)  #Move pointer to the beginning
        if has_header:
            data_count = csv_match_columns2data(ath_un, wrapper)
            downloadFile.seek(0)  #Move pointer to the beginning
            df = pd.read_csv(downloadFile, usecols=data_count)
        else:
            df = pd.read_csv(downloadFile, header=None)
        #Archive downloaded File
        if preserve_files == "true":
            downloadFile.seek(0)  #Move pointer to the beginning
            with open(os.path.join(archive_folder, filename), 'wb') as f:
                f.write(downloadFile.read())
                with StdoutRedirection(ath_un):
                    print((
                        'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                        .format(filename)))
                with ProgressStdoutRedirection(ath_un):
                    print((
                        'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                        .format(filename)))
        else:
            with StdoutRedirection(ath_un):
                print(('The content of \"{}\" parsed and inserted into DB.'.
                       format(filename)))
            with ProgressStdoutRedirection(ath_un):
                print(('The content of \"{}\" parsed and inserted into DB.'.
                       format(filename)))
        wrapper.close()
        dataframes.append(df)
        downloadFile.close()

    #Remove duplicates
    dataframes = pd.concat(dataframes).drop_duplicates().reset_index(drop=True)

    #---- Parse and format datetime values ----
    cur = conn.cursor()
    #Iterate through values in df datetime column
    if datetime_clmn is not None:
        datetime_clmn = int(datetime_clmn)
        #set how to interpret the first value in an ambiguous 3-integer date
        #eu date format dd/mm/yyyy
        if date_format == "eu":
            first_val = "day"
        #us date format mm/dd/yyyy
        else:
            first_val = 'month'
        #add timestamp_gmt and timestamp_local columns
        datetime_clmn_gmt = "timestamp_gmt"
        datetime_clmn_local = "timestamp_local"
        dataframes[datetime_clmn_gmt] = np.nan
        dataframes[datetime_clmn_local] = np.nan

        item_row_nr = 0
        for item in dataframes[dataframes.columns[datetime_clmn]]:
            timestamps = datefinder.find_dates(str(item), first=first_val)
            for timestamp in timestamps:
                #If the timestamp is timezone aware
                if timestamp.tzinfo is not None:
                    timestamp_utc = timestamp.astimezone(pytz.UTC)
                    timestamp_utc = datetime.datetime.strftime(
                        timestamp_utc, "%Y-%m-%d %H:%M:%S.%f")
                    timestamp_local = datetime.datetime.strftime(
                        timestamp, "%Y-%m-%d %H:%M:%S.%f %Z")
                #If the timestamp is timezone naive
                else:
                    local_timezone = pytz.timezone(timezone)
                    timestamp_local_tz = local_timezone.localize(timestamp,
                                                                 is_dst=True)
                    timestamp_utc = timestamp_local_tz.astimezone(pytz.UTC)
                    timestamp_utc = datetime.datetime.strftime(
                        timestamp_utc, "%Y-%m-%d %H:%M:%S.%f")
                    timestamp_local = datetime.datetime.strftime(
                        timestamp_local_tz, "%Y-%m-%d %H:%M:%S.%f %Z")

            #insert timestamp_utc and timestamp_local values into the newly created df columns
            row_index = dataframes.index[item_row_nr]
            dataframes.loc[row_index, datetime_clmn_gmt] = timestamp_utc
            dataframes.loc[row_index, datetime_clmn_local] = timestamp_local

            item_row_nr += 1

    cur.execute(sql_select_athlete_id, (ath_un, ))
    result = cur.fetchone()
    conn.commit()
    ath_un_id = result[0]
    dataframes['athlete_id'] = ath_un_id
    cur.close()

    #format column names to comply with postgres rules
    dataframes.rename(columns={i: format_column_name(i)
                               for i in df.columns},
                      inplace=True)

    #Get list of unique column names from column numbers (supplied by user) and add athlete_id column to the list
    for i in unique:
        x = unique.index(i)
        unique[x] = dataframes.columns[i]
    unique.append("athlete_id")

    #Check if the table already exists
    cur = conn.cursor()
    cur.execute(
        "SELECT EXISTS (SELECT FROM pg_tables WHERE tablename  = '{}');".
        format(table_name))
    result = cur.fetchone()
    conn.commit()
    if result[0] is True:
        with StdoutRedirection(ath_un):
            print(
                'The custom table {} already exists. Will now insert the new data'
                .format(table_name))
        with ProgressStdoutRedirection(ath_un):
            print(
                'The custom table {} already exists. Will now insert the new data'
                .format(table_name))
        pass
    else:
        with StdoutRedirection(ath_un):
            print('Creating custom table: {}'.format(table_name))
        with ProgressStdoutRedirection(ath_un):
            print('Creating custom table: {}'.format(table_name))
        cur.execute(gen_tbl_sql(ath_un, dataframes, table_name, unique))
        conn.commit()
        cur.close()

    insert_df_values(ath_un, conn, dataframes, table_name, unique)

    # close the communication with the PostgreSQL
    if conn is not None:
        conn.close()
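Code Example #18 relies on the helpers gen_tbl_sql() and insert_df_values(), which are not shown here. As an illustration only (the dtype mapping and SQL layout are assumptions, not the project's implementation), a CREATE TABLE statement could be derived from the DataFrame along these lines:

# Illustrative sketch; not the project's actual gen_tbl_sql() implementation.
def gen_tbl_sql_sketch(df, table_name, unique):
    # Map pandas dtypes to rough PostgreSQL column types, defaulting to text.
    type_map = {'int64': 'bigint', 'float64': 'double precision',
                'bool': 'boolean', 'datetime64[ns]': 'timestamp'}
    columns = ['"{}" {}'.format(col, type_map.get(str(dtype), 'text'))
               for col, dtype in df.dtypes.items()]
    unique_clause = ', '.join('"{}"'.format(u) for u in unique)
    return 'CREATE TABLE {} ({}, UNIQUE ({}));'.format(
        table_name, ', '.join(columns), unique_clause)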
Code Example #19
def update_autosynch_prefrnc(ath_un, db_host, db_name, superuser_un,
                             superuser_pw, encrypted_superuser_pw,
                             enable_auto_synch, encr_pass):

    ath_user = (ath_un, )

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)

    #Do not save pw to db_info if no autosynch or db_host=localhost
    if enable_auto_synch == True:
        if db_host != 'localhost':
            encrypted_superuser_pw = (encrypted_superuser_pw, )
        else:
            encrypted_superuser_pw = None
    else:
        encrypted_superuser_pw = None

    enable_auto_synch = (enable_auto_synch, )
    conn = None

    sql = """

    UPDATE athlete SET auto_sync = %s where ath_un= %s;
            
    """

    sql_postgres_db = """
    
    UPDATE db_info SET db_auto_synch = %s,db_un = %s,db_pw = %s where db_name= %s;

    """
    #Insert user autosynch preference into user db athlete table
    try:

        # connect to the PostgreSQL server
        conn = psycopg2.connect(dbname=db_name,
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print('Inserting Auto_Synch preferences into PostgreSQL:')
        cur.execute(sql, (enable_auto_synch, ath_user))
        conn.commit()

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn is not None:
            conn.close()

    #Insert user autosynch preference into postgres/db_info table
    try:
        params = config(filename="encrypted_settings.ini",
                        section="postgresql",
                        encr_pass=encr_pass)
        postgres_db = params.get("database")
        postgres_un = params.get("user")
        postgres_pw = params.get("password")

        conn_localhost = psycopg2.connect(dbname=postgres_db,
                                          user=postgres_un,
                                          password=postgres_pw)
        conn_localhost.autocommit = True

        # create a cursor
        cur_localhost = conn_localhost.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print(
                'Inserting Auto_Synch preferences into postgres/db_info table:')

        cur_localhost.execute(
            sql_postgres_db,
            (enable_auto_synch, superuser_un, encrypted_superuser_pw, db_name))

        # close the communication with the PostgreSQL
        cur_localhost.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn_localhost is not None:
            conn_localhost.close()
Code Example #20
def create_user_db(ath_un, ath_pw, db_host, db_name, superuser_un,
                   superuser_pw, encrypted_superuser_pw, save_pwd, encr_pass):
    conn = None
    head, sep, tail = ath_un.partition('@')
    db_username = head

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)

    sql_get_lc_collate = "SHOW LC_COLLATE"
    sql_create_db = "CREATE DATABASE \"" + db_name + "\" WITH TEMPLATE = template0 ENCODING = 'UTF8' LC_COLLATE = %s LC_CTYPE = %s;"
    sql_create_dbuser = "******"" + db_username + "\" WITH PASSWORD \'" + ath_pw + "\';"
    sql_update_db_info_record = "UPDATE db_info SET db_name = %s, db_host = %s, db_un = %s, db_pw = %s, db_auto_synch = %s WHERE ath_un= %s; "

    try:
        conn = psycopg2.connect(dbname='postgres',
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)
        conn.autocommit = True

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print('Creating user database...')

        cur.execute(sql_get_lc_collate)
        lc_collate = cur.fetchone()
        cur.execute(sql_create_db, (lc_collate[0], lc_collate[0]))
        cur.execute(sql_create_dbuser)

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn is not None:
            conn.close()

    #Insert user db connection info into postgres/db_info table
    try:
        params = config(filename="encrypted_settings.ini",
                        section="postgresql",
                        encr_pass=encr_pass)
        postgres_db = params.get("database")
        postgres_un = params.get("user")
        postgres_pw = params.get("password")

        conn_localhost = psycopg2.connect(dbname=postgres_db,
                                          user=postgres_un,
                                          password=postgres_pw)
        conn_localhost.autocommit = True

        # create a cursor
        cur_localhost = conn_localhost.cursor()

        # execute a statement
        with ProgressStdoutRedirection(ath_un):
            print('Inserting DB connection info...')

        cur_localhost.execute(sql_update_db_info_record,
                              (db_name, db_host, superuser_un,
                               encrypted_superuser_pw, save_pwd, ath_un))

        # close the communication with the PostgreSQL
        cur_localhost.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn_localhost is not None:
            conn_localhost.close()
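
The CREATE DATABASE and CREATE USER statements above are built by string concatenation, so the database name, role name, and password end up quoted by hand. As a sketch under that assumption (the helper compose_user_db_ddl is illustrative, not part of the project), the same DDL could be composed with psycopg2.sql so the driver quotes the identifiers:

from psycopg2 import sql

def compose_user_db_ddl(db_name, db_username):
    # Hypothetical helper (not in the original module): quotes the database and
    # role names with sql.Identifier so the driver escapes them, instead of
    # building the DDL by concatenating strings as above.
    create_db = sql.SQL(
        "CREATE DATABASE {} WITH TEMPLATE = template0 ENCODING = 'UTF8' "
        "LC_COLLATE = %s LC_CTYPE = %s"
    ).format(sql.Identifier(db_name))
    create_user = sql.SQL(
        "CREATE USER {} WITH PASSWORD %s"
    ).format(sql.Identifier(db_username))
    return create_db, create_user

The composed statements would then be executed as cur.execute(create_db, (lc_collate[0], lc_collate[0])) and cur.execute(create_user, (ath_pw,)), with psycopg2 interpolating the %s values client-side just as it does for the LC_COLLATE parameters in the original code.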
Code example #21
0
def backup_user_db(db_name, ath_un, output, dbx_auth_token, encr_pass):

    if os.name == 'nt':
        backup_file_path = output + '\\' + ath_un + '\\DB_Backup.gz'
    else:
        backup_file_path = output + '/' + ath_un + '/DB_Backup.gz'

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    # read connection parameters
    params = config(filename="encrypted_settings.ini",
                    section="postgresql",
                    encr_pass=encr_pass)
    superuser_un = params.get("user")
    superuser_pw = params.get("password")
    db_host = params.get("host")

    try:
        with gzip.open(backup_file_path, 'wb') as f:
            with StdoutRedirection(ath_un):
                print('Backing up DB, please wait....')
            with ProgressStdoutRedirection(ath_un):
                print('Backing up DB, please wait....')

            # create .pgpass in user's AppData or home directory
            if os.name == 'nt':
                appdata_dir = os.getenv('APPDATA')
                pgpass_dir = os.path.join(appdata_dir, "postgresql")
                pgpassfile = os.path.join(pgpass_dir, "pgpass.conf")
                if not os.path.exists(pgpass_dir):
                    os.makedirs(pgpass_dir)
                if not os.path.exists(pgpassfile):
                    with open(pgpassfile, 'w+') as pgpass:
                        pgpass.write(db_host + ':5432:*:postgres:' +
                                     superuser_pw)
            else:
                pgpassfile = os.path.expanduser('~') + '/.pgpass'
                if not os.path.exists(pgpassfile):
                    with open(pgpassfile, 'w+') as pgpass:
                        pgpass.write(db_host + ':5432:*:postgres:' +
                                     superuser_pw)
                    os.chmod(pgpassfile, 0o600)
            # execute pg_dump
            popen = subprocess.Popen(
                ['pg_dump', '-h', db_host, '-U', superuser_un, db_name],
                stdout=subprocess.PIPE,
                universal_newlines=True)
            for stdout_line in iter(popen.stdout.readline, ""):
                f.write(stdout_line.encode())
            popen.stdout.close()
            popen.wait()
            try:
                os.remove(pgpassfile)
            except Exception as e:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.datetime.now()) + ' [' +
                           sys._getframe().f_code.co_name + ']' +
                           ' Error on line {}'.format(
                               sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))
            with StdoutRedirection(ath_un):
                print(
                    'DB backup completed. The backup file will be uploaded to Dropbox now.'
                )
            with ProgressStdoutRedirection(ath_un):
                print(
                    'DB backup completed. The backup file will be uploaded to Dropbox now.'
                )
    except Exception as e:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(e)))

    today = datetime.datetime.today().strftime('%Y%m%d')
    download_folder_dbx = 'DB_Backup'
    file_name_to = today + '_DB_Backup.gz'

    dbx_file_exists = check_if_file_exists_in_dbx(file_name_to, dbx_auth_token,
                                                  download_folder_dbx,
                                                  encr_pass)
    if dbx_file_exists:
        with StdoutRedirection(ath_un):
            print((file_name_to + ' already exists in Dropbox, skipping.'))
        with ProgressStdoutRedirection(ath_un):
            print((file_name_to + ' already exists in Dropbox, skipping.'))
    else:
        try:
            with StdoutRedirection(ath_un):
                print(('Uploading ' + file_name_to + ' to Dropbox, please wait.'))
            download_files_to_dbx(backup_file_path, file_name_to,
                                  dbx_auth_token, download_folder_dbx,
                                  encr_pass)
            with StdoutRedirection(ath_un):
                print('DB Backup file uploaded to Dropbox successfully')
            with ProgressStdoutRedirection(ath_un):
                print('DB Backup file uploaded to Dropbox successfully')
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print(
                    (str(datetime.datetime.now()) + ' [' +
                     sys._getframe().f_code.co_name + ']' +
                     ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                     '  ' + str(e)))
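
For completeness, a rough sketch (not part of the original module) of the inverse operation: streaming the gzipped dump produced above back into psql. The function name restore_user_db and its arguments are illustrative, and it assumes the same .pgpass or PGPASSWORD handling as the backup routine:

import gzip
import subprocess

def restore_user_db(backup_file_path, db_host, superuser_un, db_name):
    # Hypothetical restore counterpart: decompress the backup and feed it to psql.
    with gzip.open(backup_file_path, 'rb') as f:
        psql = subprocess.Popen(
            ['psql', '-h', db_host, '-U', superuser_un, '-d', db_name],
            stdin=subprocess.PIPE)
        for chunk in iter(lambda: f.read(64 * 1024), b''):
            psql.stdin.write(chunk)
        psql.stdin.close()
        psql.wait()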
Code example #22
0
def retrieve_decrypt_creds(synch_req_db_list, encr_pass, full_synch=False):
    sql_get_creds = """
    SELECT 
        gc_email,gc_password,mfp_username,mfp_password,diasend_username,diasend_password,dropbox_access_token,glimp_export_link,libreview_export_link,mm_export_link,oura_refresh_token,strava_refresh_token,ath_un
    FROM
        athlete;
    """

    sql_select_dbsu_creds = """
    SELECT
        db_host,db_un,db_pw
    FROM
        db_info
    WHERE
       db_name = %s;
    """

    for row in synch_req_db_list:
        for db in row:
            try:
                #Reset all user variables
                conn = None
                dbsu_conn = None
                ath_un = None

                gc_un = None
                gc_encr_pw = None
                gc_decr_pw = None

                mfp_un = None
                mfp_encr_pw = None
                mfp_decr_pw = None

                cgm_un = None
                cgm_encr_pw = None
                cgm_decr_pw = None

                dbx_encr_token = None
                dbx_decr_token = None

                oura_encr_token = None
                oura_decr_token = None

                strava_encr_token = None
                strava_decr_token = None

                glimp_encr_export_link = None
                glimp_decr_export_link = None

                libreview_encr_export_link = None
                libreview_decr_export_link = None

                mm_encr_export_link = None
                mm_decr_export_link = None

                # read connection parameters
                dbsu_params = config(filename="encrypted_settings.ini",
                                     section="postgresql",
                                     encr_pass=encr_pass)
                superuser_un = dbsu_params.get("user")
                superuser_pw = dbsu_params.get("password")

                dbsu_conn = psycopg2.connect(dbname='postgres',
                                             user=superuser_un,
                                             password=superuser_pw)
                dbsu_cur = dbsu_conn.cursor()
                dbsu_cur.execute(sql_select_dbsu_creds, (db, ))
                dbsu_conn.commit()
                dbsu_result = dbsu_cur.fetchone()
                db_host = dbsu_result[0]
                db_un = dbsu_result[1]
                db_pw = dbsu_result[2]

                if db_host != 'localhost':  #User database is hosted remotely
                    superuser_un = db_un
                    superuser_pw = decrypt(base64.b64decode(db_pw), encr_pass)
            except (Exception, psycopg2.DatabaseError) as error:
                with ConsolidatedProgressStdoutRedirection():
                    print('Autosynch DB Error: ' + str(error))
            finally:
                if dbsu_conn is not None:
                    dbsu_conn.close()

            try:
                # connect to the PostgreSQL server
                conn = psycopg2.connect(dbname=db,
                                        host=db_host,
                                        user=superuser_un,
                                        password=superuser_pw)
                cur = conn.cursor()
                cur.execute(sql_get_creds)
                conn.commit()
                result = cur.fetchone()
                gc_un = result[0]
                gc_encr_pw = result[1]
                mfp_un = result[2]
                mfp_encr_pw = result[3]
                cgm_un = result[4]
                cgm_encr_pw = result[5]
                dbx_encr_token = result[6]
                glimp_encr_export_link = result[7]
                libreview_encr_export_link = result[8]
                mm_encr_export_link = result[9]
                oura_encr_token = result[10]
                strava_encr_token = result[11]
                ath_un = result[12]

                #Now decrypt (gc_encr_pw,mfp_encr_pw,dbx_encr_token and others)
                if gc_encr_pw is not None:
                    gc_decr_pw = decrypt(base64.b64decode(gc_encr_pw),
                                         encr_pass)
                if mfp_encr_pw is not None:
                    mfp_decr_pw = decrypt(base64.b64decode(mfp_encr_pw),
                                          encr_pass)
                if cgm_encr_pw is not None:
                    cgm_decr_pw = decrypt(base64.b64decode(cgm_encr_pw),
                                          encr_pass)
                if dbx_encr_token is not None:
                    dbx_decr_token = decrypt(base64.b64decode(dbx_encr_token),
                                             encr_pass)
                if glimp_encr_export_link is not None:
                    glimp_decr_export_link = decrypt(
                        base64.b64decode(glimp_encr_export_link), encr_pass)
                if libreview_encr_export_link is not None:
                    libreview_decr_export_link = decrypt(
                        base64.b64decode(libreview_encr_export_link),
                        encr_pass)
                if mm_encr_export_link is not None:
                    mm_decr_export_link = decrypt(
                        base64.b64decode(mm_encr_export_link), encr_pass)
                if oura_encr_token is not None:
                    oura_decr_token = decrypt(
                        base64.b64decode(oura_encr_token), encr_pass)
                if strava_encr_token is not None:
                    strava_decr_token = decrypt(
                        base64.b64decode(strava_encr_token), encr_pass)

                ###Execute auto synch from "main_data_autosynch.py"###
                auto_synch(ath_un, db, db_host, superuser_un, superuser_pw,
                           gc_un, gc_decr_pw, mfp_un, mfp_decr_pw, cgm_un,
                           cgm_decr_pw, glimp_decr_export_link,
                           libreview_decr_export_link, mm_decr_export_link,
                           dbx_decr_token, oura_decr_token, strava_decr_token,
                           encr_pass, full_synch)

            except (Exception, psycopg2.DatabaseError) as error:
                with ConsolidatedProgressStdoutRedirection():
                    print(('Autosynch DB Error: ' + str(error)))

            finally:
                if conn is not None:
                    conn.close()
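
A rough usage sketch (not part of the original code) of how this autosynch entry point might be driven. The db_info query and the db_auto_synch comparison are assumptions about the schema, inferred from the other examples above; the nested loop in retrieve_decrypt_creds only requires a list of rows, each holding one or more database names:

# Illustrative driver only: the real query that builds synch_req_db_list is not
# shown in this listing; db_auto_synch is assumed to hold a 'true'/'false' flag.
params = config(filename="encrypted_settings.ini",
                section="postgresql",
                encr_pass=encr_pass)
conn = psycopg2.connect(dbname=params.get("database"),
                        user=params.get("user"),
                        password=params.get("password"))
cur = conn.cursor()
cur.execute("SELECT db_name FROM db_info WHERE db_auto_synch = 'true';")
synch_req_db_list = cur.fetchall()      # e.g. [('user_db_1',), ('user_db_2',)]
cur.close()
conn.close()

retrieve_decrypt_creds(synch_req_db_list, encr_pass, full_synch=False)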