Code example #1
def delete_all_db_data(ath_un, mfp_username, db_host, db_name, superuser_un,
                       superuser_pw, encr_pass):
    conn = None

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    sql = """
         SELECT truncate_schema('public');
         """
    try:

        # connect to the PostgreSQL server
        conn = psycopg2.connect(dbname=db_name,
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        with StdoutRedirection(ath_un):
            print('Deleting DB data for user: ' + ath_un + '....')  # ath_un assumed; name redacted in source
        cur.execute(sql)
        conn.commit()

        # close the communication with the PostgreSQL
        cur.close()

    except Exception as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))
    finally:
        if conn is not None:
            conn.close()
            with StdoutRedirection(ath_un):
                print('All DB data for user: ' + ath_un + ' deleted successfully.')  # ath_un assumed; redacted in source
            with ProgressStdoutRedirection(ath_un):
                print('All DB data for user: ' + ath_un + ' deleted successfully.')
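
Note that delete_all_db_data assumes a truncate_schema() function is already defined in the target database. A minimal sketch of such a helper, assuming PL/pgSQL and that every table in the schema should be truncated (the project's actual definition may differ):

# Hedged sketch of the truncate_schema() helper the code above calls.
sql_create_truncate_schema = """
    CREATE OR REPLACE FUNCTION truncate_schema(schema_name text)
    RETURNS void AS $$
    DECLARE
        tbl text;
    BEGIN
        FOR tbl IN
            SELECT tablename FROM pg_tables WHERE schemaname = schema_name
        LOOP
            EXECUTE format('TRUNCATE TABLE %I.%I CASCADE', schema_name, tbl);
        END LOOP;
    END;
    $$ LANGUAGE plpgsql;
"""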
Code example #2
def insert_df_values(ath_un, conn, datafrm, table, unique):
    # Create a list of tuples from the dataframe values
    tpls = [tuple(x) for x in datafrm.to_numpy()]
    # Comma-separated dataframe columns
    cols = ','.join(list(datafrm.columns))
    unique = ','.join(unique)

    # SQL query to execute
    sql = "INSERT INTO %s(%s) VALUES %%s ON CONFLICT (%s) DO NOTHING;" % (
        table, cols, unique)
    cur = conn.cursor()
    try:
        psycopg2.extras.execute_values(cur, sql, tpls)
        conn.commit()
        with StdoutRedirection(ath_un):
            print('Custom data successfully inserted to: {}'.format(table))
        with ProgressStdoutRedirection(ath_un):
            print('Custom data successfully inserted to: {}'.format(table))
    except (Exception, psycopg2.DatabaseError) as e:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(e)))
    finally:
        # Close the cursor on both the success and the error path
        cur.close()
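
A minimal usage sketch for insert_df_values, assuming a hypothetical table my_metrics(athlete_id, ts, value) with a unique constraint on (athlete_id, ts) and an open psycopg2 connection conn:

import pandas as pd

# Table and column names here are illustrative assumptions only.
datafrm = pd.DataFrame({'athlete_id': [1, 1],
                        'ts': ['2021-01-01 10:00:00', '2021-01-01 10:01:00'],
                        'value': [42.0, 43.5]})
# Rows that violate the (athlete_id, ts) constraint are silently skipped.
insert_df_values('some_user', conn, datafrm, 'my_metrics', ['athlete_id', 'ts'])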
Code example #3
def delete_all_files(output, ath_un):
    folder = os.path.join(output, ath_un)

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    for the_file in os.listdir(folder):
        file_path = os.path.join(folder, the_file)
        if os.path.isfile(file_path):
            os.unlink(file_path)
        elif os.path.isdir(file_path):
            shutil.rmtree(file_path)
    with StdoutRedirection(ath_un):
        print('All XML, JSON and FIT files for user: ' + ath_un + ' deleted successfully.')  # ath_un assumed; redacted in source
    with ProgressStdoutRedirection(ath_un):
        print('All XML, JSON and FIT files for user: ' + ath_un + ' deleted successfully.')
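
delete_all_files expects the per-user download directory to be <output>/<ath_un> and relies on a module-level PID_FILE_DIR; a usage sketch with assumed paths:

# Both paths below are illustrative assumptions.
PID_FILE_DIR = '/tmp/'
delete_all_files('/data/downloads', 'some_user')  # clears /data/downloads/some_user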
Code example #4
def backup_user_db(db_name, ath_un, output, dbx_auth_token, encr_pass):

    # Build the backup file path in an OS-independent way
    backup_file_path = os.path.join(output, ath_un, 'DB_Backup.gz')

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    # read connection parameters
    params = config(filename="encrypted_settings.ini",
                    section="postgresql",
                    encr_pass=encr_pass)
    superuser_un = params.get("user")
    superuser_pw = params.get("password")
    db_host = params.get("host")

    try:
        with gzip.open(backup_file_path, 'wb') as f:
            with StdoutRedirection(ath_un):
                print('Backing up DB, please wait....')
            with ProgressStdoutRedirection(ath_un):
                print('Backing up DB, please wait....')

            # create .pgpass in user's AppData or home directory
            if os.name == 'nt':
                appdata_dir = os.getenv('APPDATA')
                pgpass_dir = os.path.join(appdata_dir, "postgresql")
                pgpassfile = os.path.join(pgpass_dir, "pgpass.conf")
                if not os.path.exists(pgpass_dir):
                    os.makedirs(pgpass_dir)
                if not os.path.exists(pgpassfile):
                    with open(pgpassfile, 'w') as pf:
                        pf.write(db_host + ':5432:*:postgres:' +
                                 superuser_pw)
            else:
                pgpassfile = os.path.expanduser('~') + '/.pgpass'
                if not os.path.exists(pgpassfile):
                    with open(pgpassfile, 'w') as pf:
                        pf.write(db_host + ':5432:*:postgres:' +
                                 superuser_pw)
                    os.chmod(pgpassfile, 0o600)
            # execute pg_dump
            popen = subprocess.Popen(
                ['pg_dump', '-h', db_host, '-U', superuser_un, db_name],
                stdout=subprocess.PIPE,
                universal_newlines=True)
            for stdout_line in iter(popen.stdout.readline, ""):
                f.write(stdout_line.encode())
            popen.stdout.close()
            popen.wait()
            try:
                os.remove(pgpassfile)
            except Exception as e:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.datetime.now()) + ' [' +
                           sys._getframe().f_code.co_name + ']' +
                           ' Error on line {}'.format(
                               sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))
            with StdoutRedirection(ath_un):
                print(
                    'DB backup completed. The backup file will be uploaded to Dropbox now.'
                )
            with ProgressStdoutRedirection(ath_un):
                print(
                    'DB backup completed. The backup file will be uploaded to Dropbox now.'
                )
    except Exception as e:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(e)))

    today = datetime.datetime.today().strftime('%Y%m%d')
    download_folder_dbx = 'DB_Backup'
    file_name_to = today + '_DB_Backup.gz'

    dbx_file_exists = check_if_file_exists_in_dbx(file_name_to, dbx_auth_token,
                                                  download_folder_dbx,
                                                  encr_pass)
    if dbx_file_exists:
        with StdoutRedirection(ath_un):
            print((file_name_to + ' already exists in Dropbox, skipping.'))
        with ProgressStdoutRedirection(ath_un):
            print((file_name_to + ' already exists in Dropbox, skipping.'))
    else:
        try:
            with StdoutRedirection(ath_un):
                print(('Uploading ' + file_name_to + ' to Dropbox, please wait.'))
            download_files_to_dbx(backup_file_path, file_name_to,
                                  dbx_auth_token, download_folder_dbx,
                                  encr_pass)
            with StdoutRedirection(ath_un):
                print('DB Backup file uploaded to Dropbox successfully')
            with ProgressStdoutRedirection(ath_un):
                print('DB Backup file uploaded to Dropbox successfully')
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print(
                    (str(datetime.datetime.now()) + ' [' +
                     sys._getframe().f_code.co_name + ']' +
                     ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                     '  ' + str(e)))
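
The .pgpass file written above uses libpq's hostname:port:database:username:password format. An alternative that avoids touching the user's real .pgpass is to point libpq at a temporary password file through the PGPASSFILE environment variable; a sketch with placeholder credentials:

import os
import subprocess

pgpass_tmp = '/tmp/throwaway_pgpass'  # assumed temporary location
with open(pgpass_tmp, 'w') as pf:
    pf.write('db.example.com:5432:*:postgres:secret')  # placeholder values
os.chmod(pgpass_tmp, 0o600)
env = dict(os.environ, PGPASSFILE=pgpass_tmp)
dump = subprocess.run(['pg_dump', '-h', 'db.example.com', '-U', 'postgres', 'mydb'],
                      stdout=subprocess.PIPE, env=env, check=True)
os.remove(pgpass_tmp)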
Code example #5
    def parse_dataset(dataset, conn, start_date_dt, end_date_dt, data_source):
        for row in dataset.itertuples():
            glucose_nmol = None
            glucose_nmol_avrg = None
            ketone_nmol = None
            utc_time_str = None
            if data_source == 'glimp_android':
                record_type = row._9  #Current(5) or 15min average(3)
                if record_type == 5:
                    glucose_nmol = round((float(row._6) / 18), 2)
                else:
                    glucose_nmol_avrg = round((float(row._6) / 18), 2)
                local_time = row._2
                epoch_time = row._3
                local_time_dt = datetime.datetime.strptime((local_time),
                                                           "%d/%m/%Y %H.%M.%S")
                local_time_str = datetime.datetime.strftime(
                    (local_time_dt), "%Y-%m-%d %H:%M:%S")
                utc_time_str = datetime.datetime.utcfromtimestamp(
                    epoch_time).strftime('%Y-%m-%d %H:%M:%S')
            else:
                ketone_nmol = row._7
                glucose_nmol = row._6
                glucose_nmol_avrg = row._5
                local_time = row._3
                local_time_dt = local_time.to_pydatetime()  # Convert from pandas to python datetime
                local_time_str = datetime.datetime.strftime(
                    (local_time_dt), "%Y-%m-%d %H:%M:%S")
                local_date_str = datetime.datetime.strftime((local_time_dt),
                                                            "%Y-%m-%d")
                try:
                    # create a cursor
                    cur = conn.cursor()
                    # execute a statement
                    cur.execute(sql_local2gmt, (local_date_str, ))
                    result = cur.fetchone()
                    # close the communication with the PostgreSQL
                    conn.commit()
                    cur.close()
                    if result is not None:
                        utc_time_dt = local_time_dt - result[0]
                        utc_time_str = datetime.datetime.strftime(
                            (utc_time_dt), "%Y-%m-%d %H:%M:%S")
                except (Exception, psycopg2.DatabaseError) as error:
                    with ErrorStdoutRedirection(ath_un):
                        print((str(datetime.datetime.now()) + '  ' + str(error)))

            # Check whether the record date falls within the download date range. This only
            # works when the csv is parsed from the top, with the most recent record first.
            if local_time_dt > end_date_dt + datetime.timedelta(days=1):
                continue
            if local_time_dt < start_date_dt:
                break

            with ProgressStdoutRedirection(ath_un):
                print(
                    ('Inserting BG record from Glimp/LibreView. Time: ' +
                     str(local_time_str) + '  Glucose: ' + str(glucose_nmol)))
            with StdoutRedirection(ath_un):
                print(
                    ('Inserting BG record from Glimp/LibreView. Time: ' +
                     str(local_time_str) + '  Glucose: ' + str(glucose_nmol)))

            try:
                # create a cursor
                cur = conn.cursor()
                # execute a statement
                cur.execute(
                    sql,
                    (ath_un, local_time_str, glucose_nmol, glucose_nmol_avrg,
                     ketone_nmol, data_source, utc_time_str))
                conn.commit()
                # close the communication with the PostgreSQL
                cur.close()

            except (Exception, psycopg2.DatabaseError) as error:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.datetime.now()) + '  ' + str(error)))
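
parse_dataset references two statements, sql and sql_local2gmt, defined in its enclosing scope but not shown here. Judging by the parameter tuple passed to cur.execute(), sql plausibly has a shape like the following (table and column names are assumptions):

# Hedged reconstruction; the project's actual statement may differ.
sql = """
    INSERT INTO bg_readings (athlete_id, timestamp_local, glucose_nmol,
                             glucose_nmol_avrg, ketone_nmol, data_source,
                             timestamp_gmt)
    VALUES ((SELECT id FROM athlete WHERE ath_un = %s), %s, %s, %s, %s, %s, %s)
    ON CONFLICT DO NOTHING;
"""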
Code example #6
def dwnld_insert_strava_data(ath_un, db_host, db_name, superuser_un,
                             superuser_pw, strava_refresh_token, start_date_dt,
                             end_date_dt, save_pwd, encr_pass):

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    strava_params = config(filename="encrypted_settings.ini",
                           section="strava",
                           encr_pass=encr_pass)
    STRAVA_CLIENT_ID = str(strava_params.get("strava_client_id"))
    STRAVA_CLIENT_SECRET = str(strava_params.get("strava_client_secret"))
    STRAVA_TOKEN_URL = str(strava_params.get("strava_token_url"))

    sql_insert_strava_refresh_token = """

        DO
            $do$
                BEGIN
                    IF EXISTS (SELECT id FROM athlete WHERE ath_un = %s) THEN
                        UPDATE athlete SET strava_refresh_token = %s where ath_un= %s;
                    END IF;
                END
            $do$

    """
    sql_insert_activity_summary = """

        INSERT INTO strava_activity_summary(athlete_id,strava_activity_id,strava_athlete_id,name,distance,moving_time,elapsed_time,total_elevation_gain,type,
        workout_type,external_id,upload_id,start_date,start_date_local,timezone,utc_offset,start_latitude,start_longitude,end_latitude,end_longitude,location_city,
        location_state,location_country,map,summary_polyline,trainer,commute,manual,gear_id,average_speed,max_speed,average_cadence,average_temp,average_watts,
        weighted_average_watts,kilojoules,device_watts,average_heartrate,max_heartrate,max_watts,elev_high,elev_low,suffer_score)

        VALUES
        ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (start_date) DO NOTHING;
        
    """

    sql_insert_activity_streams = """

        INSERT INTO strava_activity_streams(activity_id,altitude,cadence,distance,grade_smooth,heartrate,latitude,longitude,moving,temp,time_gmt,velocity_smooth,watts)

        VALUES
        ((select id from strava_activity_summary where strava_activity_id=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (time_gmt) DO NOTHING;
    """

    sql_insert_utc_offset = """
        INSERT INTO gmt_local_time_difference (athlete_id,local_date,local_midnight_timestamp,gmt_midnight_timestamp,gmt_local_difference)

        VALUES
            ((select id from athlete where ath_un=%s),%s,%s,%s,%s)

        ON CONFLICT (local_date) DO NOTHING;
        """

    sql_timezone = """
        INSERT INTO timezones(gc_activity_id,strava_activity_id,timestamp_local,timestamp_gmt,timezone,long_degr,lat_degr,alt_avrg,end_time_gmt)
        
        VALUES
        (null,(select id from strava_activity_summary where strava_activity_id=%s),%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (timestamp_gmt,long_degr,lat_degr) DO NOTHING;
        """

    conn = psycopg2.connect(dbname=db_name,
                            host=db_host,
                            user=superuser_un,
                            password=superuser_pw)

    def refresh_oauth_tokens():
        #Refresh the access token
        payload = dict(grant_type='refresh_token',
                       refresh_token=strava_refresh_token,
                       client_id=STRAVA_CLIENT_ID,
                       client_secret=STRAVA_CLIENT_SECRET)
        refresh = requests.post(STRAVA_TOKEN_URL, data=payload)
        response = refresh.json()
        oauth_token = response['access_token']
        refresh_token = response['refresh_token']
        expires_at = response['expires_at']

        if save_pwd:
            encrypted_refresh_token = base64.b64encode(
                encrypt(refresh_token, encr_pass))
            encrypted_refresh_token = encrypted_refresh_token.decode('utf-8')
        else:
            encrypted_refresh_token = None

        try:
            cur = conn.cursor()
            cur.execute(sql_insert_strava_refresh_token,
                        (ath_un, encrypted_refresh_token, ath_un))
            conn.commit()
            cur.close()
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print(
                    (str(datetime.now()) + ' [' +
                     sys._getframe().f_code.co_name + ']' +
                     ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                     '  ' + str(e)))

        return oauth_token, expires_at

    oauth_token, expires_at = refresh_oauth_tokens()
    header = {'Authorization': 'Bearer {}'.format(oauth_token)}
    epoch_start_date = start_date_dt.timestamp()
    epoch_end_date = end_date_dt.timestamp()

    #Loop through all activities
    page = 1
    activities_url = "https://www.strava.com/api/v3/activities"

    while True:
        # Retry 3 times if request fails
        tries = 3
        for i in range(tries):
            try:
                #Check if the oauth token is still valid, and refresh if not.
                if int(datetime.now().timestamp()) > expires_at:
                    oauth_token, expires_at = refresh_oauth_tokens()
                    header = {'Authorization': 'Bearer {}'.format(oauth_token)}
                    with ProgressStdoutRedirection(ath_un):
                        print(
                            str(datetime.now()) +
                            ' The Strava oauth token expired and has been refreshed'
                        )
                # get page of 200 activities from Strava
                r = requests.get(
                    '{}?before={}&after={}&page={}&per_page=200'.format(
                        activities_url, epoch_end_date, epoch_start_date,
                        str(page)),
                    headers=header)
                sleep_sec = api_rate_limits(r)
            except Exception as e:
                if i < tries - 1:
                    time.sleep(10)
                    continue
                else:
                    with ErrorStdoutRedirection(ath_un):
                        print((str(datetime.now()) + ' [' +
                               sys._getframe().f_code.co_name + ']' +
                               ' Error on line {}'.format(
                                   sys.exc_info()[-1].tb_lineno) + '  ' +
                               str(e)))
                    raise
            break

        if sleep_sec:
            time.sleep(sleep_sec)

        # if no results then exit loop
        if not r.json():
            if conn is not None:
                conn.close()
                with StdoutRedirection(ath_un):
                    print('Strava Activities Inserted Successfully')
                with ProgressStdoutRedirection(ath_un):
                    print('Strava Activities Inserted Successfully')
            break

        #Parse the JSON response into a list of activities
        activity_data = json.loads(r.text)

        #assign activity values to variables
        for activity in activity_data:
            if "id" in activity:
                strava_activity_id = activity['id']
                #PG: Check whether the data for this activity have been inserted into to DB during one of the previous runs
                data_exist_for_activity = 'Strava activity {} data'.format(
                    strava_activity_id)
                data_exists = check_data_file_exists(data_exist_for_activity,
                                                     ath_un, db_host, db_name,
                                                     superuser_un,
                                                     superuser_pw, encr_pass)
                if data_exists:
                    with StdoutRedirection(ath_un):
                        print((
                            'Strava activity {} data already downloaded and inserted to DB. Skipping.'
                            .format(strava_activity_id)))
                    with ProgressStdoutRedirection(ath_un):
                        print((
                            'Strava activity {} data already downloaded and inserted to DB. Skipping'
                            .format(strava_activity_id)))
                    continue
            else:
                strava_activity_id = None
            if "id" in activity['athlete']:
                strava_athlete_id = activity['athlete']['id']
            else:
                strava_athlete_id = None
            if "name" in activity:
                name = activity['name']
            else:
                name = None
            if "distance" in activity:
                distance = activity['distance']
            else:
                distance = None
            if "moving_time" in activity:
                moving_time = activity['moving_time']
            else:
                moving_time = None
            if "elapsed_time" in activity:
                elapsed_time = activity['elapsed_time']
            else:
                elapsed_time = None
            if "total_elevation_gain" in activity:
                total_elevation_gain = activity['total_elevation_gain']
            else:
                total_elevation_gain = None
            if "type" in activity:
                type = activity['type']
            else:
                type = None
            if "workout_type" in activity:
                workout_type = activity['workout_type']
            else:
                workout_type = None
            if "external_id" in activity:
                external_id = activity['external_id']
            else:
                external_id = None
            if "upload_id" in activity:
                upload_id = activity['upload_id']
            else:
                upload_id = None
            if "start_date" in activity:
                start_date = activity['start_date']
                start_date_dt = datetime.strptime(start_date,
                                                  '%Y-%m-%dT%H:%M:%SZ')
                start_date = datetime.strftime(start_date_dt,
                                               '%Y-%m-%d %H:%M:%S')
            else:
                start_date = None
            if "start_date_local" in activity:
                start_date_local = activity['start_date_local']
                start_date_local_dt = datetime.strptime(
                    start_date_local, '%Y-%m-%dT%H:%M:%SZ')
                start_date_local = datetime.strftime(start_date_local_dt,
                                                     '%Y-%m-%d %H:%M:%S')
            else:
                start_date_local = None
            if "timezone" in activity:
                timezone = activity['timezone']
            else:
                timezone = None
            if "utc_offset" in activity:
                utc_offset = activity['utc_offset']
            else:
                utc_offset = None
            if "start_latlng" in activity:
                start_latlng = activity['start_latlng']
                #if start_latlng is not None: #Changed 2021/10/29
                if start_latlng:
                    start_latitude = start_latlng[0]
                    start_longitude = start_latlng[1]
                else:
                    start_latitude = None
                    start_longitude = None
            else:
                start_latitude = None
                start_longitude = None
            if "end_latlng" in activity:
                end_latlng = activity['end_latlng']
                #if end_latlng is not None: #Changed 2021/10/29
                if end_latlng:
                    end_latitude = end_latlng[0]
                    end_longitude = end_latlng[1]
                else:
                    end_latitude = None
                    end_longitude = None
            else:
                end_latitude = None
                end_longitude = None
            if "location_city" in activity:
                location_city = activity['location_city']
            else:
                location_city = None
            if "location_state" in activity:
                location_state = activity['location_state']
            else:
                location_state = None
            if "location_country" in activity:
                location_country = activity['location_country']
            else:
                location_country = None
            if "id" in activity['map']:
                map = activity['map']['id']
            else:
                map = None
            if "summary_polyline" in activity['map']:
                summary_polyline = activity['map']['summary_polyline']
            else:
                summary_polyline = None
            if "trainer" in activity:
                trainer = activity['trainer']
            else:
                trainer = None
            if "commute" in activity:
                commute = activity['commute']
            else:
                commute = None
            if "manual" in activity:
                manual = activity['manual']
            else:
                manual = None
            if "gear_id" in activity:
                gear_id = activity['gear_id']
            else:
                gear_id = None
            if "average_speed" in activity:
                average_speed = activity['average_speed']
            else:
                average_speed = None
            if "max_speed" in activity:
                max_speed = activity['max_speed']
            else:
                max_speed = None
            if "average_cadence" in activity:
                average_cadence = activity['average_cadence']
            else:
                average_cadence = None
            if "average_temp" in activity:
                average_temp = activity['average_temp']
            else:
                average_temp = None
            if "average_watts" in activity:
                average_watts = activity['average_watts']
            else:
                average_watts = None
            if "weighted_average_watts" in activity:
                weighted_average_watts = activity['weighted_average_watts']
            else:
                weighted_average_watts = None
            if "kilojoules" in activity:
                kilojoules = activity['kilojoules']
            else:
                kilojoules = None
            if "device_watts" in activity:
                device_watts = activity['device_watts']
            else:
                device_watts = None
            if "average_heartrate" in activity:
                average_heartrate = activity['average_heartrate']
            else:
                average_heartrate = None
            if "max_heartrate" in activity:
                max_heartrate = activity['max_heartrate']
            else:
                max_heartrate = None
            if "max_watts" in activity:
                max_watts = activity['max_watts']
            else:
                max_watts = None
            if "elev_high" in activity:
                elev_high = activity['elev_high']
            else:
                elev_high = None
            if "elev_low" in activity:
                elev_low = activity['elev_low']
            else:
                elev_low = None
            if "suffer_score" in activity:
                suffer_score = activity['suffer_score']
            else:
                suffer_score = None

            with StdoutRedirection(ath_un):
                print('Downloading Strava activity {} from: {}'.format(
                    strava_activity_id, start_date_local))
            with ProgressStdoutRedirection(ath_un):
                print('Downloading Strava activity {} from: {}'.format(
                    strava_activity_id, start_date_local))

            #Insert activity summary data to DB
            try:
                cur = conn.cursor()
                cur.execute(
                    sql_insert_activity_summary,
                    (ath_un, strava_activity_id, strava_athlete_id, name,
                     distance, moving_time, elapsed_time, total_elevation_gain,
                     type, workout_type, external_id, upload_id, start_date,
                     start_date_local, timezone, utc_offset, start_latitude,
                     start_longitude, end_latitude, end_longitude,
                     location_city, location_state, location_country, map,
                     summary_polyline, trainer, commute, manual, gear_id,
                     average_speed, max_speed, average_cadence, average_temp,
                     average_watts, weighted_average_watts, kilojoules,
                     device_watts, average_heartrate, max_heartrate, max_watts,
                     elev_high, elev_low, suffer_score))
                conn.commit()
                cur.close()
                # Update the files table
                data_file_path_insert(data_exist_for_activity, ath_un, db_host,
                                      db_name, superuser_un, superuser_pw,
                                      encr_pass)

                #Insert utc offset into gmt_local_time_difference table if the record not already present from gc
                gmt_local_difference = timedelta(seconds=utc_offset)
                local_date = start_date_local_dt.date()
                local_midnight = start_date_local_dt.replace(hour=00,
                                                             minute=00,
                                                             second=00)
                gmt_midnight = local_midnight - gmt_local_difference

                cur = conn.cursor()
                cur.execute(sql_insert_utc_offset,
                            (ath_un, local_date, local_midnight, gmt_midnight,
                             gmt_local_difference))
                conn.commit()
                cur.close()

                #Insert timezone info into timezones table if the record not already present from gc
                if start_latitude is not None:
                    #Calculate average altitude for better weather-data accuracy
                    avg_altitude = int((elev_high + elev_low) / 2)
                    #Drop (GMT+hh:mm) from timezone string
                    timezone = re.sub("[(].*[) ]", "", timezone)
                    #Calculate end_time_gmt_str
                    end_time_gmt_dt = start_date_dt + timedelta(
                        seconds=elapsed_time)
                    end_time_gmt_str = datetime.strftime((end_time_gmt_dt),
                                                         "%Y-%m-%d %H:%M:%S")
                    cur = conn.cursor()
                    with StdoutRedirection(ath_un):
                        print(('Inserting timezone info: ' + str(timezone) +
                               ' and local tz timestamp:' + str(local_date)))
                    with ProgressStdoutRedirection(ath_un):
                        print(('Inserting timezone info: ' + str(timezone) +
                               ' and local tz timestamp:' + str(local_date)))
                    cur.execute(
                        sql_timezone,
                        (strava_activity_id, start_date_local, start_date,
                         timezone, start_longitude, start_latitude,
                         avg_altitude, end_time_gmt_str))
                    conn.commit()
                    cur.close()

            except Exception as e:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.now()) + ' [' +
                           sys._getframe().f_code.co_name + ']' +
                           ' Error on line {}'.format(
                               sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))

            #Get all available streams for the activity
            types = 'time,distance,latlng,altitude,velocity_smooth,heartrate,cadence,watts,temp,moving,grade_smooth'
            df_columns = {
                'activity_id', 'time_gmt', 'distance', 'latitude', 'longitude',
                'altitude', 'velocity_smooth', 'heartrate', 'cadence', 'watts',
                'temp', 'moving', 'grade_smooth'
            }
            df_columns = sorted(df_columns)

            # Retry 3 times if request fails
            tries = 3
            for i in range(tries):
                try:
                    #Check if the oauth token is still valid, and refresh if not.
                    if int(datetime.now().timestamp()) > expires_at:
                        oauth_token, expires_at = refresh_oauth_tokens()
                        header = {
                            'Authorization': 'Bearer {}'.format(oauth_token)
                        }
                        with ProgressStdoutRedirection(ath_un):
                            print(
                                str(datetime.now()) +
                                ' The Strava oauth token expired and has been refreshed'
                            )
                    streams = requests.get(
                        "https://www.strava.com/api/v3/activities/{}/streams?keys={}"
                        .format(strava_activity_id, types),
                        headers=header)
                    sleep_sec = api_rate_limits(streams)
                except Exception as e:
                    if i < tries - 1:
                        time.sleep(10)
                        continue
                    else:
                        with ErrorStdoutRedirection(ath_un):
                            print((str(datetime.now()) + ' [' +
                                   sys._getframe().f_code.co_name + ']' +
                                   ' Error on line {}'.format(
                                       sys.exc_info()[-1].tb_lineno) + '  ' +
                                   str(e)))
                        raise
                break

            if sleep_sec:
                time.sleep(sleep_sec)

            #TODO: if sleep_sec > 6hrs request a new access_token

            #Parse the streams JSON response

            activity_streams = json.loads(streams.text)
            #Check if there is data in activity_streams
            if 'message' in activity_streams:
                if "Resource Not Found" in activity_streams['message']:
                    with ErrorStdoutRedirection(ath_un):
                        print(
                            str(datetime.now()) +
                            ' No streams retrieved for {}. Moving onto next activity.'
                            .format(strava_activity_id))
                    continue
                if "Rate Limit Exceeded" in activity_streams['message']:
                    with ErrorStdoutRedirection(ath_un):
                        print(
                            str(datetime.now()) +
                            ' Download Rate limit exceeded. This attempt did not get caught by api_rate_limits() function!'
                        )
                    #Usually due to a failed request on the very last attempt before the limit and a subsequent successful retry. Too messy; move on to the next activity and see what happens.
                    continue
            else:
                pass

            activity_streams_df = pd.DataFrame(columns=df_columns)

            for stream in activity_streams:
                if "time" in stream['type']:
                    activity_id_list = []
                    time_stream = stream['data']
                    for i in range(len(time_stream)):
                        time_stream[i] = start_date_dt + timedelta(
                            0, int(time_stream[i]))
                        time_stream[i] = datetime.strftime(
                            time_stream[i], '%Y-%m-%d %H:%M:%S')
                        activity_id_list.append(strava_activity_id)
                    activity_streams_df['activity_id'] = activity_id_list
                    activity_streams_df['time_gmt'] = time_stream

                if "distance" in stream['type']:
                    distance_stream = stream['data']
                    activity_streams_df['distance'] = distance_stream

                if "latlng" in stream['type']:
                    latlng_stream = stream['data']
                    latitude = []
                    longitude = []
                    if latlng_stream is not None:
                        for i in range(len(latlng_stream)):
                            latitude.append(latlng_stream[i][0])
                            longitude.append(latlng_stream[i][1])
                    activity_streams_df['latitude'] = latitude
                    activity_streams_df['longitude'] = longitude

                if "altitude" in stream['type']:
                    altitude_stream = stream['data']
                    activity_streams_df['altitude'] = altitude_stream

                if "velocity_smooth" in stream['type']:
                    velocity_smooth_stream = stream['data']
                    activity_streams_df[
                        'velocity_smooth'] = velocity_smooth_stream

                if "heartrate" in stream['type']:
                    heartrate_stream = stream['data']
                    activity_streams_df['heartrate'] = heartrate_stream

                if "cadence" in stream['type']:
                    cadence_stream = stream['data']
                    activity_streams_df['cadence'] = cadence_stream

                if "watts" in stream['type']:
                    watts_stream = stream['data']
                    activity_streams_df['watts'] = watts_stream

                if "temp" in stream['type']:
                    temp_stream = stream['data']
                    activity_streams_df['temp'] = temp_stream

                if "moving" in stream['type']:
                    moving_stream = stream['data']
                    activity_streams_df['moving'] = moving_stream

                if "grade_smooth" in stream['type']:
                    grade_smooth_stream = stream['data']
                    activity_streams_df['grade_smooth'] = grade_smooth_stream

            activity_streams_df = activity_streams_df.reindex(
                sorted(activity_streams_df.columns),
                axis=1)  # Sort df alphabetically
            activity_streams_df = activity_streams_df.where(
                pd.notnull(activity_streams_df), None)  # Replace NaN with None
            df2list = activity_streams_df.values.tolist()

            #Insert activity streams data to DB
            try:
                cur = conn.cursor()
                for sublist in df2list:
                    cur.execute(sql_insert_activity_streams, (sublist))
                    conn.commit()
                cur.close()
            except Exception as e:
                with ErrorStdoutRedirection(ath_un):
                    print((str(datetime.now()) + ' [' +
                           sys._getframe().f_code.co_name + ']' +
                           ' Error on line {}'.format(
                               sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))

        # increment page
        page += 1
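
The api_rate_limits() helper called after each request is not shown. Strava reports usage through the X-RateLimit-Limit and X-RateLimit-Usage response headers (15-minute and daily counters, comma-separated), so a minimal version consistent with how it is used above might look like this:

# Sketch only: returns the number of seconds to sleep, or 0 if under the limits.
def api_rate_limits(response):
    limits = response.headers.get('X-RateLimit-Limit')
    usage = response.headers.get('X-RateLimit-Usage')
    if not limits or not usage:
        return 0
    limit_15min, limit_daily = (int(x) for x in limits.split(','))
    used_15min, used_daily = (int(x) for x in usage.split(','))
    if used_daily >= limit_daily:
        return 24 * 3600  # crude: wait out the daily window
    if used_15min >= limit_15min:
        return 15 * 60  # wait for the next 15-minute window
    return 0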
Code example #7
def cstm_data_insert(ath_un, usr_params_str, usr_params_int, db_host,
                     superuser_un, superuser_pw):
    cstm_dbx_link = usr_params_str[0]
    download_folder = usr_params_str[1]
    download_folder = os.path.join(download_folder, 'CSTM_Temp_CSV')
    archive_folder = os.path.join(download_folder, 'Archive')
    table_name = usr_params_str[2]
    date_format = usr_params_str[3]
    timezone = usr_params_str[4]
    datetime_clmn = usr_params_str[5]
    unique = usr_params_int
    dataframes = []

    # Download the csv file from dropbox using share link
    with StdoutRedirection(ath_un):
        print('Downloading custom csv files from: {}'.format(cstm_dbx_link))
    with ProgressStdoutRedirection(ath_un):
        print('Downloading custom csv files from: {}'.format(cstm_dbx_link))
    cstm_dbx_link = cstm_dbx_link[:-1] + '1'  # Replace the trailing 0 in the link with 1 so it downloads as a .zip file
    table_name = 'cstm_{}'.format(table_name)
    response = urllib.request.urlopen(cstm_dbx_link)
    sniffer = csv.Sniffer()
    db_name = str(str2md5(ath_un)) + '_Athlete_Data_DB'

    archive_params = config(filename="encrypted_settings.ini",
                            section="archive")
    preserve_files = str(archive_params.get("preserve_files"))

    #Insert usr_params to athlete table..
    cstm_user_insert(ath_un, usr_params_str, usr_params_int, table_name,
                     db_host, db_name, superuser_un, superuser_pw)

    conn = None
    # connect to the PostgreSQL server
    conn = psycopg2.connect(dbname=db_name,
                            host=db_host,
                            user=superuser_un,
                            password=superuser_pw)

    sql_select_athlete_id = """
        SELECT id FROM athlete WHERE ath_un = %s;
    """

    # Extract the downloaded file in to variable
    downloadFile = io.BytesIO()
    downloadFile.write(response.read())
    downloadFile.seek(0)  #Move pointer to the beginning

    # Create output directory (if it does not already exist).
    if not os.path.exists(download_folder):
        os.makedirs(download_folder)

    # Create Archive directory (if it does not already exist).
    if preserve_files == "true":
        if not os.path.exists(archive_folder):
            os.makedirs(archive_folder)

    #Insert the data from csv to dataframe
    if zipfile.is_zipfile(downloadFile):
        with zipfile.ZipFile(downloadFile, 'r') as z:
            #List the content of the zip file
            zipfile_content = z.namelist()
            #Iterate through list and only extract files
            for item in zipfile_content:
                z.extract(item, download_folder)
        downloadFile.close()
        for filename in os.listdir(download_folder):
            if os.path.isfile(os.path.join(download_folder, filename)):
                #Detect whether the csv file has a header.
                with open(os.path.join(download_folder, filename), 'r') as f:
                    has_header = sniffer.has_header(f.read())
                    if has_header:
                        f.seek(0)  #Move pointer to the beginning
                        data_count = csv_match_columns2data(ath_un, f)
                        df = pd.read_csv(os.path.join(download_folder,
                                                      filename),
                                         usecols=data_count)
                    else:
                        df = pd.read_csv(os.path.join(download_folder,
                                                      filename),
                                         header=None)
                dataframes.append(df)
                f.close()
                #Archive csv files
                if preserve_files == "false":
                    #Remove the csv file from download folder
                    os.remove(os.path.join(download_folder, filename))
                    with StdoutRedirection(ath_un):
                        print((
                            'The content of \"{}\" parsed and inserted into DB.'
                            .format(filename)))
                    with ProgressStdoutRedirection(ath_un):
                        print((
                            'The content of \"{}\" parsed and inserted into DB.'
                            .format(filename)))
                else:
                    #Move the csv to archive folder and remove from download
                    if os.path.exists(os.path.join(archive_folder, filename)):
                        #Replace the existing file with the updated one
                        os.remove(os.path.join(archive_folder, filename))
                        os.rename(os.path.join(download_folder, filename),
                                  os.path.join(archive_folder, filename))
                        with StdoutRedirection(ath_un):
                            print((
                                'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                                .format(filename)))
                        with ProgressStdoutRedirection(ath_un):
                            print((
                                'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                                .format(filename)))
                    else:
                        #Remove the csv file from download folder
                        os.remove(os.path.join(download_folder, filename))
    else:
        filename = re.search(r'([^\/]+)\?.*$', cstm_dbx_link).group(
            1)  #Extract the filename from the link
        downloadFile.seek(0)  #Move pointer to the beginning
        #Detect whether the csv file has a header.
        wrapper = io.TextIOWrapper(downloadFile, encoding='utf-8')
        has_header = sniffer.has_header(wrapper.read())
        downloadFile.seek(0)  #Move pointer to the beginning
        if has_header:
            data_count = csv_match_columns2data(ath_un, wrapper)
            downloadFile.seek(0)  #Move pointer to the beginning
            df = pd.read_csv(downloadFile, usecols=data_count)
        else:
            df = pd.read_csv(downloadFile, header=None)
        #Archive downloaded File
        if preserve_files == "true":
            downloadFile.seek(0)  #Move pointer to the beginning
            with open(os.path.join(archive_folder, filename), 'wb') as f:
                f.write(downloadFile.read())
                with StdoutRedirection(ath_un):
                    print((
                        'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                        .format(filename)))
                with ProgressStdoutRedirection(ath_un):
                    print((
                        'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                        .format(filename)))
        else:
            with StdoutRedirection(ath_un):
                print(('The content of \"{}\" parsed and inserted into DB.'.
                       format(filename)))
            with ProgressStdoutRedirection(ath_un):
                print(('The content of \"{}\" parsed and inserted into DB.'.
                       format(filename)))
        wrapper.close()
        dataframes.append(df)
        downloadFile.close()

    #Remove duplicates
    dataframes = pd.concat(dataframes).drop_duplicates().reset_index(drop=True)

    #---- Parse and format datetime values ----
    cur = conn.cursor()
    #Iterate through values in df datetime column
    if datetime_clmn is not None:
        datetime_clmn = int(datetime_clmn)
        #set how to interpret the first value in an ambiguous 3-integer date
        #eu date format dd/mm/yyyy
        if date_format == "eu":
            first_val = "day"
        #us date format mm/dd/yyyy
        else:
            first_val = 'month'
        #add timestamp_gmt and timestamp_local columns
        datetime_clmn_gmt = "timestamp_gmt"
        datetime_clmn_local = "timestamp_local"
        dataframes[datetime_clmn_gmt] = np.nan
        dataframes[datetime_clmn_local] = np.nan

        item_row_nr = 0
        for item in dataframes[dataframes.columns[datetime_clmn]]:
            timestamp_utc = timestamp_local = None  # guard against rows where no date is found
            timestamps = datefinder.find_dates(str(item), first=first_val)
            for timestamp in timestamps:
                #If the timestamp is timezone aware
                if timestamp.tzinfo is not None:
                    timestamp_utc = timestamp.astimezone(pytz.UTC)
                    timestamp_utc = datetime.datetime.strftime(
                        timestamp_utc, "%Y-%m-%d %H:%M:%S.%f")
                    timestamp_local = datetime.datetime.strftime(
                        timestamp, "%Y-%m-%d %H:%M:%S.%f %Z")
                #If the timestamp is timezone naive
                else:
                    local_timezone = pytz.timezone(timezone)
                    timestamp_local_tz = local_timezone.localize(timestamp,
                                                                 is_dst=True)
                    timestamp_utc = timestamp_local_tz.astimezone(pytz.UTC)
                    timestamp_utc = datetime.datetime.strftime(
                        timestamp_utc, "%Y-%m-%d %H:%M:%S.%f")
                    timestamp_local = datetime.datetime.strftime(
                        timestamp_local_tz, "%Y-%m-%d %H:%M:%S.%f %Z")

            #insert timestamp_utc and timezone_local values to the newly created df columns
            row_index = dataframes.index[item_row_nr]
            dataframes.loc[row_index, datetime_clmn_gmt] = timestamp_utc
            dataframes.loc[row_index, datetime_clmn_local] = timestamp_local

            item_row_nr += 1

    cur.execute(sql_select_athlete_id, (ath_un, ))
    result = cur.fetchone()
    conn.commit()
    ath_un_id = result[0]
    dataframes['athlete_id'] = ath_un_id
    cur.close()

    #format column names to comply with postgres rules
    dataframes.rename(columns={i: format_column_name(i)
                               for i in dataframes.columns},
                      inplace=True)

    #Get the list of unique column names from the column numbers supplied by the user and add the athlete_id column to the list
    unique = [dataframes.columns[i] for i in unique]
    unique.append("athlete_id")

    #Check if the table already exists
    cur = conn.cursor()
    cur.execute(
        "SELECT EXISTS (SELECT FROM pg_tables WHERE tablename = %s);",
        (table_name, ))
    result = cur.fetchone()
    conn.commit()
    if result[0]:
        with StdoutRedirection(ath_un):
            print(
                'The custom table {} already exists. Will now insert the new data'
                .format(table_name))
        with ProgressStdoutRedirection(ath_un):
            print(
                'The custom table {} already exists. Will now insert the new data'
                .format(table_name))
    else:
        with StdoutRedirection(ath_un):
            print('Creating custom table: {}'.format(table_name))
        with ProgressStdoutRedirection(ath_un):
            print('Creating custom table: {}'.format(table_name))
        cur.execute(gen_tbl_sql(ath_un, dataframes, table_name, unique))
        conn.commit()
        cur.close()

    insert_df_values(ath_un, conn, dataframes, table_name, unique)

    # close the communication with the PostgreSQL
    if conn is not None:
        conn.close()
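
cstm_data_insert leans on several project helpers (csv_match_columns2data, gen_tbl_sql, format_column_name) that are not included here. For orientation, a plausible sketch of format_column_name, assuming its only job is to turn arbitrary CSV headers into valid PostgreSQL identifiers:

import re

# Assumed behaviour; the project's real helper may normalize differently.
def format_column_name(name):
    # Lower-case, collapse non-alphanumerics to '_', avoid a leading digit.
    name = re.sub(r'[^0-9a-zA-Z]+', '_', str(name)).strip('_').lower()
    if not name or name[0].isdigit():
        name = 'c_' + name
    return name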
Code example #8
def gc_wellness_insert(file_path, ath_un, db_host, db_name, superuser_un,
                       superuser_pw, encr_pass):
    conn = None

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    with open(pidfile, 'w') as f:
        f.write(pid)

    #Create an empty dataframe using the sorted DB fields as headers; it will be merged with the XML dataframe below
    db_fields_list = {
        'athlete_id', 'calendar_date', 'wellness_min_heart_rate',
        'wellness_total_steps_goal', 'sleep_sleep_duration',
        'wellness_floors_descended', 'wellness_floors_ascended',
        'wellness_bodybattery_drained', 'wellness_max_heart_rate',
        'wellness_total_distance', 'wellness_vigorous_intensity_minutes',
        'food_calories_consumed', 'wellness_user_intensity_minutes_goal',
        'wellness_common_active_calories', 'food_calories_remaining',
        'wellness_moderate_intensity_minutes', 'wellness_bmr_calories',
        'common_total_distance', 'wellness_active_calories',
        'wellness_total_calories', 'wellness_resting_heart_rate',
        'wellness_max_stress', 'wellness_average_steps',
        'wellness_max_avg_heart_rate', 'wellness_abnormalhr_alerts_count',
        'wellness_total_steps', 'wellness_average_stress',
        'common_total_calories', 'wellness_user_floors_ascended_goal',
        'wellness_min_avg_heart_rate', 'wellness_bodybattery_charged'
    }
    #Sort alphabetically
    db_fields_list = sorted(db_fields_list)
    db_df = pd.DataFrame(columns=db_fields_list)

    #List to store XML tags for xml_df column names
    column_list = []
    #List to store XML values in xml_df dataframe and used as params for SQL insert/update query
    xml_list = []

    xml_list.append(ath_un)
    column_list.append('athlete_id')

    #Parse the XML document, and append the data to column_list and xml_list lists.
    root = parse(file_path).getroot()
    for startDate in root.iter('statisticsStartDate'):
        date = startDate.text
        xml_list.append(date)
        column_list.append('calendar_date')
    for category in root.findall('.//metricsMap/*'):
        if category.tag != 'SLEEP_SLEEP_DURATION':
            column_list.append(category.tag.lower())
            for tag in root.iter(category.tag):
                for item in tag.iterfind('item'):
                    xml_list.append(item.findtext('value'))
        else:
            column_list.append(category.tag.lower())
            for tag in root.iter(category.tag):
                for item in tag.iterfind('item'):
                    if item.findtext('calendarDate') == date:
                        xml_list.append(item.findtext('value'))

    #Combine xml_list and column_list in a xml_df dataframe
    xml_df = pd.DataFrame(xml_list).T  # Write in DF and transpose it
    xml_df.columns = column_list  # Update column names
    xml_df = xml_df.reindex(sorted(xml_df.columns),
                            axis=1)  # Sort alphabetically
    #Combine the dataframes (DataFrame.append was removed in pandas 2.0; concat is equivalent here)
    combined_df = pd.concat([db_df, xml_df])
    #Drop all columns that do not exist as DB fields
    combined_df = combined_df[db_fields_list]
    #Export all values to list
    df2list = combined_df.values.tolist()
    #Flatten the list
    df2list = [item for sublist in df2list for item in sublist]

    #Build a list of parameters to pass to sql query
    query_params = df2list + df2list
    query_params.append(date)

    # PG Amend path for postgres "pg_read_file()" function.
    text = file_path
    head, sep, tail = text.partition('temp/')
    pg_read_file = (tail, )

    list_sql = """
        INSERT INTO garmin_connect_wellness (athlete_id, calendar_date, common_total_calories, common_total_distance, food_calories_consumed, 
        food_calories_remaining, sleep_duration, wellness_abnormalhr_alerts_count, wellness_active_calories, wellness_average_steps, wellness_average_stress, 
        wellness_bmr_calories, wellness_body_battery_drained, wellness_bodybattery_charged, wellness_common_active_calories, wellness_floors_ascended, 
        wellness_floors_descended, wellness_max_avg_heart_rate, wellness_max_heart_rate, wellness_max_stress, wellness_min_avg_heart_rate, 
        wellness_min_heart_rate, wellness_moderate_intensity_minutes, wellness_resting_heart_rate, wellness_total_calories, wellness_total_distance, 
        wellness_total_steps, wellness_total_steps_goal, wellness_user_floors_ascended_goal, wellness_user_intensity_goal, wellness_vigorous_intensity_minutes)
        
        VALUES ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (calendar_date) DO UPDATE

        SET (athlete_id, calendar_date, common_total_calories, common_total_distance, food_calories_consumed, food_calories_remaining, sleep_duration, 
        wellness_abnormalhr_alerts_count, wellness_active_calories, wellness_average_steps, wellness_average_stress, wellness_bmr_calories, 
        wellness_body_battery_drained, wellness_bodybattery_charged, wellness_common_active_calories, wellness_floors_ascended, wellness_floors_descended, 
        wellness_max_avg_heart_rate, wellness_max_heart_rate, wellness_max_stress, wellness_min_avg_heart_rate, wellness_min_heart_rate, 
        wellness_moderate_intensity_minutes, wellness_resting_heart_rate, wellness_total_calories, wellness_total_distance, wellness_total_steps, 
        wellness_total_steps_goal, wellness_user_floors_ascended_goal, wellness_user_intensity_goal, wellness_vigorous_intensity_minutes)
        = ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        WHERE garmin_connect_wellness.calendar_date
        = %s;
        """

    try:
        # connect to the PostgreSQL server
        conn = psycopg2.connect(dbname=db_name,
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        with StdoutRedirection(ath_un):
            print('Inserting Wellness Data into postgreSQL:')
        with ProgressStdoutRedirection(ath_un):
            print('Inserting Wellness Data into postgreSQL:')
        #cur.execute(xpath_sql,(athlete_id,pg_read_file,athlete_id,file2import,file2import,file2import))
        cur.execute(list_sql, query_params)
        conn.commit()

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))
    finally:
        if conn is not None:
            conn.close()
            with StdoutRedirection(ath_un):
                print('Wellness Data Inserted Successfully')
            with ProgressStdoutRedirection(ath_un):
                print('Wellness Data Inserted Successfully')
Code example #9
def gc_original_record_insert(file_path,activity_id,ath_un,db_host,db_name,superuser_un,superuser_pw, encr_pass):
    start=time.time()
    file2import = (file_path)
    gc_activity_id = (activity_id)
    ath_un = (ath_un)
    db_name = (db_name)
    (activity_type, altitude, cadence, distance, enhanced_altitude, enhanced_speed,
     fractional_cadence, heart_rate, position_lat, position_long, speed, stance_time,
     stance_time_balance, step_length, timestamp, vertical_oscillation, vertical_ratio,
     accumulated_power, left_pedal_smoothness, left_torque_effectiveness, power,
     right_pedal_smoothness, right_torque_effectiveness, temperature, avg_speed,
     avg_swimming_cadence, event, event_group, event_type, length_type, message_index,
     start_time, swim_stroke, total_calories, total_elapsed_time, total_strokes,
     total_timer_time, est_core_temp) = [None] * 38
    hrv_record_list = (None, None, None, None)
    hrv_record_list_combined = []
    hrv_rmssd = None
    hrv_sdrr = None
    hrv_pnn50 = None
    hrv_pnn20 = None
    fitfile = FitFile(file2import)

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)
 
    conn = None
    # connect to the PostgreSQL server
    conn = psycopg2.connect(dbname=db_name, host=db_host, user=superuser_un,password=superuser_pw)
    # two-phase insert prepare
    conn.tpc_begin(conn.xid(42, 'transaction ID', ath_un))
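    # All record and hrv inserts below join this single two-phase transaction;
    # it is prepared and committed (or rolled back) further down.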

    # Get all data messages that are of type record
    for record in fitfile.get_messages('record'):
        # Go through all the data entries in this record
        for record_data in record:
            if record_data.name == 'activity_type':
                activity_type = record_data.value
            if record_data.name == 'altitude':
                altitude = record_data.value
            if record_data.name == 'cadence':
                cadence = record_data.value
            if record_data.name == 'distance':
                distance = record_data.value
            if record_data.name == 'enhanced_altitude':
                enhanced_altitude = record_data.value     
            if record_data.name == 'enhanced_speed':
                enhanced_speed = record_data.value
            if record_data.name == 'fractional_cadence':
                fractional_cadence = record_data.value
            if record_data.name == 'heart_rate':
                heart_rate = record_data.value
            if record_data.name == 'position_lat':
                position_lat = record_data.value
            if record_data.name == 'position_long':
                position_long = record_data.value
            if record_data.name == 'speed':
                speed = record_data.value
            if record_data.name == 'stance_time':
                stance_time = record_data.value
            if record_data.name == 'stance_time_balance':
                stance_time_balance = record_data.value
            if record_data.name == 'step_length':
                step_length = record_data.value
            if record_data.name == 'timestamp':
                timestamp = record_data.value
            if record_data.name == 'vertical_oscillation':
                vertical_oscillation = record_data.value
            if record_data.name == 'vertical_ratio':
                vertical_ratio = record_data.value
            if record_data.name == 'accumulated_power':
                accumulated_power = record_data.value
            if record_data.name == 'left_pedal_smoothness':
                left_pedal_smoothness = record_data.value
            if record_data.name == 'left_torque_effectiveness':
                left_torque_effectiveness = record_data.value
            if record_data.name == 'power':
                power = record_data.value
            if record_data.name == 'right_pedal_smoothness':
                right_pedal_smoothness = record_data.value
            if record_data.name == 'right_torque_effectiveness':
                right_torque_effectiveness = record_data.value
            if record_data.name == 'temperature':
                temperature = record_data.value
            if record_data.name == 'Est Core Temp':
                est_core_temp = record_data.value

        sql = """

            INSERT INTO garmin_connect_original_record (activity_type,altitude,cadence,distance,enhanced_altitude,enhanced_speed,fractional_cadence,heart_rate,position_lat
            ,position_long,speed,stance_time,stance_time_balance,step_length,timestamp,vertical_oscillation,vertical_ratio,accumulated_power
            ,left_pedal_smoothness,left_torque_effectiveness,power,right_pedal_smoothness,right_torque_effectiveness,temperature,est_core_temp,lap_id)

            VALUES
            (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,(select id from garmin_connect_original_lap where timestamp >= %s and start_time < %s LIMIT 1))
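            -- lap_id subquery: pick the lap whose start_time precedes, and whose
            -- (end) timestamp covers, this record's timestamp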
            
            ON CONFLICT (timestamp,lap_id) DO NOTHING;


            """
        try:
            # create a cursor
            cur = conn.cursor()
            # execute a statement
            with StdoutRedirection(ath_un):
                print(('Inserting track record from session: ' + str(gc_activity_id) + ' with timestamp:' + str(timestamp)))
            cur.execute(sql,(activity_type,altitude,cadence,distance,enhanced_altitude,enhanced_speed,fractional_cadence,heart_rate,position_lat,position_long,speed,stance_time,
                            stance_time_balance,step_length,timestamp,vertical_oscillation,vertical_ratio,accumulated_power,left_pedal_smoothness,left_torque_effectiveness,
                            power,right_pedal_smoothness,right_torque_effectiveness,temperature,est_core_temp,str(timestamp),str(timestamp)))

            cur.close()       
        except  (Exception, psycopg2.DatabaseError) as error:
            with ErrorStdoutRedirection(ath_un):
                print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(error)))

    # Get all data messages that are of type hrv
    for record in fitfile.get_messages('hrv'):
        # Go through all the data entries in this record
        for record_data in record:
            if record_data.name == 'time':
                hrv_record_list = record_data.value
                for i in hrv_record_list:
                    if i is not None:
                        hrv_record = i
                        hrv_record_list_combined.append(hrv_record*1000)

                        sql_hrv = """
                        INSERT INTO garmin_connect_hrv_tracking(gc_activity_id,hrv)
                        VALUES
                        (%s,%s);
                        """
                        try:
                            # create a cursor
                            cur = conn.cursor()
                            # execute a statement
                            with StdoutRedirection(ath_un):
                                print(('Inserting hrv record from session: ' + str(gc_activity_id)))
                            cur.execute(sql_hrv,(gc_activity_id,hrv_record))
                            # close the communication with the PostgreSQL 
                            cur.close()              
                        except Exception as e:
                            with ErrorStdoutRedirection(ath_un):
                                print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))
                    else:
                        continue

    # two-phase insert commit or rollback
    try:
        conn.tpc_prepare()
    except  (Exception, psycopg2.DatabaseError) as error:
        conn.tpc_rollback()
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(error)))
    else:
        try:
            conn.tpc_commit()
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))

    #PG: Add Pool Swim specific data
    for record in fitfile.get_messages('sport'):  
        for record_data in record:
            sport = record_data.value
            if sport == 'Pool Swim':
                with StdoutRedirection(ath_un):
                    print(('Activity ' + str(gc_activity_id) + ' is a Pool Swim. Inserting additional data'))  
                
                for record in fitfile.get_messages('length'):

                    for record_data in record:
                        if record_data.name == 'avg_speed':
                            avg_speed = record_data.value  
                        if record_data.name == 'avg_swimming_cadence':
                            avg_swimming_cadence = record_data.value  
                        if record_data.name == 'event_group':
                            event_group = record_data.value  
                        if record_data.name == 'event_type':
                            event_type = record_data.value    
                        if record_data.name == 'length_type':
                            length_type = record_data.value    
                        if record_data.name == 'message_index':
                            message_index = record_data.value     
                        if record_data.name == 'start_time':
                            start_time = record_data.value      
                        if record_data.name == 'swim_stroke':
                            swim_stroke = record_data.value     
                        if record_data.name == 'timestamp':
                            timestamp = record_data.value      
                        if record_data.name == 'total_calories':
                            total_calories = record_data.value       
                        if record_data.name == 'total_elapsed_time':
                            total_elapsed_time = record_data.value      
                        if record_data.name == 'total_strokes':
                            total_strokes = record_data.value       
                        if record_data.name == 'total_timer_time':
                            total_timer_time = record_data.value

                    sql_swim = """

                    CREATE TEMPORARY TABLE temp_garmin_connect_original_record(avg_speed real,avg_swimming_cadence int,event varchar,event_group varchar,event_type varchar,length_type varchar,message_index int
                                                ,start_time varchar,swim_stroke varchar,total_calories int,total_elapsed_time real,total_strokes int,total_timer_time real, timestamp varchar) ON COMMIT DROP;

                    INSERT INTO temp_garmin_connect_original_record (avg_speed,avg_swimming_cadence,event,event_group,event_type,length_type,message_index
                                    ,start_time,swim_stroke,total_calories,total_elapsed_time,total_strokes,total_timer_time,timestamp)

                    VALUES

                    (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);

                    UPDATE garmin_connect_original_record
                    SET avg_speed = temp_garmin_connect_original_record.avg_speed,
                        avg_swimming_cadence = temp_garmin_connect_original_record.avg_swimming_cadence,
                        event = temp_garmin_connect_original_record.event,
                        event_group = temp_garmin_connect_original_record.event_group,
                        event_type = temp_garmin_connect_original_record.event_type,
                        length_type = temp_garmin_connect_original_record.length_type,
                        message_index = temp_garmin_connect_original_record.message_index,
                        start_time = temp_garmin_connect_original_record.start_time,
                        swim_stroke = temp_garmin_connect_original_record.swim_stroke,
                        total_calories = temp_garmin_connect_original_record.total_calories,
                        total_elapsed_time = temp_garmin_connect_original_record.total_elapsed_time,
                        total_strokes = temp_garmin_connect_original_record.total_strokes,
                        total_timer_time = temp_garmin_connect_original_record.total_timer_time
                    FROM temp_garmin_connect_original_record
                    WHERE temp_garmin_connect_original_record.timestamp = garmin_connect_original_record.timestamp and garmin_connect_original_record.gc_activity_id = %s;


                    """
                    try:
                        # create a cursor
                        cur = conn.cursor()
                        # execute a statement
                        with StdoutRedirection(ath_un):
                            print(('Inserting Pool swim data record from session: ' + str(gc_activity_id) + ' with timestamp:' + str(timestamp)))
                        cur.execute(sql_swim,(avg_speed,avg_swimming_cadence,event,event_group,event_type,length_type,message_index,start_time,swim_stroke,total_calories,total_elapsed_time,total_strokes,total_timer_time,timestamp,gc_activity_id))
                        conn.commit()
                        # close the communication with the PostgreSQL
                        cur.close()            
                    except Exception as e:
                        with ErrorStdoutRedirection(ath_un):
                            print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))
                    
    with StdoutRedirection(ath_un):
        print(('--- All record data for session: ' + str(gc_activity_id) + ' inserted successfully. ---'))
    with ProgressStdoutRedirection(ath_un):
        print(('--- All record data for session: ' + str(gc_activity_id) + ' inserted successfully. ---'))
    #Correct errors: replace outliers (|z-score| > 2) with the median
    hrv_record_list_combined = np.array(hrv_record_list_combined)
    hrv_record_list_combined[np.abs(zscore(hrv_record_list_combined)) > 2] = np.median(hrv_record_list_combined)

    #Calculate the square root of the mean square of the differences of RR-intervals
    hrv_rmssd = np.sqrt(np.mean(np.square(np.diff(hrv_record_list_combined))))
    #Calculate the standard deviation of the RR-intervals
    hrv_sdrr = np.std(hrv_record_list_combined)
    #Calculate the number of pairs of successive RR-intervals that differ by more than 50/20 ms
    nn50 = np.sum(np.abs(np.diff(hrv_record_list_combined)) > 50)*1
    nn20 = np.sum(np.abs(np.diff(hrv_record_list_combined)) > 20)*1
    #Calculate the proportion of NN50/NN20 relative to the total number of RR-intervals.
    hrv_pnn50 = 100 * nn50 / len(hrv_record_list_combined)
    hrv_pnn20 = 100 * nn20 / len(hrv_record_list_combined)
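    # Illustrative check: for RR intervals [800, 860, 790] ms the successive
    # differences are [60, -70], so RMSSD = sqrt((60**2 + 70**2) / 2) ~= 65.2 ms,
    # nn50 = 2 and pnn50 = 100 * 2 / 3 ~= 66.7 (note the code divides by the
    # number of intervals, not the number of differences).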

    sql_hrv_stats = '''
    UPDATE garmin_connect_original_session
    SET hrv_rmssd = %s,
        hrv_sdrr = %s,
        hrv_pnn50 = %s,
        hrv_pnn20 = %s
    WHERE gc_activity_id = %s;
    '''
    
    try:
        # create a cursor
        cur = conn.cursor()
        # execute a statement
        with StdoutRedirection(ath_un):
            print(('Inserting hrv stats for session: ' + str(gc_activity_id)))
        cur.execute(sql_hrv_stats,(hrv_rmssd,hrv_sdrr,hrv_pnn50,hrv_pnn20,gc_activity_id))
        conn.commit()
        # close the communication with the PostgreSQL 
        cur.close()
            
    except Exception as e:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' + sys._getframe().f_code.co_name + ']' + ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) + '  ' + str(e)))

    # close the communication with the PostgreSQL
    if conn is not None:
        conn.close()

    end = time.time()
    with ProgressStdoutRedirection(ath_un):
        print('\nExecution_time:')
    with ProgressStdoutRedirection(ath_un):
        print((end-start))
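
The function above funnels all of its per-record and per-hrv INSERTs into a single two-phase transaction, so a failure before tpc_prepare() discards the whole file's data at once. A minimal, self-contained sketch of the psycopg2 two-phase pattern (the connection details, xid values and demo_table are illustrative assumptions, and the server must have max_prepared_transactions > 0):

import psycopg2

conn = psycopg2.connect(dbname='demo', host='localhost',
                        user='demo', password='demo')  # illustrative credentials
xid = conn.xid(42, 'demo-transaction', 'demo-branch')  # (format_id, gtrid, bqual)
conn.tpc_begin(xid)
try:
    cur = conn.cursor()
    cur.execute('INSERT INTO demo_table (val) VALUES (%s)', (1,))  # illustrative table
    cur.close()
    conn.tpc_prepare()  # phase one: make the work durable but not yet visible
except psycopg2.DatabaseError:
    conn.tpc_rollback()
else:
    conn.tpc_commit()   # phase two: commit the prepared transaction
finally:
    conn.close()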
Code example #10
def gc_bodycomposition_insert(file_path, ath_un, db_host, db_name,
                              superuser_un, superuser_pw, encr_pass):
    """ Connect to the PostgreSQL database server """
    athlete_id = (ath_un, )
    db_name = (db_name)
    body_water = []
    muscle_mass_gm = []
    visceral_fat = []
    weight_gm = []
    bmi = []
    body_fat = []
    physique_rating = []
    timestamp = []
    calendar_date = []
    metabolic_age = []
    bone_mass_gm = []
    caloric_intake = []
    source_type = []
    conn = None

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)

    # PG Amend path for postgres "pg_read_file()" function.
    text = file_path
    head, sep, tail = text.partition('temp/')
    pg_read_file = (tail, )

    sql = """

        INSERT INTO garmin_connect_body_composition(athlete_id,body_water,muscle_mass_gm,visceral_fat,weight_gm,bmi,
                                                                                        body_fat,physique_rating,timestamp,calendar_date,metabolic_age,
                                                                                        bone_mass_gm,caloric_intake,source_type)

        SELECT

        (select id from athlete where ath_un=%s),
        unnest (xpath('//*[local-name()="item"]/*[local-name()="bodyWater"]/text()', x))::text::numeric AS body_water,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="muscleMass"]/text()', x))::text::int AS muscle_mass_gm,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="visceralFat"]/text()', x))::text::int AS visceral_fat,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="weight"]/text()', x))::text::numeric AS weight_gm,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="bmi"]/text()', x))::text::numeric AS bmi,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="bodyFat"]/text()', x))::text::numeric AS body_fat,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="physiqueRating"]/text()', x))::text::text AS physique_rating,
        to_char(to_timestamp(unnest (xpath('//*[local-name()="item"]/*[local-name()="samplePk"]/text()', x))::text::numeric/1000) AT TIME ZONE 'UTC','YYYY-MM-DD HH24:MI:SS')  AS timestamp,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="calendarDate"]/text()', x))::text::text AS calendar_date,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="metabolicAge"]/text()', x))::text::text AS metabolic_age,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="boneMass"]/text()', x))::text::int AS bone_mass_gm,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="caloricIntake"]/text()', x))::text::text AS caloric_intake,
        unnest (xpath('//*[local-name()="item"]/*[local-name()="sourceType"]/text()', x))::text::text AS source_type
        
        
        FROM UNNEST (xpath('//*[local-name()="root"]', pg_read_file(%s)::xml)) x
                                        
        ON CONFLICT (athlete_id,timestamp) DO NOTHING;
                
        """

    pd_df_sql = """
        INSERT INTO garmin_connect_body_composition(athlete_id,body_water,muscle_mass_gm,visceral_fat,weight_gm,bmi,
                                                        body_fat,physique_rating,timestamp,calendar_date,metabolic_age,
                                                        bone_mass_gm,caloric_intake,source_type)

        VALUES ((select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
          
        ON CONFLICT (athlete_id,timestamp) DO NOTHING;
        """

    #Parse the XML document, and append the data to lists
    xml_document = parse(file_path)
    for item in xml_document.iterfind('item'):
        body_water.append(item.findtext('bodyWater'))
        muscle_mass_gm.append(item.findtext('muscleMass'))
        visceral_fat.append(item.findtext('visceralFat'))
        weight_gm.append(item.findtext('weight'))
        bmi.append(item.findtext('bmi'))
        body_fat.append(item.findtext('bodyFat'))
        physique_rating.append(item.findtext('physiqueRating'))
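        # samplePk is a millisecond Unix epoch; convert it to a UTC
        # 'YYYY-MM-DD HH:MM:SS' string to match the DB timestamp column.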
        timestamp.append(
            datetime.datetime.utcfromtimestamp(
                float(item.findtext('samplePk')) /
                1000).strftime('%Y-%m-%d %H:%M:%S'))
        calendar_date.append(item.findtext('calendarDate'))
        metabolic_age.append(item.findtext('metabolicAge'))
        bone_mass_gm.append(item.findtext('boneMass'))
        caloric_intake.append(item.findtext('caloricIntake'))
        source_type.append(item.findtext('sourceType'))

    #Get the data from lists to dataframe
    df = pd.DataFrame({
        'body_water': body_water,
        'muscle_mass_gm': muscle_mass_gm,
        'visceral_fat': visceral_fat,
        'weight_gm': weight_gm,
        'bmi': bmi,
        'body_fat': body_fat,
        'physique_rating': physique_rating,
        'timestamp': timestamp,
        'calendar_date': calendar_date,
        'metabolic_age': metabolic_age,
        'bone_mass_gm': bone_mass_gm,
        'caloric_intake': caloric_intake,
        'source_type': source_type
    })
    #Replace empty strings with None. Required for SQL insert
    df.replace(r'', np.nan, inplace=True)
    df = df.where(pd.notnull(df), None)

    try:

        # connect to the PostgreSQL server
        conn = psycopg2.connect(dbname=db_name,
                                host=db_host,
                                user=superuser_un,
                                password=superuser_pw)

        # create a cursor
        cur = conn.cursor()

        with StdoutRedirection(ath_un):
            print('Inserting Body composition Data into postgreSQL:')
        with ProgressStdoutRedirection(ath_un):
            print('Inserting Body composition Data into postgreSQL:')
        #Iterate through rows in pandas dataframe
        for row in df.itertuples():
            df_body_water = row.body_water
            df_muscle_mass_gm = row.muscle_mass_gm
            df_visceral_fat = row.visceral_fat
            df_weight_gm = row.weight_gm
            df_bmi = row.bmi
            df_body_fat = row.body_fat
            df_physique_rating = row.physique_rating
            df_timestamp = row.timestamp
            df_calendar_date = row.calendar_date
            df_metabolic_age = row.metabolic_age
            df_bone_mass_gm = row.bone_mass_gm
            df_caloric_intake = row.caloric_intake
            df_source_type = row.source_type

            cur.execute(
                pd_df_sql,
                (athlete_id, df_body_water, df_muscle_mass_gm, df_visceral_fat,
                 df_weight_gm, df_bmi, df_body_fat, df_physique_rating,
                 df_timestamp, df_calendar_date, df_metabolic_age,
                 df_bone_mass_gm, df_caloric_intake, df_source_type))

        ### Insert using Xpath method ###
        #cur.execute(xpath_sql,(athlete_id,pg_read_file))
        conn.commit()

        # close the communication with the PostgreSQL
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))
    finally:
        if conn is not None:
            conn.close()
            with StdoutRedirection(ath_un):
                print('Body composition Data Inserted Successfully')
Code example #11
def gc_activity_insert(file_path, ath_un, activity, db_host, db_name,
                       superuser_un, superuser_pw, encr_pass):
    """ Connect to the PostgreSQL database server """
    file2import = (file_path, )
    athlete_id = (ath_un, )
    activity_id = (activity, )
    db_name = db_name
    conn = None

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)

    sql = """DO
      $do$
      BEGIN
      IF

      (SELECT
      count(xpath('//*[local-name()="Lap"]/@StartTime', x))::text::int
      FROM unnest(xpath('//*[local-name()="Lap"]', pg_read_file(%s)::xml)) x) = 1
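      -- Exactly one Lap: the trackpoint INSERT can take lap_id from a scalar
      -- subquery; otherwise the ELSE branch pairs trackpoints with their parent
      -- Lap by unnesting each Lap node separately.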

      THEN

      INSERT INTO garmin_connect_activity (activity_timestamp,sport,athlete_id,gc_activity_id)
      SELECT
            (xpath('//*[local-name()="Activity"]/*[local-name()="Id"]/text()', x))[1]::text::text AS activity_timestamp,
            (xpath('//*[local-name()="Activity"]/@Sport', x))[1]::text::text AS sport,
            (select id from athlete where ath_un=%s),
            %s
      FROM UNNEST 
            (xpath('//*[local-name()="Activity"]',pg_read_file(%s)::xml)) x;



      INSERT INTO garmin_connect_lap (gc_activity_id,lap_id,activity_timestamp,total_time_seconds,distance_meters,maximum_speed,calories,average_hr_bpm,maximum_hr_bpm,intensity,cadence,trigger_method,
                                    extensions_ns3_lx_avgspeed,extensions_ns3_lx_maxbikecadence,extensions_ns3_lx_steps,extensions_ns3_lx_avgwatts,extensions_ns3_lx_maxwatts,extensions_ns3_lx_avgruncadence,
                                    extensions_ns3_lx_maxruncadence)

      SELECT
      %s,
      unnest (xpath('//*[local-name()="Lap"]/@StartTime', x))::text::text AS lap_id,
            (xpath('//*[local-name()="Activity"]/*[local-name()="Id"]/text()', x))[1]::text::text AS activity_timestamp,  
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="TotalTimeSeconds"]/text()', x))::text::numeric AS total_time_seconds,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="DistanceMeters"]/text()', x))::text::numeric AS distance_meters,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="MaximumSpeed"]/text()', x))::text::numeric AS maximum_speed,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Calories"]/text()', x))::text::numeric AS calories,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="AverageHeartRateBpm"]/*[local-name()="Value"]/text()', x))::text::integer AS average_hr_bpm,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="MaximumHeartRateBpm"]/*[local-name()="Value"]/text()', x))::text::integer AS maximum_hr_bpm,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Intensity"]/text()', x))::text::text AS intensity,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Cadence"]/text()', x))::text::integer AS cadence,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="TriggerMethod"]/text()', x))::text::text AS trigger_method,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Extensions"]/*[local-name()="LX"]/*[local-name()="AvgSpeed"]/text()', x))::text::numeric AS extensions_ns3_lx_avgspeed,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Extensions"]/*[local-name()="LX"]/*[local-name()="MaxBikeCadence"]/text()', x))::text::integer AS extensions_ns3_lx_maxbikecadence,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Extensions"]/*[local-name()="LX"]/*[local-name()="Steps"]/text()', x))::text::integer AS extensions_ns3_lx_steps,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Extensions"]/*[local-name()="LX"]/*[local-name()="AvgWatts"]/text()', x))::text::integer AS extensions_ns3_lx_avgwatts,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Extensions"]/*[local-name()="LX"]/*[local-name()="MaxWatts"]/text()', x))::text::integer AS extensions_ns3_lx_maxwatts,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Extensions"]/*[local-name()="LX"]/*[local-name()="AvgRunCadence"]/text()', x))::text::integer AS extensions_ns3_lx_avgruncadence,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Extensions"]/*[local-name()="LX"]/*[local-name()="MaxRunCadence"]/text()', x))::text::integer AS extensions_ns3_lx_maxruncadence
      FROM UNNEST (xpath('//*[local-name()="Activity"]', pg_read_file(%s)::xml)) x;

      INSERT INTO garmin_connect_trackpoint (trackpoint_time, lap_id, position_latitude_degrees, position_longitude_degrees,altitude_meters,distance_meters,hr_bpm,extensions_ns3_tpx_speed,extensions_ns3_tpx_watts,extensions_ns3_tpx_runcadence)

      SELECT
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/
            *[local-name()="Time"]/text()', x))::text::text AS trackpoint_time,
      (select unnest (xpath('//*[local-name()="Lap"]/@StartTime', x))::text::text AS lap_id
      from unnest(xpath('//*[local-name()="Lap"]', pg_read_file(%s)::xml)) x),
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/
            *[local-name()="Position"]/*[local-name()="LatitudeDegrees"]/text()', x))::text::numeric AS position_latitude_degrees,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/
            *[local-name()="Position"]/*[local-name()="LongitudeDegrees"]/text()', x))::text::numeric AS position_longitude_degrees,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/
            *[local-name()="AltitudeMeters"]/text()', x))::text::numeric AS altitude_meters,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/
            *[local-name()="DistanceMeters"]/text()', x))::text::numeric AS distance_meters,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/
            *[local-name()="HeartRateBpm"]/*[local-name()="Value"]/text()', x))::text::integer AS hr_bpm,
      unnest(xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/*[local-name()="Extensions"]
            /*[local-name()="TPX"]/*[local-name()="Speed"]/text()', x))::text::numeric AS extensions_ns3_tpx_speed,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/*[local-name()="Extensions"]
            /*[local-name()="TPX"]/*[local-name()="Watts"]/text()', x))::text::integer AS extensions_ns3_tpx_watts,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/*[local-name()="Extensions"]
            /*[local-name()="TPX"]/*[local-name()="RunCadence"]/text()', x))::text::integer AS extensions_ns3_tpx_runcadence

      FROM unnest(xpath('//*[local-name()="Lap"]', pg_read_file(%s)::xml)) x;

      ELSE

      INSERT INTO garmin_connect_activity (activity_timestamp,sport,athlete_id,gc_activity_id)
      SELECT
            (xpath('//*[local-name()="Activity"]/*[local-name()="Id"]/text()', x))[1]::text::text AS activity_timestamp,
            (xpath('//*[local-name()="Activity"]/@Sport', x))[1]::text::text AS sport,(select id from athlete where ath_un=%s),
            %s
      FROM UNNEST (xpath('//*[local-name()="Activity"]',pg_read_file(%s)::xml)) x;


      INSERT INTO garmin_connect_lap (gc_activity_id,lap_id,activity_timestamp,total_time_seconds,distance_meters,maximum_speed,calories,average_hr_bpm,maximum_hr_bpm,intensity,cadence,trigger_method,
                                    extensions_ns3_lx_avgspeed,extensions_ns3_lx_maxbikecadence,extensions_ns3_lx_steps,extensions_ns3_lx_avgwatts,extensions_ns3_lx_maxwatts,extensions_ns3_lx_avgruncadence,
                                    extensions_ns3_lx_maxruncadence)

      SELECT
      %s,
      unnest (xpath('//*[local-name()="Lap"]/@StartTime', x))::text::text AS lap_id,
            (xpath('//*[local-name()="Activity"]/*[local-name()="Id"]/text()', x))[1]::text::text AS activity_timestamp,  
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="TotalTimeSeconds"]/text()', x))::text::numeric AS total_time_seconds,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="DistanceMeters"]/text()', x))::text::numeric AS distance_meters,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="MaximumSpeed"]/text()', x))::text::numeric AS maximum_speed,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Calories"]/text()', x))::text::numeric AS calories,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="AverageHeartRateBpm"]/*[local-name()="Value"]/text()', x))::text::integer AS average_hr_bpm,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="MaximumHeartRateBpm"]/*[local-name()="Value"]/text()', x))::text::integer AS maximum_hr_bpm,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Intensity"]/text()', x))::text::text AS intensity,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Cadence"]/text()', x))::text::integer AS cadence,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="TriggerMethod"]/text()', x))::text::text AS trigger_method,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Extensions"]/*[local-name()="LX"]/*[local-name()="AvgSpeed"]/text()', x))::text::numeric AS extensions_ns3_lx_avgspeed,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Extensions"]/*[local-name()="LX"]/*[local-name()="MaxBikeCadence"]/text()', x))::text::integer AS extensions_ns3_lx_maxbikecadence,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Extensions"]/*[local-name()="LX"]/*[local-name()="Steps"]/text()', x))::text::integer AS extensions_ns3_lx_steps,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Extensions"]/*[local-name()="LX"]/*[local-name()="AvgWatts"]/text()', x))::text::integer AS extensions_ns3_lx_avgwatts,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Extensions"]/*[local-name()="LX"]/*[local-name()="MaxWatts"]/text()', x))::text::integer AS extensions_ns3_lx_maxwatts,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Extensions"]/*[local-name()="LX"]/*[local-name()="AvgRunCadence"]/text()', x))::text::integer AS extensions_ns3_lx_avgruncadence,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Extensions"]/*[local-name()="LX"]/*[local-name()="MaxRunCadence"]/text()', x))::text::integer AS extensions_ns3_lx_maxruncadence
      FROM UNNEST (xpath('//*[local-name()="Activity"]', pg_read_file(%s)::xml)) x;

      INSERT INTO garmin_connect_trackpoint (trackpoint_time, lap_id, position_latitude_degrees, position_longitude_degrees,altitude_meters,distance_meters,hr_bpm,extensions_ns3_tpx_speed,extensions_ns3_tpx_watts,extensions_ns3_tpx_runcadence)

      SELECT
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/
            *[local-name()="Time"]/text()', x))::text::text AS trackpoint_time,
            (xpath('//*[local-name()="Lap"]/@StartTime', x))[1]::text::text AS lap_id,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/
            *[local-name()="Position"]/*[local-name()="LatitudeDegrees"]/text()', x))::text::numeric AS position_latitude_degrees,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/
            *[local-name()="Position"]/*[local-name()="LongitudeDegrees"]/text()', x))::text::numeric AS position_longitude_degrees,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/
            *[local-name()="AltitudeMeters"]/text()', x))::text::numeric AS altitude_meters,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/
            *[local-name()="DistanceMeters"]/text()', x))::text::numeric AS distance_meters,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/
            *[local-name()="HeartRateBpm"]/*[local-name()="Value"]/text()', x))::text::integer AS hr_bpm,
      unnest(xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/*[local-name()="Extensions"]
            /*[local-name()="TPX"]/*[local-name()="Speed"]/text()', x))::text::numeric AS extensions_ns3_tpx_speed,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/*[local-name()="Extensions"]
            /*[local-name()="TPX"]/*[local-name()="Watts"]/text()', x))::text::integer AS extensions_ns3_tpx_watts,
      unnest (xpath('//*[local-name()="Lap"]/*[local-name()="Track"]/*[local-name()="Trackpoint"]/*[local-name()="Extensions"]
            /*[local-name()="TPX"]/*[local-name()="RunCadence"]/text()', x))::text::integer AS extensions_ns3_tpx_runcadence

      FROM unnest(xpath('//*[local-name()="Lap"]', pg_read_file(%s)::xml)) x;

      END IF;

      END
      $do$
      """

    try:

        # connect to the PostgreSQL server
        with ProgressStdoutRedirection(ath_un):
            print(
                'Connecting to the PostgreSQL server to insert TCX activity data...'
            )
        try:
            conn = psycopg2.connect(dbname=db_name,
                                    host=db_host,
                                    user=superuser_un,
                                    password=superuser_pw)
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print(
                    (str(datetime.datetime.now()) + ' [' +
                     sys._getframe().f_code.co_name + ']' +
                     ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                     '  ' + str(e)))
        # create a cursor
        try:
            cur = conn.cursor()
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print(
                    (str(datetime.datetime.now()) + ' [' +
                     sys._getframe().f_code.co_name + ']' +
                     ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                     '  ' + str(e)))

        # execute a statement
        with StdoutRedirection(ath_un):
            print('Inserting Activity Data into postgreSQL:')
        with ProgressStdoutRedirection(ath_un):
            print('Inserting Activity Data into postgreSQL:')
        cur.execute(
            sql,
            (file2import, athlete_id, activity_id, file2import, activity_id,
             file2import, file2import, file2import, athlete_id, activity_id,
             file2import, activity_id, file2import, file2import))
        conn.commit()

        # close the communication with the PostgreSQL
        cur.close()

    except psycopg2.IntegrityError as ex:
        if ex.pgcode == '23505':
            with StdoutRedirection(ath_un):
                print(
                    'The record for this activity already exists in the database. Skipping...'
                )
            with ErrorStdoutRedirection(ath_un):
                print(
                    'The record for this activity already exists in the database. Skipping...'
                )
    except (Exception, psycopg2.DatabaseError) as error:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '  ' + str(error)))

    finally:
        if conn is not None:
            conn.close()
Code example #12
def mm_data_insert(output, start_date, end_date, ath_un, encr_pass,
                   mm_dbx_link, save_pwd, db_host, db_name, superuser_un,
                   superuser_pw):
    # Before executing the script go to your Dropbox Apps/Mind Monitor folder.
    # Right click on the folder, select the 'Share' option and copy the generated link.
    # Assign the link string to the 'mm_dbx_link' parameter.
    user_output = os.path.join(output, ath_un)
    download_folder = os.path.join(user_output, 'MM_Historical_EEG')
    archive_folder = os.path.join(download_folder, 'Archive')

    #Get PID of the current process and write it in the file
    pid = str(os.getpid())
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)

    path_params = config(filename="encrypted_settings.ini", section="archive")
    preserve_files = str(path_params.get("preserve_files"))

    if save_pwd == True:
        encrypted_link = base64.b64encode(encrypt(mm_dbx_link, encr_pass))
        encrypted_link = encrypted_link.decode('utf-8')
    else:
        encrypted_link = None

    #PG: insert MM export link into database
    mm_user_insert(encrypted_link, ath_un, db_host, db_name, superuser_un,
                   superuser_pw, encr_pass)

    # Download the MM export csv file from dropbox using share link
    mm_dbx_link = mm_dbx_link[:-1] + '1'  # Replace the trailing 0 in the link with 1 so it downloads as a .zip file.
    response = urllib.request.urlopen(mm_dbx_link)

    # Extract the downloaded folder in to variable
    compressedFile = io.BytesIO()
    compressedFile.write(response.read())
    compressedFile.seek(0)

    #Check whether the date variables are of string or datetime type
    if type(start_date) == str:  #Script run as standalone
        start_date_dt = datetime.datetime.strptime(start_date, "%Y-%m-%d")
    else:  #Script run from flask server (variable passed as datetime type)
        start_date_dt = start_date
    if type(end_date) == str:  #Script run as standalone
        end_date_dt = datetime.datetime.strptime(end_date, "%Y-%m-%d")
    else:  #Script run from flask server (variable passed as datetime type)
        end_date_dt = end_date

    # Create output directory (if it does not already exist).
    if not os.path.exists(download_folder):
        os.makedirs(download_folder)

    # Create Archive directory (if it does not already exist).
    if preserve_files == "true":
        if not os.path.exists(archive_folder):
            os.makedirs(archive_folder)

    with zipfile.ZipFile(compressedFile, 'r') as z:
        #List the content of the zip file
        zipfile_content = z.namelist()
        #Iterate through list and only extract files within date range
        for item in zipfile_content:
            #Extract date from filename
            item_date = item[12:22]
            if item_date != '':
                item_date_dt = datetime.datetime.strptime(
                    item_date, "%Y-%m-%d")
            else:
                continue
            # Check if the file date is within the download Date range.
            if item_date_dt > end_date_dt + datetime.timedelta(
                    days=1) or item_date_dt < start_date_dt:
                continue
            else:
                #PG: Check whether the data from this file "file_path" have been inserted into the DB during one of the previous runs
                data_file_exists = check_data_file_exists(
                    os.path.join(download_folder, item), ath_un, db_host,
                    db_name, superuser_un, superuser_pw, encr_pass)
                if data_file_exists == True:
                    with StdoutRedirection(ath_un):
                        print(('{} already exists in {}. Skipping.'.format(
                            item, download_folder)))
                    with ProgressStdoutRedirection(ath_un):
                        print(('{} already exists in {}. Skipping.'.format(
                            item, download_folder)))
                    continue
                z.extract(item, download_folder)
                #Insert filepath into the db
                try:
                    data_file_path_insert(os.path.join(download_folder,
                                                       item), ath_un, db_host,
                                          db_name, superuser_un, superuser_pw,
                                          encr_pass)
                except Exception as e:
                    with ErrorStdoutRedirection(ath_un):
                        print((str(datetime.datetime.now()) + ' [' +
                               sys._getframe().f_code.co_name + ']' +
                               ' Error on line {}'.format(
                                   sys.exc_info()[-1].tb_lineno) + '  ' +
                               str(e)))

    for filename in os.listdir(download_folder):
        if filename.endswith(".zip"):
            with zipfile.ZipFile(os.path.join(download_folder, filename),
                                 'r') as z:
                z.extractall(download_folder)
            os.remove(os.path.join(download_folder, filename))
            with StdoutRedirection(ath_un):
                print((
                    'The content of \"{}\" extracted into csv and the original removed'
                    .format(filename)))
            with ProgressStdoutRedirection(ath_un):
                print((
                    'The content of \"{}\" extracted into csv and the original removed'
                    .format(filename)))

    db_name = str(str2md5(ath_un)) + '_Athlete_Data_DB'

    conn = None

    # connect to the PostgreSQL server
    conn = psycopg2.connect(dbname=db_name,
                            host=db_host,
                            user=superuser_un,
                            password=superuser_pw)

    sql = """

        INSERT INTO mind_monitor_eeg(timestamp,delta_tp9,
        delta_af7,delta_af8,delta_tp10,athlete_id,theta_tp9,theta_af7,theta_af8,theta_tp10,alpha_tp9,alpha_af7,alpha_af8,alpha_tp10,beta_tp9,beta_af7,beta_af8,
        beta_tp10,gamma_tp9,gamma_af7,gamma_af8,gamma_tp10,raw_tp9,raw_af7,raw_af8,raw_tp10,aux_right,accelerometer_x,accelerometer_y,accelerometer_z,
        gyro_x,gyro_y,gyro_z,head_band_on,hsi_tp9,hsi_af7,hsi_af8,hsi_tp10,battery,elements,timestamp_gmt)

        VALUES
        (%s,%s,%s,%s,%s,(select id from athlete where ath_un=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
        ,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)

        ON CONFLICT (athlete_id,timestamp) DO NOTHING;
        
    """

    sql_local2gmt = """

        SELECT gmt_local_difference FROM gmt_local_time_difference WHERE local_date = %s;

    """

    for filename in os.listdir(download_folder):
        if filename.endswith(".csv"):
            # Read/Parse the .csv file and load the data into pandas dataset
            with StdoutRedirection(ath_un):
                print(
                    ('Parsing and inserting the content of \"{}\" into the DB'.
                     format(filename)))
            with ProgressStdoutRedirection(ath_un):
                print(
                    ('Parsing and inserting the content of \"{}\" into the DB'.
                     format(filename)))
            dataset = pd.read_csv(os.path.join(download_folder, filename),
                                  sep=",",
                                  header=None)
            dataset.drop(dataset.index[:1], inplace=True)  #Drop the header row

            for row in dataset.itertuples():
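                # itertuples() positional fields _1.._39 follow the Mind Monitor
                # CSV column order: timestamp, then delta/theta/alpha/beta/gamma
                # per sensor (TP9, AF7, AF8, TP10), raw channels, aux, IMU axes,
                # headband status, HSI per sensor, battery and elements.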
                local_time = row._1
                local_time_dt = datetime.datetime.strptime(
                    (local_time), "%Y-%m-%d %H:%M:%S.%f")
                local_time_str = datetime.datetime.strftime(
                    local_time_dt, "%Y-%m-%d %H:%M:%S.%f")
                local_date_str = datetime.datetime.strftime((local_time_dt),
                                                            "%Y-%m-%d")
                utc_time_str = None
                delta_tp9 = row._2
                delta_af7 = row._3
                delta_af8 = row._4
                delta_tp10 = row._5
                theta_tp9 = row._6
                theta_af7 = row._7
                theta_af8 = row._8
                theta_tp10 = row._9
                alpha_tp9 = row._10
                alpha_af7 = row._11
                alpha_af8 = row._12
                alpha_tp10 = row._13
                beta_tp9 = row._14
                beta_af7 = row._15
                beta_af8 = row._16
                beta_tp10 = row._17
                gamma_tp9 = row._18
                gamma_af7 = row._19
                gamma_af8 = row._20
                gamma_tp10 = row._21
                raw_tp9 = row._22
                raw_af7 = row._23
                raw_af8 = row._24
                raw_tp10 = row._25
                aux_right = row._26
                accelerometer_x = row._27
                accelerometer_y = row._28
                accelerometer_z = row._29
                gyro_x = row._30
                gyro_y = row._31
                gyro_z = row._32
                head_band_on = row._33
                hsi_tp9 = row._34
                hsi_af7 = row._35
                hsi_af8 = row._36
                hsi_tp10 = row._37
                battery = row._38
                elements = row._39

                # Check if the Record Date is within the download Date range.
                if local_time_dt > end_date_dt + datetime.timedelta(
                        days=1) or local_time_dt < start_date_dt:
                    break
                else:
                    try:
                        # create a cursor
                        cur = conn.cursor()
                        # execute a statement
                        cur.execute(sql_local2gmt, (local_date_str, ))
                        result = cur.fetchone()
                        # close the communication with the PostgreSQL
                        conn.commit()
                        cur.close()
                        if result is not None:
                            utc_time_dt = local_time_dt - result[0]
                            utc_time_str = datetime.datetime.strftime(
                                (utc_time_dt), "%Y-%m-%d %H:%M:%S.%f")
                    except (Exception, psycopg2.DatabaseError) as error:
                        with ErrorStdoutRedirection(ath_un):
                            print((str(datetime.datetime.now()) + '  ' + str(error)))
                    try:
                        # create a cursor
                        cur = conn.cursor()
                        # execute a statement
                        cur.execute(
                            sql,
                            (local_time_str, delta_tp9, delta_af7, delta_af8,
                             delta_tp10, ath_un, theta_tp9, theta_af7,
                             theta_af8, theta_tp10, alpha_tp9, alpha_af7,
                             alpha_af8, alpha_tp10, beta_tp9, beta_af7,
                             beta_af8, beta_tp10, gamma_tp9, gamma_af7,
                             gamma_af8, gamma_tp10, raw_tp9, raw_af7, raw_af8,
                             raw_tp10, aux_right, accelerometer_x,
                             accelerometer_y, accelerometer_z, gyro_x, gyro_y,
                             gyro_z, head_band_on, hsi_tp9, hsi_af7, hsi_af8,
                             hsi_tp10, battery, elements, utc_time_str))
                        conn.commit()
                        # close the communication with the PostgreSQL
                        cur.close()

                    except (Exception, psycopg2.DatabaseError) as error:
                        with ErrorStdoutRedirection(ath_un):
                            print((str(datetime.datetime.now()) + '  ' + str(error)))

            if preserve_files == "false":
                #Remove the csv file from download folder
                os.remove(os.path.join(download_folder, filename))
                with StdoutRedirection(ath_un):
                    print((
                        'The content of \"{}\" parsed and inserted into DB and the original csv file removed'
                        .format(filename)))
                with ProgressStdoutRedirection(ath_un):
                    print((
                        'The content of \"{}\" parsed and inserted into DB and the original csv file removed'
                        .format(filename)))

            else:
                #Move the csv to archive folder
                if not os.path.exists(os.path.join(archive_folder, filename)):
                    os.rename(os.path.join(download_folder, filename),
                              os.path.join(archive_folder, filename))
                    with StdoutRedirection(ath_un):
                        print((
                            'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                            .format(filename)))
                    with ProgressStdoutRedirection(ath_un):
                        print((
                            'The content of \"{}\" parsed and inserted into DB and the original csv file archived'
                            .format(filename)))
                else:
                    #Remove the csv file from download folder
                    os.remove(os.path.join(download_folder, filename))

    # close the communication with the PostgreSQL
    if conn is not None:
        conn.close()
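
The next example iterates over the requested window with a daterange() helper defined elsewhere in the project. A plausible minimal sketch of such a generator, assuming an inclusive end date (an assumption inferred from how it is called below):

import datetime

def daterange(start_date, end_date):
    # Yield every date from start_date through end_date (inclusive) -- assumed behavior.
    for n in range(int((end_date - start_date).days) + 1):
        yield start_date + datetime.timedelta(n)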
Code example #13
def dwnld_insert_nutrition(mfp_username, mfp_password, ath_un, start_date,
                           end_date, encr_pass, save_pwd, auto_synch, db_host,
                           superuser_un, superuser_pw):
    Crypto.Random.atfork()
    db_name = str(str2md5(ath_un)) + '_Athlete_Data_DB'

    if save_pwd == True:
        encrypted_pwd = base64.b64encode(encrypt(mfp_password, encr_pass))
        encrypted_pwd = encrypted_pwd.decode('utf-8')
    else:
        encrypted_pwd = None

    #Get PID of the current process and write it in the file
    pid = (str(os.getpid()))
    pidfile = PID_FILE_DIR + ath_un + '_PID.txt'
    open(pidfile, 'w').write(pid)

    with StdoutRedirection(ath_un):
        print("Attempting to login to MFP...")
    with ProgressStdoutRedirection(ath_un):
        print("Attempting to login to MFP...")
    try:
        client = myfitnesspal.Client(mfp_username, mfp_password)
    except ValueError as e:
        with ErrorStdoutRedirection(ath_un):
            print((str(datetime.datetime.now()) + ' [' +
                   sys._getframe().f_code.co_name + ']' +
                   ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                   '-E1- ' + str(e)))
        with StdoutRedirection(ath_un):
            print(('Wrong MFP credentials for user {}. Skipping.'.format(
                mfp_username)))
        return
    with StdoutRedirection(ath_un):
        print('MFP Login successful! Proceeding...')
    with ProgressStdoutRedirection(ath_un):
        print('MFP Login successful! Proceeding...')

    mfp_user_insert(mfp_username, encrypted_pwd, ath_un, db_host, db_name,
                    superuser_un, superuser_pw,
                    encr_pass)  #PG: insert MFP user details into database

    # read DB connection parameters from ini file
    conn = None

    # connect to the PostgreSQL server
    with ProgressStdoutRedirection(ath_un):
        print('Connecting to the PostgreSQL server...')

    conn = psycopg2.connect(dbname=db_name,
                            host=db_host,
                            user=superuser_un,
                            password=superuser_pw)

    for single_date in daterange(start_date, end_date):
        date_in_range = single_date.strftime("%Y-%m-%d")
        data_exist_for_date = 'Nutrition_Data_For_' + date_in_range

        #PG: Check whether the data for this date has already been inserted into the DB during a previous run
        data_exists = check_data_file_exists(data_exist_for_date, ath_un,
                                             db_host, db_name, superuser_un,
                                             superuser_pw, encr_pass)
        if data_exists:
            with StdoutRedirection(ath_un):
                print((
                    'Nutrition data for {} already downloaded and inserted into DB. Skipping.'
                    .format(date_in_range)))
            with ProgressStdoutRedirection(ath_un):
                print((
                    'Nutrition data for {} already downloaded and inserted into DB. Skipping.'
                    .format(date_in_range)))
            continue
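
        # check_data_file_exists() is defined elsewhere in this module;
        # conceptually it is a simple existence probe along these lines
        # (table and column names are assumptions, not the actual schema):
        #
        #   SELECT EXISTS (SELECT 1 FROM files WHERE file_path = %s);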

        with StdoutRedirection(ath_un):
            print(('Downloading nutrition data: ' + date_in_range))
        with ProgressStdoutRedirection(ath_un):
            print(('Downloading nutrition data: ' + date_in_range))

        try:
            day = client.get_date(single_date)
            meals = day.meals

            for meal in meals:
                meal_name = meal.name
                entries = meal.entries
                with StdoutRedirection(ath_un):
                    print(('****' + meal_name + '****'))
                for item in entries:
                    food_item = item.short_name
                    if food_item is None:
                        food_item = 'Generic'
                    with StdoutRedirection(ath_un):
                        print(food_item)
                    units = item.unit
                    if units is None:
                        units = 'piece'
                    quantity = item.quantity
                    if quantity is None:
                        quantity = 1.0
                    nutrients = item.nutrition_information
                    # Use .get() so a missing nutrient yields None instead of
                    # leaving the corresponding variable unbound
                    fiber_value = nutrients.get('fiber')
                    sodium_value = nutrients.get('sodium')
                    carbohydrates_value = nutrients.get('carbohydrates')
                    calories_value = nutrients.get('calories')
                    fat_value = nutrients.get('fat')
                    protein_value = nutrients.get('protein')

                    sql = """

                    INSERT INTO mfp_nutrition(athlete_id,date,meal,food_item,units,quantity,fiber,sodium,carbohydrates,calories,fat,protein)

                    VALUES
                    ((select id from athlete where mfp_username=%s),%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);
                    
                    """
                    try:
                        # create a cursor
                        cur = conn.cursor()
                        # execute a statement
                        with StdoutRedirection(ath_un):
                            print('Inserting record....')
                        with ProgressStdoutRedirection(ath_un):
                            print('Inserting record....')
                        cur.execute(sql,
                                    (mfp_username, date_in_range, meal_name,
                                     food_item, units, quantity, fiber_value,
                                     sodium_value, carbohydrates_value,
                                     calories_value, fat_value, protein_value))
                        conn.commit()
                        # close the communication with the PostgreSQL
                        cur.close()

                    except (Exception, psycopg2.DatabaseError) as error:
                        with ErrorStdoutRedirection(ath_un):
                            print((str(datetime.datetime.now()) + ' [' +
                                   sys._getframe().f_code.co_name + ']' +
                                   ' Error on line {}'.format(
                                       sys.exc_info()[-1].tb_lineno) +
                                   '-E2-  ' + str(error)))


            # Update the files table once per date, after all meals for the
            # day have been processed
            data_file_path_insert(data_exist_for_date, ath_un, db_host,
                                  db_name, superuser_un, superuser_pw,
                                  encr_pass)
        except Exception as e:
            with ErrorStdoutRedirection(ath_un):
                print(
                    (str(datetime.datetime.now()) + ' [' +
                     sys._getframe().f_code.co_name + ']' +
                     ' Error on line {}'.format(sys.exc_info()[-1].tb_lineno) +
                     '-E3-  ' + str(e)))
            continue

    # close the communication with the PostgreSQL
    if conn is not None:
        conn.close()

    with StdoutRedirection(ath_un):
        print('--- All nutrition data inserted successfully. ---')
    with ProgressStdoutRedirection(ath_un):
        print('--- All nutrition data inserted successfully. ---')
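
A minimal invocation sketch for the function above; every value is a
hypothetical placeholder, not taken from the original code:

import datetime

dwnld_insert_nutrition('mfp_user', 'mfp_secret', 'athlete1',
                       datetime.date(2021, 1, 1), datetime.date(2021, 1, 31),
                       'encryption-passphrase', save_pwd=False,
                       auto_synch=True, db_host='localhost',
                       superuser_un='postgres', superuser_pw='postgres')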