Example #1
0
def token_to_shelf(db_entry, stoken):
    """
    Persist a Strava OAuth token for an existing user.

    Reads the active database configuration from the shelve store,
    attaches the token fields to the user record under the key
    "strava_bearer", and updates that record in the database.

    :param db_entry: dict, user record; must contain "user_hash"
    :param stoken: dict, Strava token response; expects the keys
        "access_token", "refresh_token", "expires_at", "expires_in",
        "token_type" and a nested "athlete" dict with an "id"
    :return: None
    """
    db_temp = ShelveHandler()
    db_dict = db_temp.read_shelve_by_keys([
        "db_name",
        "db_type",
        "db_path",
    ])

    dbh = DataBaseHandler(db_type=db_dict["db_type"])
    dbh.set_db_path(db_path=db_dict["db_path"])
    dbh.set_db_name(db_name=db_dict["db_name"])

    # NOTE(review): debug prints of the raw token and the user entry were
    # removed here — access/refresh tokens are secrets and must not be
    # written to stdout/logs.
    db_entry["strava_bearer"] = {
        "access_token": stoken["access_token"],
        "refresh_token": stoken["refresh_token"],
        "expires_at": stoken["expires_at"],
        "expires_in": stoken["expires_in"],
        "token_type": stoken["token_type"],
        "athlete_id": stoken["athlete"]["id"]
    }
    dbh.modify_user(user_hash=db_entry.get("user_hash"),
                    key="strava_bearer",
                    value=db_entry["strava_bearer"],
                    mode="update")
Example #2
0
    def _init_database_handler(self):
        """
        Create the database handler from ``self.core_info``.

        Requires ``core_info`` to provide "db_hash" (the unique user
        hash), "db_type", "db_path" and "db_name". When the user hash is
        missing the process is terminated, because written data could
        not be attributed to a user otherwise.

        :return: None; sets ``self.dbh`` as a side effect.
        """
        if self.core_info.get("db_hash") is None:
            print("The core_info object does not contain an unique user hash")
            print("We can not write data to the database without knowing who")
            print("whom the data belong.")
            print("Check!")
            # NOTE(review): exit() kills the whole process; consider raising
            # a dedicated exception instead so callers can recover.
            exit()

        self.dbh = DataBaseHandler(db_type=self.core_info["db_type"])
        self.dbh.set_db_path(db_path=self.core_info["db_path"])
        self.dbh.set_db_name(db_name=self.core_info["db_name"])
Example #3
0
File: data.py  Project: XeBoris/sta-api
def all_users():
    """Return every known user name as a JSON response."""
    handler = DataBaseHandler(db_type=global_dict["db-type"])
    handler.set_db_path(db_path=global_dict["db-path"])
    handler.set_db_name(db_name=global_dict["db-name"])

    user_names = handler.get_all_users(by="user_username")
    del handler

    return jsonify(user_names)
Example #4
0
File: data.py  Project: XeBoris/sta-api
def get_user(username):
    """Look up a single user by (escaped) username; empty list if unknown."""
    user_name = escape(username)

    handler = DataBaseHandler(db_type=global_dict["db-type"])
    handler.set_db_path(db_path=global_dict["db-path"])
    handler.set_db_name(db_name=global_dict["db-name"])

    known_users = handler.get_all_users(by="user_username")

    # Only query for the record when the name is actually known.
    user_entry = []
    if user_name in known_users:
        user_entry = handler.search_user(user=user_name, by="username")

    del handler

    return jsonify(user_entry)
Example #5
0
File: data.py  Project: XeBoris/sta-api
def get_leaf(branch_hash):
    """
    Return all leaves of one track (branch) as JSON strings.

    The query parameter "username" selects the user; ``branch_hash``
    identifies the track. Every leaf attached to that track is read as
    a DataFrame and serialized with ``DataFrame.to_json``.

    :param branch_hash: str, hash of the requested track
    :return: dict mapping leaf name -> JSON string (empty if the
        branch hash is unknown for this user)
    """
    user_name = request.args.get('username')
    branch_hash = escape(branch_hash)
    #leaf_hash = escape(leaf_hash)

    dbh = DataBaseHandler(db_type=global_dict["db-type"])
    dbh.set_db_path(db_path=global_dict["db-path"])
    dbh.set_db_name(db_name=global_dict["db-name"])

    user_entry = dbh.search_user(user=user_name, by="username")
    user_hash = user_entry[0].get("user_hash")

    user_tracks = dbh.read_branch(key="user_hash", attribute=user_hash)

    df = pd.DataFrame(user_tracks)
    df = df[df["track_hash"] == branch_hash]
    if df.empty:
        # Unknown branch hash for this user: nothing to return
        # (previously this crashed on .iloc[0]).
        return {}

    # The "leaf" cell is a dict of leaf descriptors keyed by an id we
    # do not need; only name and leaf_hash matter here.
    leaves = df["leaf"].iloc[0]

    ret_dict = {}
    for leaf in leaves.values():
        leaf_name = leaf.get("name")
        df_i = dbh.read_leaf(directory=leaf_name,
                             leaf_hash=leaf.get("leaf_hash"),
                             leaf_type="DataFrame")
        ret_dict[leaf_name] = df_i.to_json()

    return ret_dict
Example #6
0
File: data.py  Project: XeBoris/sta-api
def get_branches():
    """
    Return all tracks (branches) of a user as one JSON document.

    The query parameter "username" selects the user. Millisecond epoch
    columns are converted to datetimes and the tracks are sorted by
    start time, newest first.

    :return: flask JSON response wrapping the DataFrame's JSON string
    """
    user_name = request.args.get('username')

    dbh = DataBaseHandler(db_type=global_dict["db-type"])
    dbh.set_db_path(db_path=global_dict["db-path"])
    dbh.set_db_name(db_name=global_dict["db-name"])

    user_entry = dbh.search_user(user=user_name, by="username")
    user_hash = user_entry[0].get("user_hash")

    user_tracks = dbh.read_branch(key="user_hash", attribute=user_hash)

    df = pd.DataFrame(user_tracks)

    # All timestamps are stored as epoch milliseconds.
    for col in ("start_time", "end_time", "created_at", "updated_at"):
        df[col] = pd.to_datetime(df[col], unit="ms")
    df = df.sort_values(by="start_time", ascending=False)

    return jsonify(df.to_json())
Example #7
0
def get_user(user):
    """Fetch the first matching user record for *user* from the database."""
    shelve_handler = ShelveHandler()
    config = shelve_handler.read_shelve_by_keys([
        "db_name",
        "db_type",
        "db_path",
    ])

    handler = DataBaseHandler(db_type=config["db_type"])
    handler.set_db_path(db_path=config["db_path"])
    handler.set_db_name(db_name=config["db_name"])

    matches = handler.search_user(user=user, by="username")
    entry = matches[0]

    del handler
    del shelve_handler
    return entry
Example #8
0
class Runtastic():
    """
    Importer for Runtastic database dumps.

    Reads sport sessions (plus their GPS and elevation data) from a
    Runtastic export directory and writes them as branches and leaves
    into the sta-core database via DataBaseHandler.
    """

    def __init__(self):
        # Input configuration (set via setup_path / configure_core):
        self.input_type = None
        self.path = None
        self.core_info = None

        # Sub-directories of a Runtastic database dump:
        self.path_photos = None
        self.path_purchases = None
        self.path_routes = None
        self.path_sessions = None
        self.path_user = None
        self.path_weight = None

        # Data base handlers
        self.dbh = None

        #Init second classes:
        self.bp = Blueprint()  #Blueprints

        # Maps Runtastic sport-type ids onto sta-core sports types.
        self.tm = TypeMapper()
        self.tm.set_track_source(track_source="runtastic")
        self.tm.loader()

    def __del__(self):
        del self.bp
        del self.tm

    def _init_database_handler(self):
        """
        Create ``self.dbh`` from the ``core_info`` configuration.

        Aborts when ``core_info`` lacks the user hash ("db_hash"),
        because imported data could not be attributed to a user
        otherwise.

        :return: None; sets ``self.dbh`` as a side effect.
        """
        if self.core_info.get("db_hash") is None:
            print("The core_info object does not contain an unique user hash")
            print("We can not write data to the database without knowing who")
            print("whom the data belong.")
            print("Check!")
            # raise SystemExit mirrors the former exit() call without
            # depending on the site module's exit() helper.
            raise SystemExit

        self.dbh = DataBaseHandler(db_type=self.core_info["db_type"])
        self.dbh.set_db_path(db_path=self.core_info["db_path"])
        self.dbh.set_db_name(db_name=self.core_info["db_name"])

    def _close_database_handler(self):
        """Release the database handler created by _init_database_handler."""
        del self.dbh

    def _read_json(self, fjson):
        """
        A simple private member function for reading a json file.

        :param fjson: str, path of the json file
        :return: the deserialized json content
        """
        # The with-statement closes the file; no explicit close() needed.
        with open(fjson) as f:
            data = json.load(f)
        return data

    def _get_all_sport_session_ids(self):
        """
        Read all your sport sessions from the available database dump.
        Assume that the <UUID>.json file structure holds for unique
        files. 1 UUID == 1 file == 1 sports activity

        :return: list of session id strings (file names without ".json")
        """
        session_ids = []
        # os.walk + break: only the top level of path_sessions is scanned.
        for (dirpath, dirnames, filenames) in os.walk(self.path_sessions):
            session_ids.extend(filenames)
            break

        session_ids = [i.replace(".json", "") for i in session_ids]
        return session_ids

    def configure_core(self, core_info):
        """
        Core configuration is meant to setup the database handler of the sta-core
        for the third parity application "Runtastic" in order to write the gained
        information to the right database setup.
        A core configuration consists of a dictionary with four major entries:
        - db_path -> The path to the database
        - db_name -> The name of database
        - db_type -> The type of the database
        - db_hash -> The unique hash of an existing user in that database.

        :param core_info: dict
        :return: None
        """
        self.core_info = core_info

    def setup_path(self, type=None, path=None):
        """
        Configure the input source.

        :param type: str, input kind; only "database" derives sub-paths
        :param path: str, root directory of the Runtastic export
        :return: None
        """
        self.input_type = type
        self.path = path
        if type == "database":
            self.path_photos = os.path.join(self.path, "Photos")
            self.path_purchases = os.path.join(self.path, "Purchases")
            self.path_routes = os.path.join(self.path, "Routes")
            self.path_sessions = os.path.join(self.path, "Sport-sessions")
            self.path_user = os.path.join(self.path, "User")
            self.path_weight = os.path.join(self.path, "Weight")

    def get_session_Ids(self):
        """Return all session ids for the configured input type."""
        if self.input_type == "database":
            return self._get_all_sport_session_ids()

    def _get_rt_db_track_info(self, session_id):
        """
        Get Runtastic Database Track Information

        :param session_id:
        :return:
        """
        pass

    def _read_session_by_id_from_database(self, session_id):
        """
        Read one session (info + GPS + elevation) from the database dump.

        :param session_id: str, the <UUID> of the session files
        :return: dict with keys "timestamp", "timestampName",
            "json_info" (branch blueprint), "json_info_meta" and
            "gpx" (list of merged point dicts)
        """
        # create temporally session path:
        json_path_info = os.path.join(self.path_sessions, f"{session_id}.json")
        json_path_gps = os.path.join(self.path_sessions, "GPS-data",
                                     f"{session_id}.json")
        json_path_elv = os.path.join(self.path_sessions, "Elevation-data",
                                     f"{session_id}.json")

        # read session info:
        json_info = self._read_json(json_path_info)

        # We will receive meta data from RunTastic First:
        blueprint_session = self.bp.get_branch_blueprint(version="2")

        blueprint_session["start_time"] = json_info.get("start_time")
        blueprint_session["end_time"] = json_info.get("end_time")
        blueprint_session["created_at"] = json_info.get("created_at")
        blueprint_session["updated_at"] = json_info.get("updated_at")
        blueprint_session["title"] = json_info.get("notes")
        blueprint_session["notes"] = json_info.get("notes")
        blueprint_session["timezone_offset"] = json_info.get(
            "start_time_timezone_offset")
        blueprint_session["sports_type"] = self.tm.mapper(
            json_info.get("sport_type_id"))
        blueprint_session["source"] = "RTDB"

        # We extract timestamps and timestamp names.
        # start_time is epoch milliseconds; timezone-aware fromtimestamp
        # replaces the deprecated utcfromtimestamp.
        start_utc = datetime.datetime.fromtimestamp(
            json_info["start_time"] / 1000, tz=datetime.timezone.utc)
        dtime = start_utc.strftime('%Y-%m-%dT%H:%M:%SZ')
        dtime_name = start_utc.strftime('%Y-%m-%d-%H-%M')

        json_info_meta = self.bp.runtastic_metadata(json_info)

        # We will receive the track (gpx) related information about the track
        # This needs two steps:
        # 1) Read and transform the json objects from database dump (if existing)
        # 2) Merge them into one object with all "raw" information what is available
        # - step 1)
        if os.path.exists(json_path_gps):
            json_gps = self._read_json(json_path_gps)
            data_gps = self.bp.runtastic_session_lonlat(json_gps)
        else:
            data_gps = {}

        if os.path.exists(json_path_elv):
            json_ele = self._read_json(json_path_elv)
            data_ele = self.bp.runtastic_session_elevation(json_ele)
        else:
            data_ele = {}

        # - step 2: merge GPS and elevation samples on their timestamp key.
        # As before, sessions with elevation data but no GPS data yield no
        # points at all.
        data_gpx_final = []
        for key, val in data_gps.items():
            ele = data_ele.get(key)
            ret = {**val, **ele} if ele is not None else val
            ret["timestamp"] = key

            # adjust timestamp (Move to UTC):
            ret["timestamp"] = datetime.datetime.strptime(
                ret["timestamp"], '%Y-%m-%d %H:%M:%S %z').timestamp()

            data_gpx_final.append(ret)

        # data_gpx_final #final gpx object
        return {
            "timestamp": dtime,
            "timestampName": dtime_name,
            "json_info": blueprint_session,
            "json_info_meta": json_info_meta,
            "gpx": data_gpx_final
        }

    def _write_df_leaf(self, track_hash, leaf_name, columns, leaf_df,
                       success_msg):
        """
        Create a leaf configuration and write *leaf_df* as one leaf.

        :param track_hash: str, hash of the owning track
        :param leaf_name: str, leaf directory/name
        :param columns: list of column names for the leaf config
        :param leaf_df: pandas.DataFrame holding the leaf data
        :param success_msg: str printed when the write succeeds
        :return: the write_leaf result
        """
        leaf_config = self.dbh.create_leaf_config(leaf_name=leaf_name,
                                                  track_hash=track_hash,
                                                  columns=columns)
        r = self.dbh.write_leaf(track_hash=track_hash,
                                leaf_config=leaf_config,
                                leaf=leaf_df,
                                leaf_type="DataFrame")
        if r is True:
            print(success_msg)
        return r

    def import_runtastic_sessions(self, overwrite=False):
        """
        You can always import sessions based on the source of runtastic
        Nevertheless, the type is important of how import the sessions

        :param overwrite: bool, kept for interface compatibility
            (currently unused)
        :return: None
        """
        self._init_database_handler()

        #extract the user hash from the core_info dictionary
        user_hash = self.core_info.get("db_hash")

        if self.input_type == "database":
            # This if conditions handles the runtastic database
            # dump as input only:

            # Get all session IDs first:
            all_session_ids = self._get_all_sport_session_ids()

            for session_id in all_session_ids:
                # Extract runtastic relevant data from the database dump
                # and fetch the information from the return object, which is
                # a json object:
                rt_obj = self._read_session_by_id_from_database(session_id)

                # We create the branch first from the database dump:
                rt_json = rt_obj.get("json_info")

                #use the branch to write print outputs for now:
                print(
                    f"Write: {rt_json.get('title')} of sports type {rt_json.get('sports_type')} into DB"
                )

                # We add a track_hash to each track to make it unique:
                hash_str = f"{rt_json.get('start_time')}{rt_json.get('end_time')}"
                hash_str = hashlib.md5(
                    hash_str.encode("utf-8")).hexdigest()[0:8]
                rt_json["track_hash"] = hash_str

                # We add the user specific hash to the track/branch for identification:
                rt_json["user_hash"] = user_hash

                self.dbh.write_branch(db_operation="update",
                                      track=rt_json,
                                      track_hash=hash_str)

                # Leaves need gpx points; skip sessions without any:
                if len(rt_obj.get("gpx")) == 0:
                    continue
                df = pd.DataFrame.from_dict(rt_obj.get("gpx"))

                # GPS LEAF:
                # HINT:
                # - These columns are also defined in blueprint.py for the
                #   GPS leaf. Don't add/remove something that is not in
                #   line with it.
                obj_gps_definition = [
                    "timestamp", "longitude", "latitude", "altitude",
                    "accuracy_v", "accuracy_h", "version"
                ]
                # Index.intersection replaces the deprecated
                # "df.columns & list" operator form.
                self._write_df_leaf(
                    hash_str, "gps", obj_gps_definition,
                    df[df.columns.intersection(obj_gps_definition)],
                    "GPS leaf written")

                # SECOND LEAF (distances/speeds):
                # HINT: columns must also match blueprint.py.
                obj_dist_definition = [
                    "timestamp", "speed", "duration", "distance",
                    "elevation_gain", "elevation_loss", "elevation", "version"
                ]
                self._write_df_leaf(
                    hash_str, "runtastic_distances", obj_dist_definition,
                    df[df.columns.intersection(obj_dist_definition)],
                    "Runtastic distance leaf written")

                # Third Leaf: Meta data information.
                # One json object -> one-row DataFrame; all columns are kept.
                df_metadata = pd.DataFrame.from_dict(
                    [rt_obj.get("json_info_meta")])
                self._write_df_leaf(
                    hash_str, "runtastic_metadata", list(df_metadata.keys()),
                    df_metadata, "Runtastic metadata leaf written")

        #Close the database handler
        self._close_database_handler()
Example #9
0
def get_handler():
    """Build a DataBaseHandler configured from the global settings."""
    handler = DataBaseHandler(db_type=global_dict["db-type"])
    handler.set_db_path(db_path=global_dict["db-path"])
    handler.set_db_name(db_name=global_dict["db-name"])
    return handler
Example #10
0
def db_exists():
    """Report whether the configured database already exists."""
    handler = DataBaseHandler(db_type=global_dict["db-type"])
    handler.set_db_path(db_path=global_dict["db-path"])
    handler.set_db_name(db_name=global_dict["db-name"])
    return handler.get_database_exists()
Example #11
0
File: cli.py  Project: XeBoris/sta-cli
def main():
    """Console script for sportstrackeranalyzer.

    Parses the command line, where the first positional argument is a
    sub-command (createDB, loadDB, setUser, addUser, listShelve,
    listUser, modUser, addTracks, findTracks, removeTracks,
    removeLeaves, authorizeStrava) and dispatches to the matching
    handler.

    :return: int exit status (0 on success, 1 on missing sub-command)
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('_', nargs='*')
    parser.add_argument('--type', dest='type', type=str)
    parser.add_argument('--path', dest='path', type=str)

    parser.add_argument('--key', dest='key', type=str)
    parser.add_argument('--value', dest='value', type=str)
    parser.add_argument('--date', dest='date', type=str)
    parser.add_argument('--hash', dest='hash', type=str)
    parser.add_argument('--overwrite', dest='overwrite', action='store_true')

    parser.add_argument('--track-source', dest='track_source',
                        type=str)  #runtastic, strava,...
    parser.add_argument('--source-type', dest='source_type',
                        type=str)  #db-dump, gps, online

    args = parser.parse_args()

    print("Arguments: " + str(args))
    print("Replace this message by putting your code into "
          "sportstrackeranalyzer.cli.main")

    print(args._)

    # Guard: without a sub-command, args._[0] below would raise IndexError.
    if not args._:
        print("No command given.")
        return 1

    if args._[0] == "createDB":
        db_name = args._[1]
        db_path = args.path
        db_type = args.type
        create_db(db_type=db_type, db_path=db_path, db_name=db_name)

    elif args._[0] == "loadDB":
        db_name = args._[1]
        db_path = args.path
        db_type = args.type

        load_db(db_type=db_type, db_path=db_path, db_name=db_name)

    elif args._[0] == "setUser":
        db_user = args._[1]
        set_user(db_user=db_user)

    elif args._[0] == "addUser":
        """
        We add a new user to our Database
        """
        #Bind the CLI interface to the database core:
        db_temp = ShelveHandler()
        db_dict = db_temp.read_shelve_by_keys(
            ["db_name", "db_type", "db_path"])

        dbh = DataBaseHandler(db_type=db_dict["db_type"])
        dbh.set_db_path(db_path=db_dict["db_path"])
        dbh.set_db_name(db_name=db_dict["db_name"])

        #Getting CLI response handler from sta-cli
        init_user_dictionary = collect_cli_user_info()

        dbh.create_user(init_user_dictionary)
        del dbh
        del db_temp

    elif args._[0] == "listShelve":
        #all allowed key arguments:
        key_args = ["all-keys", "key-values", "shelve-path"]

        #Handle simple_action in the core module:
        shelve_key = args.key
        ret = list_shelve(shelve_key)

        #prepare CLI output:
        print(f"Overview: {shelve_key}")
        print()
        if shelve_key == "all-keys":
            print(f"All keys are: {ret}")
        elif shelve_key == "key-values":
            # Values are either plain strings or one level of nesting.
            for k, val in ret.items():
                if isinstance(val, str):
                    print(f" [{k}] - {val}")
                elif isinstance(val, dict):
                    for j, jval in val.items():
                        print(f" [{k}] - [{j}] - {jval}")
        elif shelve_key == "shelve-path":
            print(f"Current shelve paths: {ret}")
        else:
            print("You need to specify one of the following arguments:")
            for i in key_args:
                print(f"  --key {i}")
        print()

    elif args._[0] == "listUser":
        """
        Here we are performing operations regarding listing users from the
        connected database core.
        """

        #Bind the CLI interface to the database core:
        db_temp = ShelveHandler()
        db_dict = db_temp.read_shelve_by_keys(
            ["db_name", "db_type", "db_path"])

        dbh = DataBaseHandler(db_type=db_dict["db_type"])
        dbh.set_db_path(db_path=db_dict["db_path"])
        dbh.set_db_name(db_name=db_dict["db_name"])

        # Print one line per known user hash.
        # (A leftover debug query for a hard-coded username was removed here.)
        all_hashes = dbh.get_all_users("user_hash")

        for i_hash in all_hashes:
            i_user = dbh.search_user(i_hash, by="hash")
            if len(i_user) == 0:
                continue
            i_user = i_user[0]
            user_line = f"{i_user.get('user_surname')} {i_user.get('user_lastname')}: {i_user.get('user_username')} / {i_user.get('user_hash')}"

            print(user_line)

        del db_temp
        del dbh

    elif args._[0] == "modUser":
        # prepare to modify the user database
        db_key = args.key
        db_value = args.value
        db_date = args.date
        mod_user(key=db_key, value=db_value, date=db_date)

    elif args._[0] == "addTracks":

        #Handle argument inputs
        track_source = args.track_source
        source_type = args.source_type
        overwrite = args.overwrite
        input_path = args.path
        date_obj = args.date

        # Bind the CLI interface to the database core:
        db_temp = ShelveHandler()
        db_dict = db_temp.read_shelve_by_keys(db_temp.get_all_shelve_keys())

        dbh = DataBaseHandler(db_type=db_dict["db_type"])
        dbh.set_db_path(db_path=db_dict["db_path"])
        dbh.set_db_name(db_name=db_dict["db_name"])

        dbh_info = {
            "db_type": db_dict["db_type"],
            "db_path": db_dict["db_path"],
            "db_name": db_dict["db_name"],
            "db_hash": db_dict["db_hash"]
        }

        add_tracks(core_information=dbh_info,
                   track_source=track_source,
                   source_type=source_type,
                   input_path=input_path,
                   overwrite=overwrite,
                   date_obj=date_obj)

    elif args._[0] == "findTracks":
        track_source = args.track_source
        source_type = args.source_type
        date = args.date

        find_tracks(track_source, source_type, date)

    elif args._[0] == "removeTracks":
        track_hash = args.hash

        remove_tracks(track_hash)

    elif args._[0] == "removeLeaves":
        track_hash = args.hash

        remove_leaves(track_hash)

    elif args._[0] == "authorizeStrava":
        # Local imports: only needed for the OAuth browser flow.
        from .strava_auth_routes import urls_blueprint
        import webbrowser
        from threading import Timer
        from flask import Flask

        def open_browser():
            webbrowser.open_new('http://127.0.0.1:5000/')

        app = Flask(__name__)
        app.register_blueprint(urls_blueprint)
        # Give the server a second to come up before opening the browser.
        Timer(1, open_browser).start()
        app.run(debug=True)

    return 0