Example #1
    def __init__(self, testing: bool = True, filepath: str = None) -> None:
        data_dir = get_data_dir("aw-server")

        # TODO: Won't work with custom filepath
        current_db_version = detect_db_version(data_dir,
                                               max_version=LATEST_VERSION)

        if current_db_version is not None and current_db_version < LATEST_VERSION:
            # DB file found but was of an older version
            logger.info(
                "Latest version database file found was of an older version")
            logger.info("Creating database file for new version {}".format(
                LATEST_VERSION))
            logger.warning(
                "ActivityWatch does not currently support database migrations, new database file will be empty"
            )

        if not filepath:
            filename = 'peewee-sqlite' + (
                '-testing'
                if testing else '') + ".v{}".format(LATEST_VERSION) + '.db'
            filepath = os.path.join(data_dir, filename)
        self.db = _db
        self.db.init(filepath)
        logger.info("Using database file: {}".format(filepath))

        # db.connect()

        self.bucket_keys = {}  # type: Dict[str, int]
        if not BucketModel.table_exists():
            BucketModel.create_table()
        if not EventModel.table_exists():
            EventModel.create_table()
        self.update_bucket_keys()
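
These peewee-based examples rely on deferred database initialization: _db is created elsewhere without a path and bound later with init(). A minimal self-contained sketch of that pattern, using a simplified stand-in model (not the real aw-server schema):

import os

from peewee import SqliteDatabase, Model, CharField, IntegerField

# Deferred initialization: the path is only supplied later via init()
_db = SqliteDatabase(None)

class BucketModel(Model):  # simplified stand-in, not the actual aw-server model
    key = IntegerField(primary_key=True)
    id = CharField(unique=True)

    class Meta:
        database = _db

filepath = os.path.join("/tmp", "peewee-sqlite-example.db")  # hypothetical path
_db.init(filepath)
_db.connect()
BucketModel.create_table(safe=True)  # no-op if the table already exists
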
Example #2
    def __init__(self, client: ActivityWatchClient) -> None:
        threading.Thread.__init__(self, daemon=True)

        self.client = client

        self.connected = False
        self._stop_event = threading.Event()

        # Buckets that will have events queued to them, will be created if they don't exist
        self._registered_buckets = []  # type: List[Bucket]

        self._attempt_reconnect_interval = 10

        # Setup failed queues file
        data_dir = get_data_dir("aw-client")
        queued_dir = os.path.join(data_dir, "queued")
        if not os.path.exists(queued_dir):
            os.makedirs(queued_dir)

        persistqueue_path = os.path.join(
            queued_dir,
            "{}{}.v{}.persistqueue".format(self.client.client_name, "-testing" if client.testing else "", self.VERSION)
        )
        self._persistqueue = persistqueue.FIFOSQLiteQueue(persistqueue_path, multithreading=True, auto_commit=False)
        self._current = None  # type: Optional[QueuedRequest]
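
persistqueue.FIFOSQLiteQueue gives the request thread a crash-safe backlog: with auto_commit=False an item is only removed once task_done() commits it. A minimal sketch of that put/get/ack cycle with a throwaway path and a stub in place of the real HTTP call:

import persistqueue

def send_to_server(item):
    # hypothetical stub standing in for the actual request to aw-server
    print("sending", item)

q = persistqueue.FIFOSQLiteQueue("/tmp/example.persistqueue",  # throwaway path
                                 multithreading=True, auto_commit=False)
q.put({"endpoint": "/api/0/buckets", "payload": {}})  # illustrative request dict

item = q.get()      # blocks until an item is available
try:
    send_to_server(item)
    q.task_done()   # only now is the removal committed to the on-disk queue
except Exception:
    pass            # if we crash before task_done(), the item survives a restart
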
Example #3
    def __init__(self, client: ActivityWatchClient) -> None:
        threading.Thread.__init__(self, daemon=True)

        self.client = client

        self.connected = False
        self._stop_event = threading.Event()

        # Buckets that will have events queued to them, will be created if they don't exist
        self._registered_buckets = []  # type: List[Bucket]

        self._attempt_reconnect_interval = 10

        # Setup failed queues file
        data_dir = get_data_dir("aw-client")
        queued_dir = os.path.join(data_dir, "queued")
        if not os.path.exists(queued_dir):
            os.makedirs(queued_dir)

        persistqueue_path = os.path.join(
            queued_dir, "{}{}.v{}.persistqueue".format(
                self.client.name, "-testing" if client.testing else "",
                self.VERSION))
        self._persistqueue = persistqueue.FIFOSQLiteQueue(persistqueue_path,
                                                          multithreading=True,
                                                          auto_commit=False)
        self._current = None  # type: Optional[QueuedRequest]
Example #4
    def __init__(self, testing):
        self.testing = testing
        data_dir = get_data_dir("aw-server")

        ds_name = self.sid + ('-testing' if testing else '')
        filename = ds_name + ".v{}".format(LATEST_VERSION) + '.db'
        filepath = os.path.join(data_dir, filename)
        new_db_file = not os.path.exists(filepath)
        self.conn = sqlite3.connect(filepath)
        logger.info("Using database file: {}".format(filepath))

        # Create tables
        self.conn.execute(CREATE_BUCKETS_TABLE)
        self.conn.execute(CREATE_EVENTS_TABLE)
        self.conn.execute(INDEX_BUCKETS_TABLE_ID)
        self.conn.execute(INDEX_EVENTS_TABLE_STARTTIME)
        self.conn.execute(INDEX_EVENTS_TABLE_ENDTIME)
        self.conn.execute("PRAGMA journal_mode=WAL;")
        self.commit()

        if new_db_file:
            logger.info("Created new SQlite db file")
            from aw_datastore import check_for_migration
            check_for_migration(self, ds_name, LATEST_VERSION)

        self.last_commit = datetime.now()
        self.num_uncommited_statements = 0
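
The sqlite3-based variants all follow the same startup pattern: create tables and indexes idempotently, switch the journal to WAL so readers and the writer can coexist, then commit. A standalone sketch of that setup with a simplified schema (not the actual aw-server tables):

import sqlite3

conn = sqlite3.connect("/tmp/example.v1.db")  # hypothetical path

# Idempotent schema setup: safe to run on every start
conn.execute("CREATE TABLE IF NOT EXISTS buckets (id TEXT PRIMARY KEY, created TEXT)")
conn.execute("""CREATE TABLE IF NOT EXISTS events (
                  id INTEGER PRIMARY KEY AUTOINCREMENT,
                  bucket_id TEXT REFERENCES buckets(id),
                  starttime REAL, endtime REAL, datastr TEXT)""")
conn.execute("CREATE INDEX IF NOT EXISTS event_starttime ON events(bucket_id, starttime)")
conn.execute("CREATE INDEX IF NOT EXISTS event_endtime ON events(bucket_id, endtime)")

# WAL lets readers proceed while a writer holds the database
conn.execute("PRAGMA journal_mode=WAL;")
conn.commit()
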
Example #5
    def __init__(self, testing):
        self.testing = testing
        data_dir = get_data_dir("aw-server")

        ds_name = self.sid + ('-testing' if testing else '')
        filename = ds_name + ".v{}".format(LATEST_VERSION) + '.db'
        filepath = os.path.join(data_dir, filename)
        new_db_file = not os.path.exists(filepath)
        self.conn = sqlite3.connect(filepath)
        logger.info("Using database file: {}".format(filepath))

        # Create tables
        self.conn.execute(CREATE_BUCKETS_TABLE)
        self.conn.execute(CREATE_EVENTS_TABLE)
        self.conn.execute(INDEX_BUCKETS_TABLE_ID)
        self.conn.execute(INDEX_EVENTS_TABLE_STARTTIME)
        self.conn.execute(INDEX_EVENTS_TABLE_ENDTIME)
        self.conn.execute("PRAGMA journal_mode=WAL;")
        self.commit()

        if new_db_file:
            logger.info("Created new SQlite db file")
            from aw_datastore import check_for_migration
            check_for_migration(self, ds_name, LATEST_VERSION)

        self.last_commit = datetime.now()
        self.num_uncommited_statements = 0
Example #6
def get_device_id() -> str:
    path = Path(get_data_dir("aw-server")) / "device_id"
    if path.exists():
        with open(path, "r") as f:
            return f.read()
    else:
        uuid = str(uuid4())
        with open(path, "w") as f:
            f.write(uuid)
        return uuid
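
get_device_id is read-or-create: the first call persists a fresh UUID under the aw-server data directory, and every later call returns that same string. A trivial usage sketch, assuming the function above is importable:

device_id = get_device_id()
assert device_id == get_device_id()  # stable across calls; the id is cached on disk
print("device id:", device_id)
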
Example #7
def check_for_migration(datastore: AbstractStorage):
    data_dir = get_data_dir("aw-server")

    if datastore.sid == "sqlite":
        peewee_type = "peewee-sqlite"
        peewee_name = peewee_type + ("-testing" if datastore.testing else "")
        # Migrate from peewee v2
        peewee_db_v2 = detect_db_files(data_dir, peewee_name, 2)
        if len(peewee_db_v2) > 0:
            peewee_v2_to_sqlite_v1(datastore)
Example #8
def check_for_migration(datastore: AbstractStorage, datastore_name: str, version: int):
    data_dir = get_data_dir("aw-server")

    if datastore.sid == "sqlite":
        peewee_type = "peewee-sqlite"
        peewee_name = peewee_type + ("-testing" if datastore.testing else "")
        # Migrate from peewee v2
        peewee_db_v2 = detect_db_files(data_dir, peewee_name, 2)
        if len(peewee_db_v2) > 0:
            peewee_v2_to_sqlite_v1(datastore)
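
detect_db_files itself is not shown in these examples; judging by filenames such as "peewee-sqlite-testing.v2.db" used above, it plausibly globs the data directory for a datastore name and version. A purely illustrative sketch under that assumption (not the actual aw_datastore implementation):

import glob
import os
from typing import List, Optional

def detect_db_files(data_dir: str, datastore_name: str,
                    version: Optional[int] = None) -> List[str]:
    # Hypothetical: match e.g. "peewee-sqlite.v2.db", or any version when version is None
    pattern = "{}.v{}.db".format(datastore_name, version if version is not None else "*")
    return [os.path.basename(p) for p in glob.glob(os.path.join(data_dir, pattern))]
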
Example #9
def main() -> None:
    """
    Start aw-watcher-terminal
    See the docs for usage
    """

    args = parse_args()

    # Load configurations
    setup_logging(client_id,
                  testing=args.testing,
                  verbose=args.verbose,
                  log_stderr=True,
                  log_file=True)

    # Create MessageHandler to which the fifo messages will be passed
    with MessageHandler(testing=args.testing) as message_handler:

        # Setup and open named pipe
        fifo_path = "{}/aw-watcher-terminal-fifo".format(
            get_data_dir(client_id))
        setup_named_pipe(fifo_path)
        pipe_fd = os.open(fifo_path, os.O_RDONLY | os.O_NONBLOCK)

        with os.fdopen(pipe_fd) as pipe:
            logger.info("Listening to pipe: {}".format(fifo_path))
            """
            Periodically read pipe for new messages
            and update the event queue
            """
            while True:
                # Read new messages from the named pipe
                try:
                    message = pipe.read()
                    if message:
                        message_handler.handle_fifo_message(message)
                except Exception as e:
                    logger.error(e)
                    traceback.print_exc()

                # Update event queue of the message handler
                try:
                    message_handler.update_event_queue()
                except Exception as e:
                    logger.error(e)
                    traceback.print_exc()

                sleep(1)
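
The watcher above only reads the FIFO; its shell integration is expected to write into it. A minimal sketch of the writing side, assuming the same pipe path (the path and message format here are placeholders, not the real protocol):

import os

fifo_path = "/path/to/aw-watcher-terminal-fifo"  # placeholder; the watcher derives it from get_data_dir()

if not os.path.exists(fifo_path):
    os.mkfifo(fifo_path)

# Opening a FIFO for writing blocks until a reader (the watcher) has opened it
with open(fifo_path, "w") as pipe:
    pipe.write("illustrative message from the shell hook\n")
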
Example #10
    def __init__(self, testing: bool = True, filepath: str = None) -> None:
        data_dir = get_data_dir("aw-server")

        if not filepath:
            filename = 'peewee-sqlite' + ('-testing' if testing else '') + ".v{}".format(LATEST_VERSION) + '.db'
            filepath = os.path.join(data_dir, filename)
        self.db = _db
        self.db.init(filepath)
        logger.info("Using database file: {}".format(filepath))

        self.db.connect()

        self.bucket_keys = {}  # type: Dict[str, int]
        BucketModel.create_table(safe=True)
        EventModel.create_table(safe=True)
        self.update_bucket_keys()
Example #11
    def __init__(self, testing: bool = True, filepath: str = None) -> None:
        data_dir = get_data_dir("aw-server")

        if not filepath:
            filename = 'peewee-sqlite' + ('-testing' if testing else '') + ".v{}".format(LATEST_VERSION) + '.db'
            filepath = os.path.join(data_dir, filename)
        self.db = _db
        self.db.init(filepath)
        logger.info("Using database file: {}".format(filepath))

        self.db.connect()

        self.bucket_keys: Dict[str, int] = {}
        BucketModel.create_table(safe=True)
        EventModel.create_table(safe=True)
        self.update_bucket_keys()
Example #12
    def __init__(self, testing: bool = True, filepath: str = None) -> None:
        data_dir = get_data_dir("aw-server")

        if not filepath:
            filename = ("peewee-sqlite" + ("-testing" if testing else "") +
                        f".v{LATEST_VERSION}" + ".db")
            filepath = os.path.join(data_dir, filename)
        self.db = _db
        self.db.init(filepath)
        logger.info(f"Using database file: {filepath}")

        self.db.connect()

        self.bucket_keys: Dict[str, int] = {}
        BucketModel.create_table(safe=True)
        EventModel.create_table(safe=True)
        self.update_bucket_keys()
Example #13
    def __init__(self, testing: bool = True, filepath: str = None) -> None:
        data_dir = get_data_dir("aw-server")

        if not filepath:
            filename = ("peewee-sqlite" + ("-testing" if testing else "") +
                        ".v{}".format(LATEST_VERSION) + ".db")
            filepath = os.path.join(data_dir, filename)
        self.db = _db
        self.db.init(filepath)
        logger.info("Using database file: {}".format(filepath))

        self.db.connect()

        self.bucket_keys: Dict[str, int] = {}
        self.applications_keys: Dict[str, int] = {}
        BucketModel.create_table(safe=True)
        EventModel.create_table(safe=True)
        ApplicationsModel.create_table(safe=True)
        SummaryModel.create_table(safe=True)
        # ApplicationCategoryModel.create_table(safe=True)
        # ApplicationCategoryMappingModel.create_table(safe=True)
        self.update_bucket_keys()
        self.update_application_keys()
Example #14
    def __init__(self,
                 testing,
                 filepath: str = None,
                 enable_lazy_commit=True) -> None:
        self.testing = testing
        self.enable_lazy_commit = enable_lazy_commit

        # Ignore the migration check if custom filepath is set
        ignore_migration_check = filepath is not None

        ds_name = self.sid + ("-testing" if testing else "")
        if not filepath:
            data_dir = get_data_dir("aw-server")
            filename = ds_name + ".v{}".format(LATEST_VERSION) + ".db"
            filepath = os.path.join(data_dir, filename)

        new_db_file = not os.path.exists(filepath)
        self.conn = sqlite3.connect(filepath)
        logger.info("Using database file: {}".format(filepath))

        # Create tables
        self.conn.execute(CREATE_BUCKETS_TABLE)
        self.conn.execute(CREATE_EVENTS_TABLE)
        self.conn.execute(INDEX_BUCKETS_TABLE_ID)
        self.conn.execute(INDEX_EVENTS_TABLE_STARTTIME)
        self.conn.execute(INDEX_EVENTS_TABLE_ENDTIME)
        self.conn.execute("PRAGMA journal_mode=WAL;")
        self.commit()

        if new_db_file and not ignore_migration_check:
            logger.info("Created new SQlite db file")
            from aw_datastore import check_for_migration

            check_for_migration(self)

        self.last_commit = datetime.now()
        self.num_uncommited_statements = 0
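
Example #14 tracks last_commit and num_uncommited_statements, but the commit policy itself falls outside the snippet; the attributes suggest a count- and time-bounded lazy commit. A sketch of such a policy under that assumption (the thresholds and bookkeeping are made up, not aw-server's actual logic):

from datetime import datetime, timedelta

def conditional_commit(self, num_statements: int = 1) -> None:
    # Hypothetical helper for the class above: flush once enough statements
    # have piled up or enough time has passed since the last commit.
    self.num_uncommited_statements += num_statements
    too_many = self.num_uncommited_statements > 50                       # made-up limit
    too_old = datetime.now() - self.last_commit > timedelta(seconds=10)  # made-up limit
    if not self.enable_lazy_commit or too_many or too_old:
        self.conn.commit()
        self.last_commit = datetime.now()
        self.num_uncommited_statements = 0
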
Example #15
    def __init__(self, testing: bool = True, filepath: str = None) -> None:
        data_dir = get_data_dir("aw-server")

        if not filepath:
            filename = 'peewee-sqlite' + ('-testing' if testing else '') + ".v{}".format(LATEST_VERSION) + '.db'
            filepath = os.path.join(data_dir, filename)
        self.db = _db
        self.db.init(filepath)
        logger.info("Using database file: {}".format(filepath))

        self.db.connect()

        self.bucket_keys = {}  # type: Dict[str, int]
        if not BucketModel.table_exists():
            BucketModel.create_table()
        if not EventModel.table_exists():
            EventModel.create_table()
        self.update_bucket_keys()

#         self.db.execute_sql("""
#         create table if not exists resource (
#   id integer PRIMARY KEY AUTOINCREMENT,
#   eventmodel_id integer REFERENCES eventmodel (id),
#   timestamp datetime,
#   duration decimal(10,5),
#   project varchar(255),
#   identifier varchar(255),
#   type varchar(255)
# );
#         """)

        self.db.execute_sql("""
        create table if not exists resource (
          id integer PRIMARY KEY AUTOINCREMENT,
          eventmodel_id integer REFERENCES eventmodel (id),
          
          timestamp datetime,
          duration decimal(10,5),
          tracker_name varchar(255),
          tracker_type varchar(255),
          app_name varchar(255),
          app_title varchar(255),
          reference varchar(255),
          project varchar(255)
        );
        """)


        self.db.execute_sql("""
            create trigger if not exists add_resource
                after insert on eventmodel
                for each row
                    begin
                    insert into resource (eventmodel_id, timestamp, duration, tracker_name, tracker_type, app_name, app_title, reference, project)
                    values (
                        new.id,
                        new.timestamp,
                        new.duration,
                        (
                          select client
                          from bucketmodel
                          where key = new.bucket_id
                        ),  --  as tracker_name
                        (
                            select type
                            from bucketmodel
                            where key = new.bucket_id
                        ), --  as tracker_type
                        (
                            select
                                case type
                                    when 'app.editor.activity' then NULL
                                    when 'web.tab.current' then NULL
                                    when 'currentwindow' then json_extract(new.datastr, '$.app')
                                end
                            from bucketmodel
                            where key = new.bucket_id
                        ),  --  as app_name
                        (
                            select
                                case type
                                    when 'app.editor.activity' then NULL
                                    when 'web.tab.current' then
                                        case bucketmodel.id
                                            when 'aw-watcher-web-firefox' then json_extract(new.datastr, '$.title') || ' - Mozilla Firefox'
                                            when 'aw-watcher-web-chrome' then json_extract(new.datastr, '$.title') || ' - Google Chrome'
                                        end
                                    when 'currentwindow' then json_extract(new.datastr, '$.title')
                                end
                            from bucketmodel
                            where key = new.bucket_id
                        ),  --  as app_title
                        (
                            select
                                case type
                                    when 'app.editor.activity' then json_extract(new.datastr, '$.file')
                                    when 'web.tab.current' then json_extract(new.datastr, '$.url')
                                    when 'currentwindow' then json_extract(new.datastr, '$.title')
                                end
                            from bucketmodel
                            where key = new.bucket_id
                        ),  --  as reference
                        (
                          select json_extract(datastr, '$.projectPath')
                          from eventmodel
                          where json_extract(datastr, '$.project') is not null
                            and timestamp >= datetime('now', '-10 minutes')
                          order by timestamp desc
                          limit 1
                        ) -- as project
                    );
            end;
        """)

        self.db.execute_sql("""
        CREATE TRIGGER if not exists update_duration AFTER UPDATE ON eventmodel
        WHEN OLD.duration != new.duration
        begin
            update resource
            set duration = new.duration
            where eventmodel_id = new.id;
        end;
        """)