Esempio n. 1
0
def main(ctx, testing: bool, host: str, port: int):
    """CLI entry point: attach an ActivityWatch client to the command context.

    When the default port (5600) is requested, it is remapped to the testing
    port 5666 if *testing* is set; any explicitly chosen port passes through.
    """
    ctx.obj = _Context()
    if port == 5600:
        # Default port requested: switch to the testing port in testing mode.
        resolved_port = 5666 if testing else 5600
    else:
        resolved_port = port
    ctx.obj.client = aw_client.ActivityWatchClient(
        host=host,
        port=resolved_port,
        testing=testing,
    )
Esempio n. 2
0
def main(ctx, testing: bool, verbose: bool, host: str, port: int):
    """CLI entry point: attach an ActivityWatch client and configure logging.

    When the default port (5600) is requested, it is remapped to the testing
    port 5666 if *testing* is set; any explicitly chosen port passes through.
    """
    ctx.obj = _Context()
    if port == 5600:
        # Default port requested: switch to the testing port in testing mode.
        resolved_port = 5666 if testing else 5600
    else:
        resolved_port = port
    ctx.obj.client = aw_client.ActivityWatchClient(
        host=host,
        port=resolved_port,
        testing=testing,
    )
    # The verbose flag bumps the root logger from INFO to DEBUG.
    logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO)
Esempio n. 3
0
def get_yt_videos():
    """Fetch web-watcher events and return the YouTube videos found in them.

    Pulls all events from the Chrome web-watcher bucket, extracts YouTube
    content, and enriches each video via its `add_youtube_data` method.
    """
    client = aw_client.ActivityWatchClient("thankful-test", testing=True)
    web_events = client.get_events(bucket_id="aw-watcher-web-chrome", limit=-1)

    videos = find_youtube_content(web_events)
    for vid in videos:
        vid.add_youtube_data()
    return videos
def main():
    """
    Inserts all events from one bucket into another bucket, after checking for
    overlap (which you shouldn't have if it was caused by a changing hostname).

    Useful to fix duplicate buckets caused by a changing hostname, as in this issue:
      https://github.com/ActivityWatch/activitywatch/issues/454
    """

    # You need to set testing=False if you're going to run this on your normal instance
    aw = aw_client.ActivityWatchClient(testing=True)

    # Show available buckets so the user can pick source/destination IDs.
    buckets = aw.get_buckets()
    print(f"Buckets: {buckets.keys()}")

    src_id = input("Source bucket ID: ")
    dest_id = input("Destination bucket ID: ")

    src_events = aw.get_events(src_id)
    print(f"✓ src events: {len(src_events)}")
    dest_events = aw.get_events(dest_id)
    print(f"✓ dest events: {len(dest_events)}")

    # Any overlap between the two buckets means merging would double-count
    # time, so abort in that case.
    print("Checking overlap...")
    overlaps = list(_intersecting_eventpairs(src_events, dest_events))
    if overlaps:
        total_duration_src = sum((e.duration for e in src_events), timedelta())
        # Each overlap entry is (src_event, dest_event, timeperiod);
        # only the intersecting timeperiod's duration is summed here.
        total_overlap = sum((tp.duration for _, _, tp in overlaps),
                            timedelta())
        print(
            f"Buckets had overlap ({total_overlap} out of {total_duration_src}), can't safely merge, exiting."
        )
        exit(1)
    print("No overlap detected, continuing...")

    # Require explicit confirmation before mutating any data.
    print("You want to merge these two buckets:")
    print(f" - {src_id}")
    print(f" - {dest_id}")
    print(
        "WARNING: you should backup/export the buckets before attempting this operation"
    )
    if input("Does that look right? (y/n): ") != "y":
        print("Aborting")
        exit(1)

    print("Inserting source events into destination bucket...")
    aw.insert_events(dest_id, src_events)

    # Deleting the source bucket is optional and irreversible, so ask again.
    print("Operation complete")
    if input("Do you want to delete the source bucket? (y/n): ") == "y":
        aw.delete_bucket(src_id)
        print("Bucket deleted")

    print("Exiting")
Esempio n. 5
0
def query(save: bool = True):
    """Query "Work" time for the last 5 days and print/save the results.

    Days are offset by 4 hours, so late-night work counts towards the
    previous day. Prints summaries at several break-time thresholds.

    Args:
        save: When True (the default, matching the previous hard-coded
            behavior), dump the raw query result to
            ``working_hours_events.json``.
    """
    td1d = timedelta(days=1)
    day_offset = timedelta(hours=4)

    now = datetime.now(tz=timezone.utc)
    # TODO: Account for timezone, or maybe it's handled correctly by aw_client?
    today = datetime.combine(now.date(), time()) + day_offset

    # Five consecutive day-long periods, oldest first.
    timeperiods = [(today - i * td1d, today - (i - 1) * td1d) for i in range(5)]
    timeperiods.reverse()

    categories: List[Tuple[List[str], Dict]] = [
        (
            ["Work"],
            {
                "type": "regex",
                "regex": r"activitywatch|algobit|defiarb|github.com",
                "ignore_case": True,
            },
        )
    ]

    aw = aw_client.ActivityWatchClient()

    # TODO: Move this query somewhere else, as the equivalent of aw-webui's 'canonicalEvents'
    res = aw.query(
        f"""
    window = flood(query_bucket(find_bucket("aw-watcher-window_")));
    afk = flood(query_bucket(find_bucket("aw-watcher-afk_")));
    events = filter_period_intersect(window, filter_keyvals(afk, "status", ["not-afk"]));
    events = categorize(events, {json.dumps(categories)});
    events = filter_keyvals(events, "$category", [["Work"]]);
    duration = sum_durations(events);
    RETURN = {{"events": events, "duration": duration}};
    """,
        timeperiods,
    )

    # Print summaries for break thresholds of 0, 5, 10 and 15 minutes.
    for break_time in [0, 5 * 60, 10 * 60, 15 * 60]:
        _print(
            timeperiods, res, break_time, {"category_rule": categories[0][1]["regex"]}
        )

    if save:
        fn = "working_hours_events.json"
        with open(fn, "w") as f:
            print(f"Saving to {fn}...")
            json.dump(res, f, indent=2)
Esempio n. 6
0
def test_fullDesktopQuery():
    """Smoke-test fullDesktopQuery against a live server for the last week."""
    params = DesktopQueryParams(
        bid_window="aw-watcher-window_",
        bid_afk="aw-watcher-afk_",
    )
    end = datetime.now(tz=timezone.utc)
    # Single timeperiod covering the past 7 days.
    timeperiods = [(end - timedelta(days=7), end)]
    query = fullDesktopQuery(params)

    awc = aw_client.ActivityWatchClient("test")
    result = awc.query(query, timeperiods)[0]
    print(len(result["events"]))
Esempio n. 7
0
def query(regex: str = EXAMPLE_REGEX, save=True):
    """Query time matching *regex* (as "Work") for the last 5 days.

    Prints per-threshold summaries and, when *save* is set, dumps the raw
    result to working_hours_events.json.
    """
    print("Querying events...")
    one_day = timedelta(days=1)
    day_offset = timedelta(hours=4)
    print(f"  Day offset: {day_offset}")
    print("")

    now = datetime.now().astimezone()
    # Shift day boundaries by the offset so late nights count as the same day.
    today = (datetime.combine(now.date(), time()) + day_offset).astimezone()

    # Five consecutive day-long periods, oldest first.
    timeperiods = [
        (today - n * one_day, today - (n - 1) * one_day) for n in range(5)
    ]
    timeperiods.reverse()

    categories: List[Tuple[List[str], Dict]] = [(
        ["Work"],
        {
            "type": "regex",
            "regex": regex,
            "ignore_case": True,
        },
    )]

    aw = aw_client.ActivityWatchClient()

    canonicalQuery = queries.canonicalEvents(
        queries.DesktopQueryParams(
            bid_window="aw-watcher-window_",
            bid_afk="aw-watcher-afk_",
        ))
    query = f"""
    {canonicalQuery}
    duration = sum_durations(events);
    RETURN = {{"events": events, "duration": duration}};
    """

    res = aw.query(query, timeperiods)

    # Summaries at break thresholds of 0, 5, 10 and 15 minutes.
    for break_time in (0, 5 * 60, 10 * 60, 15 * 60):
        _print(timeperiods, res, break_time,
               {"category_rule": categories[0][1]["regex"]})

    if save:
        fn = "working_hours_events.json"
        with open(fn, "w") as f:
            print(f"Saving to {fn}...")
            json.dump(res, f, indent=2)
Esempio n. 8
0
def main(testing: bool):
    """Watcher entry point: poll keyboard/mouse listeners and send heartbeats.

    Runs forever, sampling input activity once per second and reporting it
    to an aw-server instance as "os.hid.input" events.
    """
    logging.basicConfig(level=logging.INFO)
    logger.info("Starting watcher...")
    client = aw_client.ActivityWatchClient("aw-watcher-input", testing=testing)
    client.connect()

    # Create bucket (named <client>_<hostname>, the ActivityWatch convention)
    bucket_name = "{}_{}".format(client.client_name, client.client_hostname)
    eventtype = "os.hid.input"
    client.create_bucket(bucket_name, eventtype, queued=False)
    poll_time = 1  # seconds between samples

    # Background listeners accumulate input events between polls.
    keyboard = KeyboardListener()
    keyboard.start()
    mouse = MouseListener()
    mouse.start()

    now = datetime.now(tz=timezone.utc)

    while True:
        last_run = now
        sleep(poll_time)
        now = datetime.now(tz=timezone.utc)

        # If input:    Send a heartbeat with data, ensure the span is correctly set, and don't use pulsetime.
        # If no input: Send a heartbeat with all-zeroes in the data, use a pulsetime.
        # FIXME: Doesn't account for scrolling
        # FIXME: Counts both keyup and keydown
        keyboard_data = keyboard.next_event()
        mouse_data = mouse.next_event()
        merged_data = dict(**keyboard_data, **mouse_data)
        # Event spans the polling interval just elapsed.
        e = Event(timestamp=last_run, duration=(now - last_run), data=merged_data)

        pulsetime = 0.0
        if all(map(lambda v: v == 0, merged_data.values())):
            # No activity: use a pulsetime slightly longer than the poll
            # interval so consecutive idle heartbeats merge into one event.
            pulsetime = poll_time + 0.1
            logger.info("No new input")
        else:
            logger.info(f"New input: {e}")

        client.heartbeat(bucket_name, e, pulsetime=pulsetime, queued=True)
Esempio n. 9
0
def main():
    """Benchmark event-compression strategies on every bucket of a server."""
    client = aw_client.ActivityWatchClient("aw-syncserver")

    for bucket_id in client.get_buckets():
        events = client.get_events(bucket_id, limit=-1)
        print("bucket: {}\nevent count: {}".format(bucket_id, len(events)))

        # Compare codecs on the raw, unfiltered event list.
        print("# Unfiltered")
        compress(events, method="zstd")
        compress(events, method="zlib")

        # Disabled: benchmark on short-event-filtered data.
        if False:
            print("# Filtered")
            compress(filter_short(events))

        # Compare chunking strategies (fixed-size vs per-day).
        print("# Chunked (n=1000)")
        bench_chunks(list(chunked(events, 1000)), "zstd")

        print("# Chunked (by date)")
        bench_chunks(list(chunked_by_date(events)), "zstd")

        print("=" * 20)
Esempio n. 10
0
def import_to_awserver(bucket):
    """Import a single bucket dict into a (testing) aw-server instance."""
    client = aw_client.ActivityWatchClient('smartertime2activitywatch',
                                           testing=True)
    # Round-trip through JSON so non-serializable values are converted
    # by the `default` serializer before posting.
    payload = json.loads(json.dumps({"buckets": [bucket]}, default=default))
    # NOTE(review): relies on the private `_post` helper of the client.
    client._post('import', payload)
Lists the most common words among uncategorized events, by duration, to help in creating categories.

This might make more sense as a notebook.
"""

from collections import Counter
from datetime import datetime, timedelta, timezone
from tabulate import tabulate
from typing import Dict

from aw_core import Event
import aw_client
from aw_client import queries

# Set up a module-level ActivityWatch client shared by the functions below.
awc = aw_client.ActivityWatchClient("test")


def get_events():
    """
    Retrieves AFK-filtered events, only returns events which are Uncategorized.
    """

    start = datetime(2022, 1, 1, tzinfo=timezone.utc)
    now = datetime.now(tz=timezone.utc)
    timeperiods = [(start, now)]

    # TODO: Use tools in aw-research to load categories from toml file
    categories = [
        (
            ["Work"],
Esempio n. 12
0
def main():
    """aw-cli entry point: parse arguments and dispatch to a sub-command.

    Sub-commands: heartbeat (unimplemented), buckets, events, query.
    """
    now = datetime.now()
    td1day = timedelta(days=1)
    td1yr = timedelta(days=365)

    parser = argparse.ArgumentParser(prog="aw-cli", description='A CLI utility for interacting with ActivityWatch.')
    parser.set_defaults(which='none')
    parser.add_argument('--host', default="localhost:5600", help="Host to use, on the format HOSTNAME:PORT")

    subparsers = parser.add_subparsers(help='sub-command help')

    parser_heartbeat = subparsers.add_parser('heartbeat', help='Send a heartbeat to the server')
    parser_heartbeat.set_defaults(which='heartbeat')
    parser_heartbeat.add_argument('--pulsetime', default=60, help='Pulsetime to use')

    parser_buckets = subparsers.add_parser('buckets',
                                           help='List all buckets')
    parser_buckets.set_defaults(which='buckets')

    # Fixed: this subparser previously reused (and shadowed) the
    # `parser_buckets` variable from the 'buckets' sub-command above.
    parser_events = subparsers.add_parser('events',
                                          help='Query events from bucket')
    parser_events.set_defaults(which='events')
    parser_events.add_argument('bucket')

    parser_query = subparsers.add_parser('query',
                                         help='Query events from bucket')
    parser_query.set_defaults(which='query')
    parser_query.add_argument('path')
    parser_query.add_argument('--name')
    parser_query.add_argument('--cache', action='store_true')
    parser_query.add_argument('--json', action='store_true', help='Output resulting JSON')
    # Default window: from yesterday to far in the future (i.e. "everything recent").
    parser_query.add_argument('--start', default=now - td1day, type=_valid_date)
    parser_query.add_argument('--end', default=now + 10 * td1yr, type=_valid_date)

    args = parser.parse_args()

    client = aw_client.ActivityWatchClient(host=args.host)

    if args.which == "heartbeat":
        raise NotImplementedError
    elif args.which == "buckets":
        buckets = client.get_buckets()
        print("Buckets:")
        for bucket in buckets:
            print(" - {}".format(bucket))
    elif args.which == "events":
        events = client.get_events(args.bucket)
        print("events:")
        for e in events:
            print(" - {} ({}) {}".format(e.timestamp.replace(tzinfo=None, microsecond=0), str(e.duration).split(".")[0], e.data))
    elif args.which == "query":
        with open(args.path) as f:
            query = f.read()
        result = client.query(query, args.start, args.end, cache=args.cache, name=args.name)
        if args.json:
            print(json.dumps(result))
        else:
            for period in result:
                print("Showing 10 out of {} events:".format(len(period)))
                for event in period[:10]:
                    # Drop noisy fields before printing a compact summary.
                    event.pop("id")
                    event.pop("timestamp")
                    print(" - Duration: {} \tData: {}".format(str(timedelta(seconds=event["duration"])).split(".")[0], event["data"]))
                print("Total duration:\t", timedelta(seconds=sum(e["duration"] for e in period)))
    else:
        parser.print_help()
Esempio n. 13
0
def query() -> List[Event]:
    """Query categorized window events since Sept 2020 for a fixed host.

    Prints a per-category duration summary and returns the raw event list.
    Categories are assigned via regex rules interpolated into the query.
    """
    awc = aw_client.ActivityWatchClient(testing=False)
    # NOTE(review): hostname is hard-coded; events from other hosts are ignored.
    hostname = "erb-main2-arch"

    # Rough start of first EEG data collection
    start = datetime(2020, 9, 20, tzinfo=timezone.utc)
    stop = datetime.now(tz=timezone.utc)

    def cat_re(re_str):
        # Helper: build a regex category-rule dict.
        return {"type": "regex", "regex": re_str}

    # Basic set of categories to use as labels
    # TODO: Add assert to ensure all categories have matching events
    # FIXME: For some reason escape sequences don't work, might just be how strings are interpolated into the query.
    categories = [
        [["Editing"], cat_re("NVIM")],
        [["Editing", "Code"], cat_re(r"[.](py|rs|js|ts)")],
        [["Editing", "Prose"], cat_re(r"[.](tex|md)")],
        [["Reading docs"], cat_re("readthedocs.io")],
        [["Stack Overflow"], cat_re("Stack Overflow")],
        [["GitHub", "Pull request"], cat_re(r"Pull Request #[0-9]+")],
        [["GitHub", "Issues"], cat_re(r"Issue #[0-9]+")],
        # NOTE: There may be a significant difference between scrolling on the landing page and actually watching videos
        [["YouTube"], cat_re("YouTube")],
        [["Twitter"], cat_re("Twitter")],
        [["Markets"], cat_re("tradingview.com")],
    ]

    # Query-language program: AFK-filter window events, categorize them, and
    # also return per-category durations sorted by duration.
    query = """
    events = flood(query_bucket("aw-watcher-window_{hostname}"));
    not_afk = flood(query_bucket("aw-watcher-afk_{hostname}"));
    not_afk = filter_keyvals(not_afk, "status", ["not-afk"]);
    events = filter_period_intersect(events, not_afk);
    events = categorize(events, {categories});
    cat_events = sort_by_duration(merge_events_by_keys(events, ["$category"]));
    RETURN = {
        "events": events,
        "duration_by_cat": cat_events
    };
    """

    # Insert parameters
    # (not done with f-strings since they don't like when there's other {...}'s in the string, and I don't want to {{...}})
    query = query.replace("{hostname}", hostname)
    query = query.replace("{categories}", json.dumps(categories))

    print("Querying aw-server...")
    data = awc.query(query, [(start, stop)])

    # Since we're only querying one timeperiod
    result: dict = data[0]

    # pprint(result, depth=1)
    # pprint(result["events"][0])

    # Transform to Event
    events = [Event(**e) for e in result["events"]]
    duration_by_cat = [Event(**e) for e in result["duration_by_cat"]]

    print("Time by category:")
    print_events(duration_by_cat, lambda e: e["data"]["$category"])

    return events