Example #1
    def device_auth(self):
        code_data = self.get_device_code()
        if not code_data:
            logger.error("Could not get device code.")
            return

        logger.info(f"Verification URL: {code_data['verification_url']}")
        logger.info(f"User Code: {code_data['user_code']}")
        notify(
            "Open {verification_url} in your browser and enter this code: "
            "{user_code}".format(**code_data), timeout=30, stdout=True,
            category="trakt")

        # automatically open the url in the default browser
        # but we don't want to use terminal-based browsers - most likely not
        # what the user wants
        term_bak = os.environ.pop("TERM", None)
        webbrowser.open(code_data['verification_url'])
        if term_bak is not None:
            os.environ["TERM"] = term_bak

        start = time.time()
        while time.time() - start < code_data['expires_in']:
            if self.get_device_token(code_data['device_code']):
                notify('App authorized successfully.',
                       stdout=True, category="trakt")
                logger.info('App authorized successfully.')
                break
            logger.debug('Waiting for user to authorize the app.')
            time.sleep(int(code_data['interval']))
        else:
            logger.error('Timed out during auth.')
Example #2
 def autoload_cfg(cls):
     template = getattr(cls, 'CONFIG_TEMPLATE', None)
     monitor_cfg = config['players'][cls.name].get(template)
     auto_keys = {k for k, v in monitor_cfg.items() if v == "auto-detect"}
     if not auto_keys:
         return monitor_cfg
     try:
         loaders = getattr(cls, "read_player_cfg")(auto_keys)
     except AttributeError:
         logger.debug(f"Auto val not found for {', '.join(auto_keys)}")
         logger.error(f"Autoload not supported for {cls.name}.")
         raise AutoloadError
     except FileNotFoundError as e:
         raise AutoloadError(src=e.filename)
     while auto_keys:
         param = auto_keys.pop()
         try:
             param_loader = loaders[param]
         except KeyError:
             logger.error(f"Autoload not supported for '{param}'.")
             raise AutoloadError(param)
         try:
             monitor_cfg[param] = param_loader()
             logger.debug(
                 f"Autoloaded {cls.name} {param} = {monitor_cfg[param]}")
         except FileNotFoundError as e:
             raise AutoloadError(src=e.filename)
     return monitor_cfg
Example #3
    def conn_loop(self):
        self.file_handle = win32file.CreateFile(
            self.ipc_path, win32file.GENERIC_READ | win32file.GENERIC_WRITE, 0,
            None, win32file.OPEN_EXISTING, win32file.FILE_FLAG_OVERLAPPED,
            None)
        if self.file_handle == win32file.INVALID_HANDLE_VALUE:
            err = win32api.FormatMessage(win32api.GetLastError())
            logging.error(f"Failed to connect to pipe: {err}")
            self.file_handle = None
            return

        # needed for blocking on read
        overlapped = win32file.OVERLAPPED()
        overlapped.hEvent = win32event.CreateEvent(None, 0, 0, None)

        # needed for transactions
        win32pipe.SetNamedPipeHandleState(self.file_handle,
                                          win32pipe.PIPE_READMODE_MESSAGE,
                                          None, None)
        self.is_running = True
        while self.is_running:
            val = self._call(win32file.ReadFile, self.file_handle,
                             self._read_buf, overlapped)
            if not self.is_running:
                break
            err, data = val
            if err != 0 and err != ERROR_IO_PENDING:
                logger.warning(f"Unexpected read result {err}. Quitting.")
                logger.debug(f"data={bytes(data)}")
                self.is_running = False
                break
            if err == ERROR_IO_PENDING:
                err = win32event.WaitForSingleObject(overlapped.hEvent,
                                                     self.read_timeout)

            if err == win32event.WAIT_OBJECT_0:  # data is available
                data = bytes(data)
                line = data[:data.find(b"\n")]
                self.on_line(line)

            while not self.write_queue.empty():
                # first see if mpv sent some data that needs to be read
                data = self._call(self._read_all_data)
                if not self.is_running:
                    break
                if data:
                    self.on_data(data)
                # cancel all remaining reads/writes. Should be benign
                win32file.CancelIo(self.file_handle)

                write_data = self.write_queue.get_nowait()
                data = self._call(self._transact, write_data)
                if not self.is_running:
                    break
                self.on_line(data[:-1])

        self.is_running = False
        self.file_handle.close()
        self.file_handle = None
        logger.debug('Pipe closed.')
Example #4
def get_trakt_id(title, item_type, year=None):
    required_type = 'show' if item_type == 'episode' else 'movie'

    global trakt_cache
    if not trakt_cache:
        trakt_cache = read_json(TRAKT_CACHE_PATH) or {'movie': {}, 'show': {}}

    trakt_id = trakt_cache[required_type].get(title)
    if trakt_id:
        return trakt_id

    logger.debug(f'Searching trakt: Title: "{title}", Year: {year}')
    results = search(title, [required_type], year)
    if results is None:  # Connection error
        return 0  # Don't store in cache
    elif results == [] or results[0]['score'] < 5:  # Weak or no match
        msg = f'Trakt search yielded no results for the {required_type}, {title}'
        msg += f", Year: {year}" * bool(year)
        logger.warning(msg)
        notify(msg)
        trakt_id = -1
    else:
        trakt_id = results[0][required_type]['ids']['trakt']

    trakt_cache[required_type][title] = trakt_id
    logger.debug(f'Trakt ID: {trakt_id}')
    write_json(trakt_cache, TRAKT_CACHE_PATH)
    return trakt_id
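The function above encodes its failure modes in sentinel return values; a hypothetical caller sketch (not from the original project, with an invented title) showing how those sentinels would be distinguished:

# Hypothetical caller, illustrating the sentinels returned by get_trakt_id above.
trakt_id = get_trakt_id("Some Show", "episode")
if trakt_id == 0:
    # Connection error: nothing was cached, so the lookup can be retried later.
    pass
elif trakt_id == -1:
    # Weak or no match: cached as -1 so the failing search is not repeated.
    pass
else:
    # A real Trakt ID that can be used for scrobbling.
    pass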
Example #5
 def conn_loop(self):
     self.is_running = True
     self.update_vars()
     self.file_handle = win32file.CreateFile(
         self.ipc_path, win32file.GENERIC_READ | win32file.GENERIC_WRITE, 0,
         None, win32file.OPEN_EXISTING, 0, None)
     while self.is_running:
         try:
             while not self.write_queue.empty():
                 win32file.WriteFile(self.file_handle,
                                     self.write_queue.get_nowait())
         except win32file.error:
             logger.debug('Exception while writing to pipe.', exc_info=True)
             self.is_running = False
             break
         size = win32file.GetFileSize(self.file_handle)
         if size > 0:
             while size > 0:
                 # pipe has data to read
                 _, data = win32file.ReadFile(self.file_handle, 4096)
                 self.on_data(data)
                 size = win32file.GetFileSize(self.file_handle)
         else:
             time.sleep(1)
     win32file.CloseHandle(self.file_handle)
     logger.debug('Pipe closed.')
Example #6
def get_media_info(file_path):
    logger.debug(f"Filepath '{file_path}'")
    file_path = Path(file_path)
    if not whitelist_file(file_path):
        logger.info("File path not in whitelist.")
        return None
    guess = custom_regex(file_path) or use_guessit(file_path)

    if any(key not in guess for key in ('title', 'type')) or \
       (guess['type'] == 'episode' and 'episode' not in guess):
        logger.warning('Failed to parse filename for episode/movie info. '
                       'Consider renaming/using custom regex.')
        return None

    if isinstance(guess['title'], list):
        guess['title'] = " ".join(guess['title'])

    req_keys = ['type', 'title']
    if guess['type'] == 'episode':
        season = guess.get('season', 1)
        if isinstance(season, list):
            logger.warning(
                f"Multiple probable seasons found: ({','.join(map(str, season))}). "
                "Consider renaming the folder.")
            return None
        guess['season'] = int(season)
        req_keys += ['season', 'episode']

    return {key: guess[key] for key in req_keys}
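A minimal usage sketch with a made-up file path; the returned keys follow directly from the function above:

# Illustrative call with an invented path (assumes the path passes the whitelist).
info = get_media_info("/media/Some Show/Season 2/Some Show S02E05.mkv")
# On success for an episode, the result contains exactly the required keys, e.g.
#   {'type': 'episode', 'title': 'Some Show', 'season': 2, 'episode': 5}
# A movie yields only {'type': 'movie', 'title': ...}; None is returned when the
# path is not whitelisted or the filename cannot be parsed.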
Example #7
    def autoload_cfg(cls):
        template = getattr(cls, 'CONFIG_TEMPLATE', None)
        monitor_cfg = config['players'][cls.name].get(template)
        assert monitor_cfg is not None
        auto_keys = {k for k, v in monitor_cfg.items() if v == "auto-detect"}
        if not auto_keys:
            return monitor_cfg
        try:
            loaders = getattr(cls, "read_player_cfg")(auto_keys)
        except AttributeError:
            raise AutoloadError(param=auto_keys,
                                extra_msg=f"Autoload not supported for {cls.name}.")
        except FileNotFoundError as e:
            raise AutoloadError(src=e.filename, extra_msg="File not found")

        while auto_keys:
            param = auto_keys.pop()
            try:
                param_loader = loaders[param]
            except KeyError:
                raise AutoloadError(param,
                                    extra_msg="Autoload not supported for this param")
            try:
                monitor_cfg[param] = param_loader()
                logger.debug(f"Autoloaded {cls.name} {param} = {monitor_cfg[param]}")
            except FileNotFoundError as e:
                raise AutoloadError(param, src=e.filename, extra_msg="File not found")
        return monitor_cfg
Example #8
def device_auth():
    code_data = get_device_code()
    if not code_data:
        logger.error('Failed device auth.')
        sys.exit(1)

    logger.info(f"Verification URL: {code_data['verification_url']}")
    logger.info(f"User Code: {code_data['user_code']}")
    notify("Open {verification_url} in your browser and enter this code: "
           "{user_code}".format(**code_data),
           timeout=60,
           stdout=True)
    webbrowser.open(code_data['verification_url'])

    start = time.time()
    while time.time() - start < code_data['expires_in']:
        token_data = get_device_token(code_data['device_code'])
        if not token_data:
            logger.debug('Waiting for user to authorize the app.')
            time.sleep(int(code_data['interval']))
        else:
            notify('App authorized successfully.', stdout=True)
            logger.info('Device auth successful.')
            break
    else:
        logger.error('Timed out during auth.')
    return token_data
Example #9
    def device_auth(self):
        code_data = self.get_device_code()
        if not code_data:
            logger.error("Could not get device code.")
            return

        logger.info(f"Verification URL: {code_data['verification_url']}")
        logger.info(f"User Code: {code_data['user_code']}")
        notify("Open {verification_url} in your browser and enter this code: "
               "{user_code}".format(**code_data),
               timeout=30,
               stdout=True,
               category="trakt")
        webbrowser.open(code_data['verification_url'])

        start = time.time()
        while time.time() - start < code_data['expires_in']:
            if self.get_device_token(code_data['device_code']):
                notify('App authorized successfully.',
                       stdout=True,
                       category="trakt")
                logger.info('App authorized successfully.')
                break
            logger.debug('Waiting for user to authorize the app.')
            time.sleep(int(code_data['interval']))
        else:
            logger.error('Timed out during auth.')
Example #10
 def delayed_scrobble(self, cleanup=None):
     logger.debug("Delayed scrobble")
     with self.lock:
         if self.scrobble_buf:
             logger.debug(self.scrobble_buf)
             self.scrobble_status(self.scrobble_buf)
         if cleanup:
             cleanup()
Example #11
def get_media_info(file_path):
    logger.debug(f"Filepath '{file_path}'")
    file_path = Path(file_path)
    if not whitelist_file(file_path):
        logger.info("File path not in whitelist.")
        return None
    guess = use_regex and custom_regex(file_path) or use_guessit(file_path)
    logger.debug(f"Guess: {guess}")
    return cleanup_guess(guess)
Example #12
def custom_regex(file_path: str):
    for item_type, patterns in regexes.items():
        for pattern in patterns:
            m = pattern.match(file_path)
            if m:
                logger.debug(f"Matched regex pattern {pattern!r}")
                guess = m.groupdict()
                guess['type'] = item_type
                return guess
Example #13
def custom_regex(file_path):
    path_posix = str(file_path.as_posix())
    for item_type, patterns in regexes.items():
        for pattern in patterns:
            m = re.match(pattern, path_posix)
            if m:
                logger.debug(f"Matched regex pattern '{pattern}' for '{path_posix}'")
                guess = m.groupdict()
                guess['type'] = item_type
                return guess
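Both custom_regex variants read a module-level regexes mapping that the listing does not show; below is a plausible shape for it, purely as an assumption, along with a sample call against this last variant (the pattern and path are invented):

from pathlib import Path  # presumably already imported by the surrounding module

# Assumed shape of the module-level `regexes` mapping (not shown in the listing):
# item type -> list of regex strings with named groups.
regexes = {
    'episode': [r".*/(?P<title>[^/]+)/S(?P<season>\d+)E(?P<episode>\d+)[^/]*$"],
    'movie': [],
}

guess = custom_regex(Path("/media/Some Show/S01E02.mkv"))
# On a match, `guess` holds the named groups plus the item type, e.g.
#   {'title': 'Some Show', 'season': '01', 'episode': '02', 'type': 'episode'}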
Example #14
 def __new__(cls, *args, **kwargs):
     try:
         cls.config = cls.autoload_cfg()
     except AutoloadError as e:
         logger.debug(str(e))
         logger.error(f"Config value autoload failed for {cls.name}.")
     except Exception:
         logger.exception(f"Config value autoload failed for {cls.name}.")
     else:
         return super().__new__(cls)
Example #15
 def on_line(self, line: bytes):
     try:
         mpv_json = json.loads(line)
     except json.JSONDecodeError:
         logger.warning('Invalid JSON received. Skipping.', exc_info=True)
         logger.debug(line)
         return
     if 'event' in mpv_json:
         self.handle_event(mpv_json['event'])
     elif 'request_id' in mpv_json:
         self.handle_cmd_response(mpv_json)
Example #16
 def scrobble(self, verb, data):
     logger.debug(f"Scrobbling {verb} at {data['progress']:.2f}% for "
                  f"{data['media_info']['title']}")
     resp = trakt.scrobble(verb, **data)
     if resp:
         self.handle_successful_scrobble(verb, data, resp)
     elif resp is False and verb == 'stop' and data['progress'] > 80:
         logger.warning('Scrobble unsuccessful. Will try again later.')
         self.backlog_cleaner.add(data)
     else:
         logger.warning('Scrobble unsuccessful. Discarding it.')
     self.prev_scrobble = (verb, data)
Example #17
def scrobble(verb, media_info, progress, *args, **kwargs):
    scrobble_data = prepare_scrobble_data(**media_info)
    logger.debug(scrobble_data)
    if not scrobble_data:
        return None
    scrobble_data['progress'] = progress
    scrobble_params = {
        "url": API_URL + '/scrobble/' + verb,
        "headers": get_headers(),
        "json": scrobble_data
    }
    scrobble_resp = safe_request('post', scrobble_params)
    return scrobble_resp.json() if scrobble_resp else False
Example #18
 def __new__(cls, *args, **kwargs):
     try:
         cls.inject_base_config()
         cls.config = cls.autoload_cfg()
     except AutoloadError as e:
         logger.debug(str(e))
         logger.error(f"Config value autoload failed for {cls.name}.")
         notify(f"Check log file. {e!s}", category="exception")
     except Exception:
         msg = f"Config value autoload failed for {cls.name}."
         logger.exception(msg)
         notify(f"{msg} Check log file.", category="exception")
     else:
         return super().__new__(cls)
Example #19
def get_media_info(file_path: str):
    logger.debug(f"Raw filepath {file_path!r}")
    file_path = cleanup_encoding(file_path)
    parsed = urlsplit(file_path)
    file_is_url = False
    guessit_path = file_path
    if is_url(parsed):
        file_is_url = True
        # remove the query and fragment from the url, keeping only important parts
        scheme, netloc, path, _, _ = parsed
        path = unquote(path)  # quoting should only be applied to the path
        file_path = urlunsplit((scheme, netloc, path, "", ""))
        logger.debug(f"Converted to url {file_path!r}")
        # only use the actual path for guessit, skipping other parts
        guessit_path = path
        logger.debug(f"Guessit url {guessit_path!r}")

    if not whitelist_file(file_path, file_is_url):
        logger.info("File path not in whitelist.")
        return None
    if exclude_file(file_path):
        logger.info("Ignoring file.")
        return None
    guess = use_regex and custom_regex(file_path) or use_guessit(guessit_path)
    logger.debug(f"Guess: {guess}")
    return cleanup_guess(guess)
Example #20
 def scrobble_if_state_changed(self, prev, current):
     """
     Possible race conditions:
     1) start_preview, then stop_preview after __preview_duration__ secs:
        start_preview starts preview_timer for that many secs, with
        cleanup=exit_preview. stop_preview also triggers exit_preview,
        so both may run in parallel.
     """
     for action in self.decide_action(prev, current):
         logger.debug(f"action={action}")
         if action == "scrobble":
             logger.debug(current)
             self.scrobble_status(current)
         elif action == "stop_previous":
             self.scrobble_queue.put(("stop", prev))
         elif action == "exit_preview":
             self.exit_preview()
         elif action == "enter_preview":
             assert not self.preview and not self.scrobble_buf, "Invalid state"
             self.preview = True
             self.scrobble_buf = current
             self.preview_timer = ResumableTimer(self.preview_duration,
                                                 self.delayed_scrobble,
                                                 (self.exit_preview, ))
             self.preview_timer.start()
         elif action == "pause_preview":
             self.scrobble_buf = current
             self.preview_timer.pause()
         elif action == "resume_preview":
             self.scrobble_buf = current
             self.preview_timer.resume()
         elif action == "enter_fast_pause":
             assert not self.fast_pause, "Invalid state"
             self.fast_pause = True
         elif action == "clear_buf":
             self.clear_timer('fast_pause_timer')
             self.scrobble_buf = None
         elif action == "delayed_play":
             self.clear_timer('fast_pause_timer')
             self.scrobble_buf = current
             self.fast_pause_timer = ResumableTimer(
                 self.fast_pause_duration,
                 self.delayed_scrobble,
                 (self.exit_fast_pause, ),
             )
             self.fast_pause_timer.start()
         elif action == "exit_fast_pause":
             self.exit_fast_pause()
         else:
             logger.warning(f"Invalid action {action}")
Example #21
def whitelist_file(file_path: str,
                   is_url=False,
                   return_path=False) -> Union[bool, str]:
    """Check if the played media file is in the allowed list of paths"""
    if not whitelist:
        return True
    is_whitelisted = whitelist_remote if is_url else whitelist_local
    whitelist_paths = remote_paths if is_url else local_paths

    for path in whitelist_paths:
        if is_whitelisted(path, file_path):
            logger.debug(f"Matched whitelist entry {path!r}")
            return path if return_path else True

    return False
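whitelist_file leans on matcher helpers (whitelist_local, whitelist_remote) and path lists that the listing omits; here is a rough sketch of what such matchers could look like, with names and logic assumed rather than taken from the project:

import os

def whitelist_local(allowed_path, file_path):
    # Assumed matcher: True when file_path lies under the allowed directory.
    allowed = os.path.abspath(allowed_path)
    target = os.path.abspath(file_path)
    return os.path.commonpath([allowed, target]) == allowed

def whitelist_remote(allowed_prefix, url):
    # Assumed matcher: True when the URL starts with the allowed prefix.
    return url.startswith(allowed_prefix)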
Example #22
    def clear(self):
        self.remove_expired()

        failed = []
        for item in self.backlog:
            logger.debug(f'Adding item to history {item}')
            if trakt.add_to_history(**item):
                logger.info("Successfully added media to history.")
            else:
                failed.append(item)

        self.backlog = failed
        self.save_backlog()

        if self.timer_enabled:
            self.timer.cancel()
            self._make_timer()
Example #23
 def scrobble(self, verb, data):
     logger.debug(f"Progress: {data['progress']:.2}%")
     resp = trakt.scrobble(verb, **data)
     if resp:
         if 'movie' in resp:
             name = resp['movie']['title']
         else:
             name = (resp['show']['title'] +
                     " S{season:02}E{number:02}".format(**resp['episode']))
         msg = f"Scrobble {verb} successful for {name}"
         logger.info(msg)
         notify(msg)
         self.backlog_cleaner.clear()
     elif resp is False and verb == 'stop' and data['progress'] > 80:
         logger.warning('Scrobble unsuccessful. Will try again later.')
         self.backlog_cleaner.add(data)
     else:
         logger.warning('Scrobble unsuccessful.')
Example #24
 def conn_loop(self):
     sock = socket.socket(socket.AF_UNIX)
     try:
         sock.connect(self.ipc_path)
     except ConnectionRefusedError:
         logger.warning("Connection refused. Maybe we retried too soon?")
         return
     self.is_running = True
     sock_list = [sock]
     while self.is_running:
         r, _, _ = select.select(sock_list, [], [], self.read_timeout)
         if r:  # r == [sock]
             # socket has data to be read
             try:
                 data = sock.recv(4096)
             except ConnectionResetError:
                 self.is_running = False
                 break
             if len(data) == 0:
                 # EOF reached
                 self.is_running = False
                 break
             self.on_data(data)
         while not self.write_queue.empty():
             # block until sock can be written to
             _, w, _ = select.select([], sock_list, [], self.write_timeout)
             if not w:
                 logger.warning(
                     "Timed out writing to socket. Killing connection.")
                 self.is_running = False
                 break
             try:
                 sock.sendall(self.write_queue.get_nowait())
             except BrokenPipeError:
                 self.is_running = False
                 break
             else:
                 self.write_queue.task_done()
     sock.close()
     while not self.write_queue.empty():
         self.write_queue.get_nowait()
         self.write_queue.task_done()
     logger.debug('Sock closed')
Example #25
 def scrobble(self, verb, data):
     logger.debug(f"Scrobbling {verb} at {data['progress']:.2f}% for "
                  f"{data['media_info']['title']}")
     resp = trakt.scrobble(verb, **data)
     if resp:
         if 'movie' in resp:
             name = resp['movie']['title']
         else:
             name = (resp['show']['title'] +
                     " S{season:02}E{number:02}".format(**resp['episode']))
         category = 'resume' if self.is_resume(verb, data) else verb
         msg = f"Scrobble {category} successful for {name}"
         logger.info(msg)
         notify(msg, category=f"scrobble.{category}")
         self.backlog_cleaner.clear()
     elif resp is False and verb == 'stop' and data['progress'] > 80:
         logger.warning('Scrobble unsuccessful. Will try again later.')
         self.backlog_cleaner.add(data)
     else:
         logger.warning('Scrobble unsuccessful.')
     self.prev_scrobble = (verb, data)
Example #26
def get_trakt_id(title, item_type, year=None):
    required_type = 'show' if item_type == 'episode' else 'movie'

    global trakt_cache
    if not trakt_cache:
        trakt_cache = read_json(TRAKT_CACHE_PATH) or {'movie': {}, 'show': {}}

    key = f"{title}{year or ''}"

    trakt_id = trakt_cache[required_type].get(key)
    if trakt_id:
        return trakt_id

    logger.debug(
        f'Searching trakt: Title: "{title}"{year and f", Year: {year}" or ""}')
    results = search(title, [required_type], year)
    if results == [] and year is not None:
        # no match, possibly a mismatch in year metadata
        msg = (
            f'Trakt search yielded no results for the {required_type}, {title}, '
            f'Year: {year}. Retrying search without filtering by year.')
        logger.warning(msg)
        notify(msg, category="trakt")
        results = search(title, [required_type])  # retry without 'year'

    if results is None:  # Connection error
        return 0  # Don't store in cache
    elif results == [] or results[0]['score'] < 5:  # Weak or no match
        msg = f'Trakt search yielded no results for the {required_type}, {title}'
        msg += f", Year: {year}" * bool(year)
        logger.warning(msg)
        notify(msg, category="trakt")
        trakt_id = -1
    else:
        trakt_id = results[0][required_type]['ids']['trakt']

    trakt_cache[required_type][key] = trakt_id
    logger.debug(f'Trakt ID: {trakt_id}')
    write_json(trakt_cache, TRAKT_CACHE_PATH)
    return trakt_id
Example #27
 def conn_loop(self):
     self.sock = socket.socket(socket.AF_UNIX)
     self.sock.connect(self.ipc_path)
     self.is_running = True
     while self.is_running:
         r, _, e = select.select([self.sock], [], [], 0.1)
         if r == [self.sock]:
             # socket has data to read
             data = self.sock.recv(4096)
             if len(data) == 0:
                 # EOF reached
                 self.is_running = False
                 break
             self.on_data(data)
         while not self.write_queue.empty():
             # block until self.sock can be written to
             select.select([], [self.sock], [])
             try:
                 self.sock.sendall(self.write_queue.get_nowait())
             except BrokenPipeError:
                 self.is_running = False
     self.sock.close()
     logger.debug('Sock closed')
Example #28
    def get_data(self, url):
        resp = safe_request("get", {"url": url}, self.sess)
        if resp is None:
            return
        # TODO: If we get a 401, clear token and restart plex auth flow
        resp.raise_for_status()
        try:
            data = resp.json()["MediaContainer"]
        except JSONDecodeError:
            logger.exception("Error with decoding")
            logger.debug(resp.text)
            return None

        if data["size"] <= 0:
            return None

        # no user filter
        if (not self.config["scrobble_user"]
                or "User" not in data["Metadata"][0]):
            return data["Metadata"][0]

        for metadata in data["Metadata"]:
            if metadata["User"]["title"] == self.config["scrobble_user"]:
                return metadata
Example #29
def get_trakt_id(title, item_type):
    required_type = 'show' if item_type == 'episode' else 'movie'

    logger.debug('Searching cache.')
    trakt_id = trakt_cache[required_type].get(title)
    if trakt_id:
        return trakt_id

    logger.debug('Searching trakt.')
    results = search(title, [required_type])
    if results is None:  # Connection error
        return 0  # Don't store in cache
    elif results == [] or results[0]['score'] < 5:  # Weak or no match
        logger.warning('Trakt search yielded no results.')
        notify('Trakt search yielded no results for ' + title)
        trakt_id = -1
    else:
        trakt_id = results[0][required_type]['ids']['trakt']

    trakt_cache[required_type][title] = trakt_id
    logger.debug(f'Trakt ID: {trakt_id}')
    write_json(trakt_cache, TRAKT_CACHE_PATH)
    return trakt_id
Example #30
            yield from flatten_categories(v, parents)
            parents.pop()
        elif v is True:
            yield '.'.join(parents + [k])


# TODO: Parse this data to allow enabling only subcategories
# Example: scrobble=False, scrobble.stop=True
# currently, user would have to specify all subcategories of scrobble
user_notif_categories = config['general']['enable_notifs'].get()
categories = deepcopy(CATEGORIES)
merge_categories(categories, user_notif_categories)
enabled_categories = set(flatten_categories(categories))

if enabled_categories:
    logger.debug("Notifications enabled for categories: "
                 f"{', '.join(sorted(enabled_categories))}")
    if sys.platform == 'win32':
        from win10toast import ToastNotifier
        toaster = ToastNotifier()
    elif sys.platform == 'darwin':
        import subprocess as sp
    else:
        try:
            from jeepney import DBusAddress, new_method_call
            from jeepney.io.blocking import open_dbus_connection
        except (ImportError, ModuleNotFoundError):
            import subprocess as sp
            notifier = None
        else:
            dbus_connection = open_dbus_connection(bus='SESSION')
            notifier = DBusAddress('/org/freedesktop/Notifications',