def main():
    f = open("./resources/appheader.md", "r")
    st.markdown(f.read())
    f.close()
    prompt = st.text_area("Input the original text")
    transcript = st.text_area("Input the transcript from the audio")
    duration = st.text_input("Input the duration of the recording in seconds")

    logger.disable("litreading")

    if st.button("Grade"):

        if prompt == "" or transcript == "" or duration == "":
            st.error("Please fill all fields before grading")

        else:
            df = pd.DataFrame(
                {
                    PROMPT_TEXT_COL: [prompt],
                    ASR_TRANSCRIPT_COL: [transcript],
                    DURATION_COL: [float(duration)],
                }
            )

            grade = grade_wcpm(df, model_type="test", baseline_mode=False)

            st.write(f"Your wcpm is {grade[0]}")
Example #2
def setup_logger(level: Union[str, int] = "DEBUG", ignored: List[str] = ""):
    logging.basicConfig(handlers=[InterceptHandler()],
                        level=logging.getLevelName(level))
    for ignore in ignored:
        logger.disable(ignore)

    logger.info('\u001b[1;94m Logger is successfully configured\u001b[0m')
Example #3
def setup_logger(level: Union[str, int] = 'DEBUG',
                 ignored: List[str] = '') -> None:
    logging.basicConfig(handlers=[InterceptHandler()],
                        level=logging.getLevelName(level))
    for ignore in ignored:
        logger.disable(ignore)
    logging.info('Logging is successfully configured')
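Examples #2, #3, #14, #18 and #24 all reference an InterceptHandler that is not shown. A minimal sketch, following the recipe from the loguru documentation for forwarding stdlib logging records into loguru, could look like this:

import logging
from loguru import logger

class InterceptHandler(logging.Handler):
    def emit(self, record):
        # Map the stdlib level name onto the matching loguru level, if one exists.
        try:
            level = logger.level(record.levelname).name
        except ValueError:
            level = record.levelno
        # Walk up the stack so loguru attributes the message to the original caller,
        # not to the logging module itself.
        frame, depth = logging.currentframe(), 2
        while frame and frame.f_code.co_filename == logging.__file__:
            frame = frame.f_back
            depth += 1
        logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())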
Example #4
def main():
    """
    main: the main method, which parses the args from command line and runs the program as requested
    """
    args = run_parser()
    if args.quiet:
        logger.disable(__name__)
Example #5
def run(
    app: str = typer.Option(default="Spotify", help="Application to track"),
    debug: bool = typer.Option(default=False,
                               is_flag=True,
                               help="To show debug messages or not"),
    traditional: bool = typer.Option(
        default=False,
        is_flag=True,
        help="Translate lyrics into Traditional Chinese if possible",
    ),
):  # pragma: no cover
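    # Dict dispatch: calls logger.enable("touchbar_lyric") when debug is True, logger.disable otherwise.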
    {True: logger.enable, False: logger.disable}[debug]("touchbar_lyric")

    if not debug:
        logger.disable("touchbar_lyric")
        logger.disable("__main__")

    media_info = get_info(app)
    if media_info is None:
        return

    songs = universal_search(media_info.name, media_info.artists)

    for song in songs:
        if song.anchor(media_info.position):
            line: str = song.anchor(media_info.position)
            if traditional:
                line = HanziConv.toTraditional(line)
            print(line)
            break
Example #6
def main():
    logger.remove()
    logger.add(stdout, format='{level} | [{time:HH:mm:ss}] {name} -- {message}.',
               level='INFO', filter=lambda record: record['extra']['indent'] == 1)
    logger.add(stdout, format='  {level} | [{time:HH:mm:ss}] {name} -- {message}.',
               level='INFO', filter=lambda record: record['extra']['indent'] == 2)
    main_logger = logger.bind(indent=1)

    args = get_argument_parser().parse_args()

    if not args.verbose:
        main_logger.info('Verbose is off. Not logging messages')
        logger.disable('__main__')
        logger.disable('processes')

    main_logger.info(datetime.now().strftime('%Y-%m-%d %H:%M'))

    main_logger.info('Loading settings')
    settings = load_settings_file(args.config_file)
    settings_dataset = settings['dataset_creation_settings']
    settings_files_io = settings['dirs_and_files']
    main_logger.info('Settings loaded')

    main_logger.info('Starting Clotho dataset creation')
    create_dataset(
        settings_dataset=settings_dataset,
        settings_dirs_and_files=settings_files_io)
    main_logger.info('Dataset created')
Example #7
def db(
    ctx: click.Context,
    config: Path,
    verbose: int,
) -> None:
    """DB migration tool for asynpg.
    """

    if verbose == 0:
        logger.disable('asyncpg-migrate')
    else:
        logger.enable('asyncpg-migrate')
        verbosity = {
            1: 'INFO',
            2: 'DEBUG',
            3: 'TRACE',
        }
        logger.add(
            sys.stderr,
            format='{time} {message}',
            filter='asyncpg-migrate',
            level=verbosity.get(verbose, 'TRACE'),
        )

    logger.debug(
        'Flags are config={config}, verbose={verbose}',
        config=config,
        verbose=verbose,
    )

    ctx.ensure_object(dict)
    ctx.obj['configuration_file_path'] = config
Example #8
def app():
    logger.disable("")
    load_dotenv(find_dotenv())
    if os.getenv("ENV") != "test":
        raise RuntimeError("Set environment variable to env")
    app_object = flask_app.test_client()
    yield app_object
Example #9
def main(argv=None):
    """Database main function
    """
    args = parse_cmd_arguments(argv)

    if args.disable_log:
        logger.disable("__main__")
        logger.disable("src.database_operations")

    if args.all_products:
        pprint(db.list_all_products())

    elif args.available_products:
        pprint(db.show_available_products())

    elif args.all_customers:
        pprint(db.list_all_customers())

    elif args.drop_collections:
        db.drop_collections()

    elif args.drop_database:
        db.drop_database()

    elif args.rentals_for_customer:
        pprint(db.rentals_for_customer(args.rentals_for_customer))

    elif args.customers_renting_product:
        pprint(db.show_rentals(args.customers_renting_product))

    elif args.insert:
        pprint(db.import_data(*args.insert))
Example #10
def main():

    # Treat the logging.
    logger.remove()
    logger.add(stdout,
               format='{level} | [{time:HH:mm:ss}] {name} -- {message}.',
               level='INFO',
               filter=lambda record: record['extra']['indent'] == 1)
    logger.add(stdout,
               format='  {level} | [{time:HH:mm:ss}] {name} -- {message}.',
               level='INFO',
               filter=lambda record: record['extra']['indent'] == 2)
    main_logger = logger.bind(indent=1)

    args = get_argument_parser().parse_args()

    main_logger.info('Doing only dataset creation')

    # Check for verbosity.
    if not args.verbose:
        main_logger.info('Verbose is off. Not logging messages')
        logger.disable('__main__')
        logger.disable('processes')

    main_logger.info(datetime.now().strftime('%Y-%m-%d %H:%M'))

    # Load settings file.
    main_logger.info('Loading settings')
    settings = load_settings_file(args.config_file_dataset)
    main_logger.info('Settings loaded')

    # Create the dataset.
    main_logger.info('Starting Clotho dataset creation')
    create_dataset(settings)
    main_logger.info('Dataset created')
Example #11
def serve():
    import argparse

    import Pyro5
    from loguru import logger

    from pymmcore_plus.remote._serialize import register_serializers
    from pymmcore_plus.remote.server import pyroCMMCore

    parser = argparse.ArgumentParser()
    parser.add_argument("-p",
                        "--port",
                        type=int,
                        default=DEFAULT_PORT,
                        help="port")
    parser.add_argument("--host", default=DEFAULT_HOST)
    parser.add_argument("--verbose", action="store_true", default=VERBOSE)
    args = parser.parse_args()

    if not args.verbose:
        logger.disable("pymmcore_plus")

    register_serializers()
    Pyro5.api.serve(
        {pyroCMMCore: CORE_NAME},
        use_ns=False,
        host=args.host,
        port=args.port,
        verbose=args.verbose,
    )
Example #12
    def transform(self, training_data: pd.DataFrame) -> pd.DataFrame:
        """
        Transform training data.

        :param training_data: Training data.
        :type training_data: pd.DataFrame
        :return: Transformed training data.
        :rtype: pd.DataFrame
        """
        if not self.use_transform:
            return training_data

        logger.debug(f"Transforming dataset via {self.__class__.__name__}")
        training_data = training_data.copy()
        if self.output_column not in training_data.columns:
            training_data[self.output_column] = None

        logger.disable("dialogy")
        for i, row in tqdm(training_data.iterrows(), total=len(training_data)):
            transcripts = self.make_transform_values(row[self.input_column])
            entities = self.get_entities(transcripts)
            is_empty_series = isinstance(
                row[self.output_column],
                pd.Series) and (row[self.output_column].isnull())
            is_row_nonetype = row[self.output_column] is None
            is_row_nan = pd.isna(row[self.output_column])
            if is_empty_series or is_row_nonetype or is_row_nan:
                training_data.at[i, self.output_column] = entities
            else:
                training_data.at[i, self.output_column] = (
                    row[self.output_column] + entities)
        logger.enable("dialogy")
        return training_data
Example #13
    def __init__(
        self,
        default_notification_title="Default Title",
        default_notification_message="Default Message",
        default_notification_application_name="Python Application (notify.py)",
        default_notification_icon=None,
        default_notification_audio=None,
        enable_logging=False,
        **kwargs,
    ):
        """Main Notify Class.

        Optional Arguments:
            override_detected_notification_system: Optional kwarg that allows overriding the detected notifier.
            disable_logging: Optional kwarg that will disable stdout logging from this library.
            custom_mac_notificator: Optional kwarg for a custom mac notifier (probably because you want to change the icon). This is a direct path to the parent directory (.app).

        """

        if not enable_logging:
            logger.disable("notifypy")
        else:
            logger.info("Logging is enabled.")

        if kwargs.get("override_detected_notification_system"):
            """ 
            This optional kwarg allows for the use of overriding the detected notifier.
            Use at your own risk 
            """
            selected_override = kwargs.get("override_detected_notification_system")
            if issubclass(selected_override, BaseNotifier):
                self._notifier_detect = selected_override
            else:
                raise ValueError("Overrided Notifier must inherit from BaseNotifier.")
        else:
            self._notifier_detect = self._selected_notification_system()

        # Initialize.
        self._notifier = self._notifier_detect(**kwargs)

        # Set the defaults.
        self._notification_title = default_notification_title
        self._notification_message = default_notification_message
        self._notification_application_name = default_notification_application_name

        # These defaults require verification
        if default_notification_icon:
            self._notification_icon = self._verify_icon_path(default_notification_icon)
        else:
            self._notification_icon = str(
                os.path.join(os.path.dirname(__file__), "py-logo.png")
            )

        if default_notification_audio:
            self._notification_audio = self._verify_audio_path(
                default_notification_audio
            )
        else:
            self._notification_audio = None
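The class above is from notify.py; a hedged usage sketch, relying on that library's documented attribute-style API, might look like:

from notifypy import Notify

notification = Notify(enable_logging=False)  # leaves the library's internal loguru output disabled
notification.title = "Export finished"
notification.message = "The report was written successfully."
notification.send()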
Example #14
def init_logging(verbosity: int):
    logger.remove()  # clear the default handler (older loguru releases named this logger.stop())
    logger.add(sys.stderr,
               format=LOGURU_FORMAT,
               level=VERBOSITY_LEVELS.get(verbosity, 'TRACE'),
               backtrace=False)
    logging.basicConfig(level=logging.NOTSET, handlers=[InterceptHandler()])
    logger.disable('aiosqlite.core')
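VERBOSITY_LEVELS and LOGURU_FORMAT are defined elsewhere in the originating project; illustrative stand-ins for experimenting with the snippet could be:

# Illustrative values only; the original project defines its own.
LOGURU_FORMAT = "<green>{time:HH:mm:ss}</green> | <level>{level: <8}</level> | {message}"
VERBOSITY_LEVELS = {0: "WARNING", 1: "INFO", 2: "DEBUG"}  # .get(verbosity, 'TRACE') covers anything higher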
Example #15
        def run_func(*args, **kwargs):
            if not self.enable_log:
                logger.disable(__name__)
                print(f"Logging Disabled for Function: {func.__name__}")

            result = func(*args, **kwargs)
            logger.enable(__name__)
            return result
Example #16
def set_logger(is_enable):
    if is_enable:
        logger.enable(MODULE_NAME)
    else:
        logger.disable(MODULE_NAME)

    simplesqlite.set_logger(is_enable)
    subprocrunner.set_logger(is_enable)
Example #17
def test_log_before_disable(writer):
    logger.add(writer, format="{message}")
    logger.enable("")
    logger.debug("yes")
    logger.disable("tests")
    logger.debug("nope")
    result = writer.read()
    assert result == "yes\n"
Example #18
def setup():
    logger.add(sys.stderr,
               format="{time} {level} {message}",
               filter="my_module",
               level="INFO")
    logger.add(base_cfg.LOGS_BASE_PATH + "/file_{time}.log")
    logger.disable("sqlalchemy.engine.base")
    logging.basicConfig(handlers=[InterceptHandler()], level=logging.INFO)
Example #19
def memory_db():
    from mycartable.database import init_database

    logger.disable("")
    db = init_database(Database())
    add_database_to_types(db)

    logger.enable("")
    return db
Example #20
def test_log_before_disable_f_globals_name_absent(writer,
                                                  f_globals_name_absent):
    logger.add(writer, format="{message}")
    logger.enable(None)
    logger.debug("yes")
    logger.disable(None)
    logger.debug("nope")
    result = writer.read()
    assert result == "yes\n"
Example #21
def run(args):
    hosts = args.node or ["localhost"] * 3

    if not args.verbose:
        LOG.remove()
        LOG.add(
            sys.stdout,
            format="<green>[{time:YYYY-MM-DD HH:mm:ss.SSS}]</green> {message}",
        )
        LOG.disable("infra")

    LOG.info("Starting {} CCF nodes...".format(len(hosts)))
    if args.enclave_type == "virtual":
        LOG.warning("Virtual mode enabled")

    with infra.ccf.network(hosts=hosts,
                           binary_directory=args.binary_dir,
                           dbg_nodes=args.debug_nodes) as network:
        if args.recover:
            args.label = args.label + "_recover"
            LOG.info("Recovering network from:")
            LOG.info(f" - Ledger: {args.ledger_dir}")
            LOG.info(
                f" - Defunct network public encryption key: {args.network_enc_pubk}"
            )
            LOG.info(f" - Common directory: {args.common_dir}")
            network.start_in_recovery(args, args.ledger_dir, args.common_dir)
            network.recover(args, args.network_enc_pubk)
        else:
            network.start_and_join(args)

        primary, backups = network.find_nodes()
        LOG.info("Started CCF network with the following nodes:")
        LOG.info("  Node [{:2d}] = {}:{}".format(primary.node_id,
                                                 primary.pubhost,
                                                 primary.rpc_port))
        for b in backups:
            LOG.info("  Node [{:2d}] = {}:{}".format(b.node_id, b.pubhost,
                                                     b.rpc_port))

        LOG.info(
            "You can now issue business transactions to the {} application.".
            format(args.package))
        LOG.info(
            "See https://microsoft.github.io/CCF/users/issue_commands.html for more information."
        )
        LOG.warning("Press Ctrl+C to shutdown the network.")

        try:
            while True:
                time.sleep(60)

        except KeyboardInterrupt:
            LOG.info("Stopping all CCF nodes...")

    LOG.info("All CCF nodes stopped.")
Example #22
def test_disable(writer, name, should_log):
    logger.add(writer, format="{message}")
    logger.disable(name)
    logger.debug("message")
    result = writer.read()

    if should_log:
        assert result == "message\n"
    else:
        assert result == ""
Example #23
def disable_loguru(logger: Optional[object] = None) -> object:
    """Disable loguru logger or create and disable."""
    if isinstance(logger, logging.Logger):
        return disable_std_logging(logger)
    filterwarnings("ignore")
    if logger is None:
        from loguru import logger
    logger.disable("bllb")
    logger.remove()
    return logger
Example #24
def setup(lgp: Path, ignore: Optional[List[str]] = None):
    logger.add(sys.stderr,
               format="{time} {level} {message}",
               filter="my_module",
               level="INFO")
    logger.add(lgp / "file_{time}.log")
    logging.basicConfig(handlers=[InterceptHandler()], level=logging.INFO)
    for i in ignore or []:
        logger.disable(i)
    logger.disable("sqlalchemy.engine.base")
Example #25
def set_logger(is_enable, propagation_depth=1):
    if is_enable:
        logger.enable(MODULE_NAME)
    else:
        logger.disable(MODULE_NAME)

    if propagation_depth <= 0:
        return

    dataproperty.set_logger(is_enable, propagation_depth - 1)
Example #26
def set_logger(is_enable: bool, propagation_depth: int = 1) -> None:
    if is_enable:
        logger.enable(MODULE_NAME)
    else:
        logger.disable(MODULE_NAME)

    if propagation_depth <= 0:
        return

    subprocrunner.set_logger(is_enable, propagation_depth - 1)
Example #27
def test_f_globals_name_absent_with_others(writer, f_globals_name_absent):
    logger.add(writer, format="{message}")
    logger.info("1")
    logger.enable(None)
    logger.disable("foobar")
    logger.enable("foo.bar")
    logger.disable(None)
    logger.info("2")
    logger.enable("foobar")
    logger.enable(None)
    logger.info("3")
    assert writer.read() == "1\n3\n"
Example #28
    def set_params(self, **parameters):
        # before sklearn interface, super call was in init:
        # create a Tree (creates root Node, etc)
        super(ArborealTree, self).__init__()

        for parameter, value in parameters.items():
            setattr(self, parameter, value)

        if self.disable_logging_parallel:
            logger.debug("Disabling logging in core")
            logger.disable("core")

        return self
Example #29
def caplog(caplog: _logging.LogCaptureFixture) -> _logging.LogCaptureFixture:
    class LoguruHandler(logging.Handler):
        def emit(self, record: logging.LogRecord) -> None:
            logging.getLogger(record.name).handle(record)

    logger.enable('axion')
    handler_id = logger.add(
        LoguruHandler(),
        format='{message}',
    )
    yield caplog
    logger.remove(handler_id)
    logger.disable('axion')
Example #30
def setup_logging(loglevel='info', stream_sink=sys.stdout):
    from . import runtime_state

    if runtime_state.proc_rank != 0:
        logger.disable('veros')
        return

    kwargs = {}
    if sys.stdout.isatty():
        kwargs.update(
            colorize=True
        )
    else:
        kwargs.update(
            colorize=False
        )

    logger.level('TRACE', color='<dim>')
    logger.level('DEBUG', color='<dim><cyan>')
    logger.level('INFO', color='')
    logger.level('WARNING', color='<yellow>')
    logger.level('ERROR', color='<bold><red>')
    logger.level('CRITICAL', color='<bold><red><WHITE>')
    logger.level('SUCCESS', color='<dim><green>')

    config = {
        'handlers': [
            dict(
                sink=stream_sink,
                level=loglevel.upper(),
                format='<level>{message}</level>',
                **kwargs
            )
        ]
    }

    def showwarning(message, cls, source, lineno, *args):
        logger.warning(
            '{warning}: {message} ({source}:{lineno})',
            message=message,
            warning=cls.__name__,
            source=source,
            lineno=lineno
        )

    warnings.showwarning = showwarning

    logger.enable('veros')
    return logger.configure(**config)