def main():
    """Run dataset creation only: build Clotho examples, then extract features."""
    args = get_argument_parser().parse_args()

    # Load the YAML settings file from the directory/name/extension given
    # on the command line.
    settings = load_yaml_file(
        Path(args.file_dir, f'{args.config_file}.{args.file_ext}'))

    init_loggers(verbose=args.verbose,
                 settings=settings['dirs_and_files'])

    # Top-level logger and an indented one for sub-steps.
    logger_main = logger.bind(is_caption=False, indent=0)
    logger_sec = logger.bind(is_caption=False, indent=1)

    logger_main.info(datetime.now().strftime('%Y-%m-%d %H:%M'))
    logger_main.info('Doing only dataset creation')
    logger_main.info('Starting Clotho dataset creation')

    dirs_and_files = settings['dirs_and_files']

    logger_sec.info('Creating examples')
    create_dataset(settings_dataset=settings['dataset_creation_settings'],
                   settings_dirs_and_files=dirs_and_files)
    logger_sec.info('Examples created')

    logger_sec.info('Extracting features')
    extract_features(root_dir=dirs_and_files['root_dirs']['data'],
                     settings_data=dirs_and_files['dataset'],
                     settings_features=settings['feature_extraction_settings'])
    logger_sec.info('Features extracted')

    logger_main.info('Dataset created')
# ===== Example 2 (示例#2) =====
def main():
    """Set up console logging, load settings and create the Clotho dataset."""
    # Two loguru sinks: indent level 1 for top-level messages and
    # indent level 2 for nested messages (prefixed with two spaces).
    logger.remove()
    logger.add(stdout, format='{level} | [{time:HH:mm:ss}] {name} -- {message}.',
               level='INFO', filter=lambda record: record['extra']['indent'] == 1)
    logger.add(stdout, format='  {level} | [{time:HH:mm:ss}] {name} -- {message}.',
               level='INFO', filter=lambda record: record['extra']['indent'] == 2)
    main_logger = logger.bind(indent=1)

    args = get_argument_parser().parse_args()

    # When verbosity is off, silence the loggers of this module and of
    # the worker processes.
    if not args.verbose:
        # Fixed typo in the message: "Verbose if off" -> "Verbose is off".
        main_logger.info('Verbose is off. Not logging messages')
        logger.disable('__main__')
        logger.disable('processes')

    main_logger.info(datetime.now().strftime('%Y-%m-%d %H:%M'))

    main_logger.info('Loading settings')
    settings = load_settings_file(args.config_file)
    settings_dataset = settings['dataset_creation_settings']
    settings_files_io = settings['dirs_and_files']
    main_logger.info('Settings loaded')

    main_logger.info('Starting Clotho dataset creation')
    create_dataset(
        settings_dataset=settings_dataset,
        settings_dirs_and_files=settings_files_io)
    main_logger.info('Dataset created')
# ===== Example 3 (示例#3) =====
def main():
    """Create Clotho examples and extract features, when the workflow enables it."""
    args = get_argument_parser().parse_args()

    # Build the settings path from the CLI pieces and load it.
    settings_path = Path(args.file_dir, f'{args.config_file}.{args.file_ext}')
    settings = load_yaml_file(settings_path)

    init_loggers(verbose=args.verbose, settings=settings['dirs_and_files'])

    logger_main = logger.bind(is_caption=False, indent=0)
    logger_inner = logger.bind(is_caption=False, indent=1)

    # Nothing to do unless dataset creation is requested.
    if not settings['workflow']['dataset_creation']:
        return

    logger_main.info('Starting creation of dataset')

    logger_inner.info('Creating examples')
    dataset_multiprocess.create_dataset(
        settings_dataset=settings['dataset_creation_settings'],
        settings_dirs_and_files=settings['dirs_and_files'])
    logger_inner.info('Examples created')

    logger_inner.info('Extracting features')
    dataset_multiprocess.extract_features(
        root_dir=settings['dirs_and_files']['root_dirs']['data'],
        settings_data=settings['dirs_and_files']['dataset'],
        settings_features=settings['feature_extraction_settings'])
    logger_inner.info('Features extracted')
    logger_main.info('Creation of dataset ended')
# ===== Example 4 (示例#4) =====
def main():
    """Configure logging, load settings and run Clotho dataset creation only."""
    # Treat the logging: one loguru sink per indent level so nested
    # messages are visually offset by two spaces.
    logger.remove()
    logger.add(stdout,
               format='{level} | [{time:HH:mm:ss}] {name} -- {message}.',
               level='INFO',
               filter=lambda record: record['extra']['indent'] == 1)
    logger.add(stdout,
               format='  {level} | [{time:HH:mm:ss}] {name} -- {message}.',
               level='INFO',
               filter=lambda record: record['extra']['indent'] == 2)
    main_logger = logger.bind(indent=1)

    args = get_argument_parser().parse_args()

    main_logger.info('Doing only dataset creation')

    # Check for verbosity; silence this module and worker processes if off.
    if not args.verbose:
        # Fixed typo in the message: "Verbose if off" -> "Verbose is off".
        main_logger.info('Verbose is off. Not logging messages')
        logger.disable('__main__')
        logger.disable('processes')

    main_logger.info(datetime.now().strftime('%Y-%m-%d %H:%M'))

    # Load settings file.
    main_logger.info('Loading settings')
    settings = load_settings_file(args.config_file_dataset)
    main_logger.info('Settings loaded')

    # Create the dataset.
    main_logger.info('Starting Clotho dataset creation')
    create_dataset(settings)
    main_logger.info('Dataset created')
# ===== Example 5 (示例#5) =====
def main():
    """Run the configured workflow: dataset creation, test-set creation, DNN method."""
    args = get_argument_parser().parse_args()

    settings = load_yaml_file(
        Path(args.file_dir, f'{args.config_file}.{args.file_ext}'))

    init_loggers(verbose=args.verbose,
                 settings=settings['dirs_and_files'])

    logger_main = logger.bind(is_caption=False, indent=0)
    logger_inner = logger.bind(is_caption=False, indent=1)

    # Hoist the settings sections used more than once.
    workflow = settings['workflow']
    dirs_and_files = settings['dirs_and_files']

    if workflow['dataset_creation']:
        logger_main.info('Starting creation of dataset')

        logger_inner.info('Creating examples')
        dataset.create_dataset(
            settings_dataset=settings['dataset_creation_settings'],
            settings_dirs_and_files=dirs_and_files)
        logger_inner.info('Examples created')

        logger_inner.info('Extracting features')
        dataset.extract_features(
            root_dir=dirs_and_files['root_dirs']['data'],
            settings_data=dirs_and_files['dataset'],
            settings_features=settings['feature_extraction_settings'])
        logger_inner.info('Features extracted')
        logger_main.info('Creation of dataset ended')

    if workflow['dnn_testing']:
        # Create the test split only if its feature directory is missing.
        features_dirs = dirs_and_files['dataset']['features_dirs']
        test_split_feat_dir = Path(
            dirs_and_files['root_dirs']['data']).joinpath(
                features_dirs['output'], features_dirs['test'])
        if test_split_feat_dir.exists():
            logger_inner.info('Found existing test data')
        else:
            logger_main.info('Starting creation of test dataset')
            logger_inner.info('Extracting features')
            dataset.extract_features_test(
                root_dir=dirs_and_files['root_dirs']['data'],
                settings_data=dirs_and_files['dataset'],
                settings_features=settings['feature_extraction_settings'],
                settings_audio=settings['dataset_creation_settings']['audio'])
            logger_inner.info('Features extracted')
            logger_main.info('Creation of test dataset ended')

    # Any DNN stage requires running the method.
    if workflow['dnn_training'] or \
            workflow['dnn_evaluation'] or \
            workflow['dnn_testing']:
        method.method(settings)
# ===== Example 6 (示例#6) =====
def main():
    """Parse CLI arguments, load the YAML settings and dispatch to the method."""
    args = get_argument_parser().parse_args()

    # Settings file path is assembled from the CLI pieces.
    config_path = Path(args.file_dir, f'{args.config_file}.{args.file_ext}')
    settings = load_yaml_file(config_path)

    init_loggers(verbose=args.verbose, settings=settings['logging'])

    method.method(settings)
# ===== Example 7 (示例#7) =====
def main():
    """Load the settings for a given job id and run the method with it."""
    args = get_argument_parser().parse_args()

    settings_path = Path(args.file_dir, f'{args.config_file}.{args.file_ext}')
    settings = load_yaml_file(settings_path)

    # The job id tags the log files so concurrent jobs do not collide.
    init_loggers(verbose=args.verbose,
                 settings=settings['dirs_and_files'],
                 job_id=args.job_id)

    method.method(settings, args.job_id)
def main():
    args = get_argument_parser().parse_args()

    file_dir = args.file_dir
    config_file = args.config_file
    file_ext = args.file_ext
    verbose = args.verbose

    settings = file_io.load_yaml_file(
        Path(file_dir, f'{config_file}.{file_ext}'))

    printing.init_loggers(verbose=verbose, settings=settings['dirs_and_files'])

    logger_main = logger.bind(is_caption=False, indent=0)

    logger_main.info('Starting method only')
    method(settings)
    logger_main.info('Method\'s done')
    batch_size = 16
    nhead = 4
    nhid = 192
    nlayers = 2
    ninp = 64
    ntoken = 4367 + 1
    clip_grad = 2.5
    lr = 3e-4  # learning rate
    beam_width = 3
    training_epochs = 50
    log_interval = 100
    checkpoint_save_interval = 5

    device = torch.device('cuda:0')

    args = get_argument_parser().parse_args()

    file_dir = args.file_dir
    config_file = args.config_file
    file_ext = args.file_ext
    verbose = args.verbose

    print("load settings start")

    settings = load_yaml_file(Path(file_dir, f'{config_file}.{file_ext}'))

    settings_training = settings['dnn_training_settings']['training'],
    settings_data = settings['dnn_training_settings']['data'],
    settings_io = settings['dirs_and_files']

    indices_list = _load_indices_file(