Example #1
def exec_import_dataset(args):
    """
    Perform basic checks and create the objects needed for the import.
    Import the dataset into the database.
    Run the analysis and import the results into the database.
    Run the metrics and import the results into the database.
    Return nothing.
    """
    # Get database configurations.
    database_info = None
    if args.database_config:
        database_info = get_database_configurations(args.database_config)

    # Create GenericDataset object and set settings.
    dataset = get_dataset(args)

    # Get common directories.
    directories = import_system_utilities.get_common_working_directories(
        dataset.name)

    # Make sure the tables exist in the database and get a database identifier.
    database_id = import_system_utilities.run_syncdb(database_info)
    # Make sure the default database exists as well.
    if database_id != 'default':  # Skip when syncdb was already run on the default database above.
        import_system_utilities.run_syncdb(None)

    if args.dry_run:
        # Start the dataset check process
        import_system_utilities.check_dataset(dataset)
    else:
        # Start the import process
        dataset_name = import_system_utilities.import_dataset(
            database_id, dataset, directories,
            public=args.public, public_documents=args.public_documents,
            verbose=args.verbose)
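A minimal usage sketch for the function above follows. The command-line parser that normally produces args is not part of this example, so the argparse.Namespace below is hand-built; the attribute names database_config, dry_run, public, public_documents, and verbose come from the code above, while the dataset field consumed by get_dataset(args) is a hypothetical placeholder.

import argparse

# Hand-built namespace standing in for parsed command-line arguments (assumed shape).
args = argparse.Namespace(
    database_config=None,         # no custom config; the default database is used
    dataset='path/to/dataset',    # hypothetical field read by get_dataset(args)
    dry_run=False,                # True would only run check_dataset, importing nothing
    public=True,
    public_documents=False,
    verbose=True,
)

exec_import_dataset(args)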
Example #2
def exec_run_analysis(args):
    """Run an analysis on the specified dataset."""
    # Get database configurations.
    database_info = None
    if args.database_config:
        database_info = get_database_configurations(args.database_config)

    # Make sure the tables exist in the database and get a database identifier.
    database_id = import_system_utilities.run_syncdb(database_info)
    # Make sure the default database exists as well.
    if database_id != 'default':  # Skip when syncdb was already run on the default database above.
        import_system_utilities.run_syncdb(None)

    # Get common directories.
    directories = import_system_utilities.get_common_working_directories(
        args.dataset_identifier)

    # Create the analysis object.
    analysis = get_analysis(args, directories)

    if args.dry_run:
        import_system_utilities.check_analysis(database_id,
                                               args.dataset_identifier,
                                               analysis,
                                               directories,
                                               verbose=args.verbose)
    else:
        # Run the analysis.
        analysis_identifier = import_system_utilities.run_analysis(
            database_id,
            args.dataset_identifier,
            analysis,
            directories,
            verbose=args.verbose)
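As before, a minimal invocation sketch with a hand-built namespace. Only database_config, dataset_identifier, dry_run, and verbose are read directly by exec_run_analysis; the analysis field is a hypothetical placeholder for whatever get_analysis(args, directories) expects.

import argparse

# Hand-built namespace standing in for parsed command-line arguments (assumed shape).
args = argparse.Namespace(
    database_config=None,             # no custom config; the default database is used
    dataset_identifier='my_dataset',  # identifier of a previously imported dataset
    analysis='lda',                   # hypothetical field read by get_analysis(args, directories)
    dry_run=False,                    # True would only run check_analysis
    verbose=True,
)

exec_run_analysis(args)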