# Exemple #1
# 0
def exec_import_dataset(args):
    """
    Perform basic checks and create necessary objects in preparation to import.
    Import dataset into database.
    Run the analysis and import results into the database.
    Run metrics and import results into the database.
    Return nothing.

    args -- parsed command-line namespace; reads database_config, dry_run,
            public, public_documents, and verbose.
    """
    # Get database configurations (None selects the default database).
    database_info = None
    if args.database_config:
        database_info = get_database_configurations(args.database_config)

    # Create GenericDataset object and set settings.
    dataset = get_dataset(args)

    # Get common directories.
    directories = import_system_utilities.get_common_working_directories(
        dataset.name)

    # Make sure the tables exist in the database and get a database identifier.
    database_id = import_system_utilities.run_syncdb(database_info)
    # Make sure that the default database exists.
    if database_id != 'default':  # Check so syncdb isn't run twice in a row for no reason.
        import_system_utilities.run_syncdb(None)

    if args.dry_run:
        # Start the dataset check process.
        import_system_utilities.check_dataset(dataset)
    else:
        # Start the import process; the returned dataset name is not used here,
        # so the previous unused local binding was dropped.
        import_system_utilities.import_dataset(
            database_id, dataset, directories,
            public=args.public, public_documents=args.public_documents,
            verbose=args.verbose)
# Exemple #2
# 0
def exec_migrate_dataset(args):
    """Move a dataset from one database to another.

    Reads dataset_name, from_database, and to_database from *args*.
    Aborts early when the source and destination databases are the same.
    """
    dataset_id = args.dataset_name
    from_database = args.from_database
    to_database = args.to_database

    if from_database == to_database:
        print('The from database is the same as the to database.')
        # BUG FIX: the original printed the warning but fell through and
        # migrated anyway. Nothing sensible to do here, so bail out.
        return

    # Get database configurations ('default' means no explicit config file).
    from_database_config = None
    to_database_config = None
    if from_database != 'default':
        from_database_config = get_database_configurations(from_database)
    if to_database != 'default':
        to_database_config = get_database_configurations(to_database)

    # Ensure that the working directory is created prior to syncdb.
    import_system_utilities.make_working_dir()

    # Make sure that both databases exist.
    from_db_id = import_system_utilities.run_syncdb(from_database_config)
    to_db_id = import_system_utilities.run_syncdb(to_database_config)

    # Run migrate.
    import_system_utilities.migrate_dataset(dataset_id, from_db_id, to_db_id)
# Exemple #3
# 0
def exec_run_analysis(args):
    """Run an analysis on the specified dataset.

    args -- parsed command-line namespace; reads database_config,
            dataset_identifier, dry_run, and verbose.
    """
    # Get database configurations (None selects the default database).
    database_info = None
    if args.database_config:
        database_info = get_database_configurations(args.database_config)

    # Make sure the tables exist in the database and get a database identifier.
    database_id = import_system_utilities.run_syncdb(database_info)
    # Make sure that the default database exists.
    if database_id != 'default':  # Check so syncdb isn't run twice in a row for no reason.
        import_system_utilities.run_syncdb(None)

    # Get common directories.
    directories = import_system_utilities.get_common_working_directories(
        args.dataset_identifier)

    # Create the analysis object.
    analysis = get_analysis(args, directories)

    if args.dry_run:
        import_system_utilities.check_analysis(database_id,
                                               args.dataset_identifier,
                                               analysis,
                                               directories,
                                               verbose=args.verbose)
    else:
        # Run the analysis; the returned identifier is not used here, so the
        # previous unused local binding was dropped.
        import_system_utilities.run_analysis(
            database_id,
            args.dataset_identifier,
            analysis,
            directories,
            verbose=args.verbose)
# Exemple #4
# 0
def exec_import_dataset(args):
    """
    Perform basic checks and create necessary objects in preparation to import.
    Import dataset into database.
    Run the analysis and import results into the database.
    Run metrics and import results into the database.
    Return nothing.

    args -- parsed command-line namespace; reads database_config, dry_run,
            public, public_documents, and verbose.
    """
    # Get database configurations (None selects the default database).
    database_info = None
    if args.database_config:
        database_info = get_database_configurations(args.database_config)

    # Create GenericDataset object and set settings.
    dataset = get_dataset(args)

    # Get common directories.
    directories = import_system_utilities.get_common_working_directories(
        dataset.name)

    # Make sure the tables exist in the database and get a database identifier.
    database_id = import_system_utilities.run_syncdb(database_info)
    # Make sure that the default database exists.
    if database_id != 'default':  # Check so syncdb isn't run twice in a row for no reason.
        import_system_utilities.run_syncdb(None)

    if args.dry_run:
        # Start the dataset check process.
        import_system_utilities.check_dataset(dataset)
    else:
        # Start the import process; the returned dataset name is not used here,
        # so the previous unused local binding was dropped.
        import_system_utilities.import_dataset(
            database_id, dataset, directories,
            public=args.public, public_documents=args.public_documents,
            verbose=args.verbose)
# Exemple #5
# 0
def exec_migrate_dataset(args):
    """Move a dataset from one database to another.

    Reads dataset_name, from_database, and to_database from *args*.
    Aborts early when the source and destination databases are the same.
    """
    dataset_id = args.dataset_name
    from_database = args.from_database
    to_database = args.to_database

    if from_database == to_database:
        print('The from database is the same as the to database.')
        # BUG FIX: the original printed the warning but fell through and
        # migrated anyway. Nothing sensible to do here, so bail out.
        return

    # Get database configurations ('default' means no explicit config file).
    from_database_config = None
    to_database_config = None
    if from_database != 'default':
        from_database_config = get_database_configurations(from_database)
    if to_database != 'default':
        to_database_config = get_database_configurations(to_database)

    # Ensure that the working directory is created prior to syncdb.
    import_system_utilities.make_working_dir()

    # Make sure that both databases exist.
    from_db_id = import_system_utilities.run_syncdb(from_database_config)
    to_db_id = import_system_utilities.run_syncdb(to_database_config)

    # Run migrate.
    import_system_utilities.migrate_dataset(dataset_id, from_db_id, to_db_id)
# Exemple #6
# 0
def exec_link(args):
    """Link a dataset to the default database.

    args -- parsed command-line namespace; reads database_config and
            dataset_name.
    """
    database_info = get_database_configurations(args.database_config)

    # Make sure the tables exist in the database and get a database identifier.
    database_id = import_system_utilities.run_syncdb(database_info)
    # Make sure that the default database exists.
    if database_id != 'default':  # Check so syncdb isn't run twice in a row for no reason.
        import_system_utilities.run_syncdb(None)

    # BUG FIX: the original called run_syncdb(database_info) a second time here
    # for no reason; reuse the identifier obtained above instead.
    import_system_utilities.link_dataset(database_id, args.dataset_name)
# Exemple #7
# 0
def exec_link(args):
    """Link a dataset to the default database.

    args -- parsed command-line namespace; reads database_config and
            dataset_name.
    """
    database_info = get_database_configurations(args.database_config)

    # Make sure the tables exist in the database and get a database identifier.
    database_id = import_system_utilities.run_syncdb(database_info)
    # Make sure that the default database exists.
    if database_id != 'default':  # Check so syncdb isn't run twice in a row for no reason.
        import_system_utilities.run_syncdb(None)

    # BUG FIX: the original called run_syncdb(database_info) a second time here
    # for no reason; reuse the identifier obtained above instead.
    import_system_utilities.link_dataset(database_id, args.dataset_name)
# Exemple #8
# 0
def exec_remove_metrics(args):
    """Remove listed metrics from dataset."""
    # Resolve the database configuration, if one was given on the command line.
    if args.database_config:
        database_info = get_database_configurations(args.database_config)
    else:
        database_info = None

    # Ensure the tables exist and obtain the database identifier.
    database_id = import_system_utilities.run_syncdb(database_info)
    # Guarantee the default database also exists (skip a redundant syncdb
    # when the default one was just synced above).
    if database_id != 'default':
        import_system_utilities.run_syncdb(None)

    import_system_utilities.remove_metrics(
        database_id, args.dataset_name, args.analysis_name, args.metrics)
# Exemple #9
# 0
def exec_remove_metrics(args):
    """Remove listed metrics from dataset."""
    # Resolve the database configuration, if one was given on the command line.
    config_path = args.database_config
    database_info = get_database_configurations(config_path) if config_path else None

    # Ensure the tables exist and obtain the database identifier.
    database_id = import_system_utilities.run_syncdb(database_info)
    # Guarantee the default database also exists (skip a redundant syncdb
    # when the default one was just synced above).
    if database_id != 'default':
        import_system_utilities.run_syncdb(None)

    import_system_utilities.remove_metrics(
        database_id, args.dataset_name, args.analysis_name, args.metrics)
# Exemple #10
# 0
def exec_run_analysis(args):
    """Run an analysis on the specified dataset.

    args -- parsed command-line namespace; reads database_config,
            dataset_identifier, dry_run, and verbose.
    """
    # Get database configurations (None selects the default database).
    database_info = None
    if args.database_config:
        database_info = get_database_configurations(args.database_config)

    # Make sure the tables exist in the database and get a database identifier.
    database_id = import_system_utilities.run_syncdb(database_info)
    # Make sure that the default database exists.
    if database_id != 'default':  # Check so syncdb isn't run twice in a row for no reason.
        import_system_utilities.run_syncdb(None)

    # Get common directories.
    directories = import_system_utilities.get_common_working_directories(
        args.dataset_identifier)

    # Create the analysis object.
    analysis = get_analysis(args, directories)

    if args.dry_run:
        import_system_utilities.check_analysis(
            database_id, args.dataset_identifier, analysis, directories,
            verbose=args.verbose)
    else:
        # Run the analysis; the returned identifier is not used here, so the
        # previous unused local binding was dropped.
        import_system_utilities.run_analysis(
            database_id, args.dataset_identifier, analysis, directories,
            verbose=args.verbose)