Example #1
0
def create_demo_data():
    """
    Create the demo data if it is not already there.
    """

    # Make the current dir the dir this file is in.
    known_dir, tail = os.path.split(__file__)
    if known_dir == '':
        known_dir = '.'
    known_dir = os.path.abspath(known_dir)
    os.chdir(known_dir)
    files = os.listdir(known_dir)
    plot_files = [x for x in files if x[-2:] == 'py']
    #plot_files = [x for x in plot_files if x[:12] == 'little_plot_']
    plot_files = [x for x in plot_files if x[:5] == 'plot_']
    print "plot_files", plot_files
    for plot_file in plot_files:
        THE_PARAM_T = parse_in_parameters.create_parameter_data(plot_file)
        plot_output_dir = os.path.join(THE_PARAM_T['output_dir'])
        files = os.listdir(plot_output_dir)
        demo_files = [x for x in files if x[-3:] == 'txt']
        print "len(demo_files)", len(demo_files)
        if len(demo_files) <= 4:  # 4 is the magic number
            # Assume the plot data has not been created.
            # Run the plot scenario
            analysis.main(os.path.join(plot_file))
def run_scenarios(dir, file_start='plot_', extension='.py'):
    """
    Run all of the python files starting with [file_start] in a directory.

    Parameters:
      dir - directory holding the scenario files.
      file_start - prefix a file name needs to be run as a scenario.
      extension - file extension a scenario file needs.

    The working directory is restored afterwards, even if a scenario
    raises.
    """

    known_dir = os.path.abspath(dir)
    current_dir = os.getcwd()
    os.chdir(known_dir)
    try:
        plot_files = os.listdir(known_dir)
        plot_files = [x for x in plot_files if x.endswith(extension)]
        # str.startswith replaces the deprecated string.find(x, ...) == 0.
        plot_files = [x for x in plot_files if x.startswith(file_start)]
        for plot_file in plot_files:
            eqrm_flags = parse_in_parameters.create_parameter_data(plot_file)
            plot_output_dir = os.path.join(eqrm_flags['output_dir'])
            files = os.listdir(plot_output_dir)
            demo_files = [x for x in files if x.endswith('txt')]
            if len(demo_files) <= 4:  # 4 is the magic number
                # Assume the plot data has not been created.
                # Run the plot scenario
                analysis.main(os.path.join(plot_file))
    finally:
        # Always restore the caller's working directory.
        os.chdir(current_dir)
Example #3
0
def create_demo_data():
    """
    Create the demo data if it is not already there.
    """
    
    # Make the current dir the dir this file is in.
    known_dir, tail = os.path.split(__file__)
    if known_dir == '':
        known_dir = '.'
    known_dir = os.path.abspath(known_dir)
    os.chdir(known_dir)
    files = os.listdir(known_dir)
    plot_files = [x for x in files if x[-2:] == 'py']
    #plot_files = [x for x in plot_files if x[:12] == 'little_plot_']
    plot_files = [x for x in plot_files if x[:5] == 'plot_']
    print "plot_files", plot_files
    for plot_file in plot_files:
        THE_PARAM_T = parse_in_parameters.create_parameter_data(plot_file)
        plot_output_dir = os.path.join(THE_PARAM_T['output_dir'])
        files = os.listdir(plot_output_dir)
        demo_files = [x for x in files if x[-3:] == 'txt']
        print "len(demo_files)", len(demo_files)
        if len(demo_files) <= 4: # 4 is the magic number
            # Assume the plot data has not been created.
            # Run the plot scenario
            analysis.main(os.path.join(plot_file))
Example #4
0
def run_scenarios(dir, file_start='plot_', extension='.py'):
    """
    Run every python file in a directory whose name starts with
    [file_start] and ends with [extension].
    """

    target_dir = os.path.abspath(dir)
    saved_cwd = os.getcwd()
    os.chdir(target_dir)
    candidates = [name for name in os.listdir(target_dir)
                  if name.endswith(extension) and name.startswith(file_start)]
    for scenario in candidates:
        flags = parse_in_parameters.create_parameter_data(scenario)
        out_dir = os.path.join(flags['output_dir'])
        result_files = [name for name in os.listdir(out_dir)
                        if name[-3:] == 'txt']
        # 4 or fewer result files: assume the plot data is missing,
        # so run the scenario to create it.
        if len(result_files) <= 4:
            analysis.main(os.path.join(scenario))
    os.chdir(saved_cwd)
Example #5
0
def multi_run(runs):
    """
    Run several simulations.

    Arg:
    runs - A list of dictionaries.
        Dic; "processes"; The number of processes
             "sdp"; The event control file parameters, as attributes on an
                    object

    """
    for run in runs:
        # get a temporary file for the generated control file
        (handle, control_file) = tempfile.mkstemp(
            '.py', 'multi_run_generated_')
        os.close(handle)

        try:
            # Build the base eqrm_flags.
            flags = create_parameter_data(run["sdp"])
            num_nodes = run["processes"]
            # Write an EQRM control file, then do an mpi run call
            eqrm_flags_to_control_file(control_file, flags)
            (cluster, _) = util.get_hostname()

            cmd = mpi_command(cluster, num_nodes, control_file)
            subprocess.call(cmd)
        finally:
            # Clean up the generated control file even when a step
            # above raises; the original leaked it on any failure.
            os.remove(control_file)
Example #6
0
def run_scenarios(scenario_dir=SCENARIO_DIR, current_string=CURRENT_STRING,
                  extension='.py', files=None):
    """
    Run all of the the scenario's in the scenario_dir.

    parameters:
      scenario_dir: The path to the directory of scenarios
      current_string: Used in the timing file name
      extension: The last three characters of the scenario files.
      Bad hack to represent the file extension of the scenarios.
      Currently .py or par.

    Write timings and svn info to a file.
    """
    timings = {}
    delimiter = ','
    ofile = 'imp_test_performance.asc'
    fd = open(ofile, 'a')
    fd.write("version" + delimiter +
             "last_check_in_date" + delimiter +
             "modification_status" + delimiter +
             "scenario_file" + delimiter +
             "scenario_time" + "\n")
    files = par_files(scenario_dir, extension=extension, files=files)
    output_dirs = []
    for file in files:
        pull_path = join(scenario_dir, file)
        eqrm_flags = parse_in_parameters.create_parameter_data(pull_path)
        output_dirs.append(join(eqrm_flags['output_dir']))
        print "Running scenario", file
        # Initial time and memory
        t0 = time.clock()

        # Run the scenario
        analysis.main(pull_path, parallel_finalise=False)

        # Run post-processing (if needed)
        if eqrm_flags['save_motion']:
            postprocessing.generate_motion_csv(eqrm_flags['output_dir'],
                                               eqrm_flags['site_tag'],
                                               soil_amp=False)
            if eqrm_flags['use_amplification']:
                postprocessing.generate_motion_csv(eqrm_flags['output_dir'],
                                                   eqrm_flags['site_tag'],
                                                   soil_amp=True)

        root, ext = splitext(file)
        time_taken_sec = (time.clock() - t0)
        timings[root] = time_taken_sec
        version, date, modified = get_version()
        fd.write(str(version) + delimiter +
                 str(date) + delimiter +
                 str(modified) + delimiter +
                 str(file) + delimiter +
                 str(time_taken_sec) + "\n")
    fd.close()
    timings = Scenario_times(timings, current_string=current_string)
    return timings, output_dirs
Example #7
0
def generate_motion_csv(output_dir, site_tag, soil_amp):
    """Produce scenario motion CSV files.
    (previously produced if save_motion=True)

    Parameters:
    output_dir - path to directory where the simulation data has been produced,
                 and where the output files will be placed
    site_tag   - used to identify the appropriate data as input
    soil_amp   - passed through to load_motion and save_motion_to_csv;
                 presumably True selects soil-amplified results and False
                 bedrock results -- confirm against those functions.
                 (The docstring previously described a non-existent
                 'is_bedrock' parameter.)

    Returns whatever save_motion_to_csv returns for the loaded motion.
    """

    # EQRM flags
    eqrm_flags = create_parameter_data(os.path.join(output_dir, "eqrm_flags.py"))

    # Ground motion
    motion, atten_periods = load_motion(output_dir, site_tag, soil_amp, file_format="binary")

    return save_motion_to_csv(soil_amp, eqrm_flags, motion)
Example #8
0
def multi_run(runs):
    """
    Run several simulations.

    Arg:
    runs - A list of dictionaries.
        Dic; "processes"; The number of processes
             "sdp"; The event control file parameters

    """
    for run in runs:
        # NOTE(review): a fixed file name means concurrent calls would
        # clobber each other's control file, and it is never removed.
        control_file = 'temp.py'
        # Build the base eqrm_flags and write them to an EQRM control file.
        flags = create_parameter_data(run["sdp"])
        num_nodes = run["processes"]
        eqrm_flags_to_control_file(control_file, flags)
        # Launch the simulation with an mpi run call.
        (cluster, _) = util.get_hostname()
        cmd = mpi_command(cluster, num_nodes, control_file)
        subprocess.call(cmd)
def generate_motion_csv(output_dir,
                        site_tag,
                        soil_amp):
    """Produce scenario motion CSV files.
    (previously produced if save_motion=True)

    Parameters:
    output_dir - path to directory where the simulation data has been produced,
                 and where the output files will be placed
    site_tag   - used to identify the appropriate data as input
    soil_amp   - passed through to load_motion and save_motion_to_csv;
                 presumably True selects soil-amplified results and False
                 bedrock results -- confirm against those functions.
                 (The docstring previously described a non-existent
                 'is_bedrock' parameter.)

    Returns whatever save_motion_to_csv returns for the loaded motion.
    """

    # EQRM flags
    eqrm_flags = create_parameter_data(
        os.path.join(output_dir, 'eqrm_flags.py'))

    # Ground motion
    motion, atten_periods = load_motion(output_dir,
                                        site_tag, soil_amp,
                                        file_format='binary')

    return save_motion_to_csv(soil_amp, eqrm_flags, motion)
Example #10
0
def create_nci_job(nodes, param_file):
    """
    Creates an NCI job package from the given parameter file and the number of
    nodes specified.

    Parameters:
      nodes - number of cpus to request; must be a multiple of 8 when > 8.
      param_file - the EQRM control file to package.

    Raises Exception when the node count, memory, jobfs or walltime
    requirements exceed the NCI limits.
    """
    # Initial node number validation
    if nodes > 8 and nodes % 8 != 0:
        raise Exception('Nodes must be a multiple of 8 if greater than 8.')
    if nodes > LIMIT_NODES:
        raise Exception('The node limit is %s' % LIMIT_NODES)

    # Parse param_file to eqrm_flags
    eqrm_flags = create_parameter_data(param_file)

    # Some validation based on the event_set_handler value.
    # Use ==/!= instead of is/is not: 'is' tests object identity, so the
    # original comparisons only worked when the strings happened to be
    # the same interned object.
    if eqrm_flags.event_set_handler == 'save':
        raise Exception(
            'Please ensure that event_set_handler is load or generate')
    if eqrm_flags.event_set_handler != 'load':
        log.info('')
        log.info('event_set_handler not load. Generating event set for NCI.')
        log.info('')

    # Calculate parameters required for job
    params = calc_params(eqrm_flags)
    req_memory = calc_memory(nodes, params)
    req_jobfs = calc_jobfs(nodes)
    req_walltime = calc_walltime(nodes)

    # Validation based on parameters
    msg = ''
    if req_memory > nodes * LIMIT_MEMORY_MULTIPLIER:
        msg = '%sRequired memory %sMB greater than limit %sMB.\n' % (
            msg, req_memory, nodes * LIMIT_MEMORY_MULTIPLIER)
    if req_jobfs > nodes * LIMIT_JOBFS_MULTIPLIER:
        msg = '%sRequired jobfs %sMB greater than limit %sMB\n' % (
            msg, req_jobfs, nodes * LIMIT_JOBFS_MULTIPLIER)
    if req_walltime > LIMIT_WALLTIME_MULTIPLIER(nodes):
        msg = '%sRequired walltime %ssecs greater than limit %ssecs\n' % (
            msg, req_walltime, LIMIT_WALLTIME_MULTIPLIER(nodes))
    if len(msg) > 0:
        msg += 'Consider reducing the size of your simulation.'
        raise Exception(msg)

    # Create directory to package into
    nci_dir = os.path.join('.', 'nci_job')
    if os.path.exists(nci_dir):
        rmtree(nci_dir)
    os.makedirs(nci_dir)

    log.info('')
    log.info('Saving package to %s' % nci_dir)
    log.info('(replaces current directory if exists)')

    # Copy input, output and save data to the packaged directory
    input_dir = os.path.join(nci_dir, 'input')
    copytree(eqrm_flags.input_dir, input_dir)

    output_dir = os.path.join(nci_dir, 'output')
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    save_dir = os.path.join(nci_dir, 'save')
    copytree(
        os.path.join(eqrm_flags.output_dir,
                     '%s_event_set' % eqrm_flags.site_tag), save_dir)

    # Modify eqrm_flags directories for NCI
    eqrm_flags['input_dir'] = os.path.join('.', 'input')
    eqrm_flags['output_dir'] = os.path.join('.', 'output')
    eqrm_flags['data_array_storage'] = "getenv('PBS_JOBFS')"
    eqrm_flags['event_set_load_dir'] = os.path.join('.', 'save')

    # We always want a load job
    eqrm_flags['event_set_handler'] = "load"

    # Write new setdata file
    eqrm_flags_to_control_file(os.path.join(nci_dir, param_file), eqrm_flags)

    # Write NCI job file; 'with' guarantees the handle is closed even on error
    with open(os.path.join(nci_dir, '%s_job' % param_file), 'w') as job_file:
        job_file.write('#!/bin/bash\n')
        job_file.write('#PBS -wd\n')
        job_file.write('#PBS -q normal\n')
        job_file.write('#PBS -l ncpus=%s\n' % nodes)
        job_file.write('#PBS -l walltime=%s\n' % req_walltime)
        job_file.write('#PBS -l vmem=%sMB\n' % req_memory)
        job_file.write('#PBS -l jobfs=%sMB\n' % req_jobfs)
        job_file.write('\n')
        job_file.write('mpirun python %s\n' % param_file)

    log.info('')
    log.info('Now tar gzip %s and copy to NCI. e.g.' % nci_dir)
    log.info('tar czvf nci_job.tar.gz %s' % nci_dir)
    log.info('scp nci_job.tar.gz <username>@<nci_host>:/short/<project>/jobs/')
    log.info('')
Example #11
0
def multi_run(base_control,
              nodes,
              sites=None,
              max_site_index=None,
              input_dir=None,
              output_dir_funct=None,
              total_events=None,
              **kwargs):
    """
    Run several simulations.

    Arg:
    base_control - An instance with attributes of EQRM parameters.
    nodes - a list of the number of nodes to run each simulation on.
            The first value is for the first simulation etc.
    sites - a list of number of sites to run each simulation on.
            The first value is for the first simulation etc.
            The sites chosen will be spread out.
    max_site_index - The number of sites in the site file.
    input_dir - input directory applied to every simulation, if given.
    output_dir_funct - a function that can make the output_dir string,
                       **flags is passed in.
    total_events - a list of the number of events to run each simulation on.
            The first value is for the first simulation etc.
    **kwargs - Each key is an eqrm control file attribute. The value is
            a list of values to go into each simulation.
            The first value is for the first simulation etc.

    Raises ListLengthError when a kwargs list length differs from
    len(nodes).
    """
    control_file = 'temp.py'
    # Build the base eqrm_flags.
    flags = create_parameter_data(base_control)

    # First check that all the per-run lists have the expected length.
    runs = len(nodes)
    for k, v in kwargs.items():
        if not len(v) == runs:
            # str() conversions are required: the original concatenated
            # ints onto a string, raising TypeError instead of the
            # intended ListLengthError.
            msg = (k + " list is length " + str(len(v)) + "," + str(runs) +
                   " was expected.")
            raise ListLengthError(msg)

    # Start Looping
    for i, num_nodes in enumerate(nodes):

        # Pick out this simulation's value from each kwargs list.
        # (The original wrote to an undefined 'kwargs_column' dict,
        # a NameError, so the kwargs were never applied.)
        new_flags = {}
        for k, v in kwargs.items():
            new_flags[k] = v[i]

        # Add the kwargs
        flags.update(new_flags)

        # Add the directories
        if output_dir_funct is not None:
            flags['output_dir'] = output_dir_funct(**flags)
        if input_dir is not None:
            flags['input_dir'] = input_dir

        # Write an EQRM control file, then do an mpi run call
        eqrm_flags_to_control_file(control_file, flags)
        (cluster, _) = util.get_hostname()

        cmd = mpi_command(cluster, num_nodes, control_file)
        subprocess.call(cmd)
Example #12
0
def create_nci_job(nodes, param_file):
    """
    Creates an NCI job package from the given parameter file and the number of
    nodes specified.

    Parameters:
      nodes - number of cpus to request; must be a multiple of 8 when > 8.
      param_file - the EQRM control file to package.

    Raises Exception when the node count, memory, jobfs or walltime
    requirements exceed the NCI limits.
    """
    # Initial node number validation
    if nodes > 8 and nodes % 8 != 0:
        raise Exception('Nodes must be a multiple of 8 if greater than 8.')
    if nodes > LIMIT_NODES:
        raise Exception('The node limit is %s' % LIMIT_NODES)

    # Parse param_file to eqrm_flags
    eqrm_flags = create_parameter_data(param_file)

    # Some validation based on the event_set_handler value.
    # Use ==/!= instead of is/is not: 'is' tests object identity, so the
    # original comparisons only worked when the strings happened to be
    # the same interned object.
    if eqrm_flags.event_set_handler == 'save':
        raise Exception('Please ensure that event_set_handler is load or generate')
    if eqrm_flags.event_set_handler != 'load':
        log.info('')
        log.info('event_set_handler not load. Generating event set for NCI.')
        log.info('')

    # Calculate parameters required for job
    params = calc_params(eqrm_flags)
    req_memory = calc_memory(nodes, params)
    req_jobfs = calc_jobfs(nodes)
    req_walltime = calc_walltime(nodes)

    # Validation based on parameters
    msg = ''
    if req_memory > nodes * LIMIT_MEMORY_MULTIPLIER:
        msg = '%sRequired memory %sMB greater than limit %sMB.\n' % (
            msg, req_memory, nodes * LIMIT_MEMORY_MULTIPLIER)
    if req_jobfs > nodes * LIMIT_JOBFS_MULTIPLIER:
        msg = '%sRequired jobfs %sMB greater than limit %sMB\n' % (
            msg, req_jobfs, nodes * LIMIT_JOBFS_MULTIPLIER)
    if req_walltime > LIMIT_WALLTIME_MULTIPLIER(nodes):
        msg = '%sRequired walltime %ssecs greater than limit %ssecs\n' % (
            msg, req_walltime, LIMIT_WALLTIME_MULTIPLIER(nodes))
    if len(msg) > 0:
        msg += 'Consider reducing the size of your simulation.'
        raise Exception(msg)

    # Create directory to package into
    nci_dir = os.path.join('.', 'nci_job')
    if os.path.exists(nci_dir):
        rmtree(nci_dir)
    os.makedirs(nci_dir)

    log.info('')
    log.info('Saving package to %s' % nci_dir)
    log.info('(replaces current directory if exists)')

    # Copy input, output and save data to the packaged directory
    input_dir = os.path.join(nci_dir, 'input')
    copytree(eqrm_flags.input_dir, input_dir)

    output_dir = os.path.join(nci_dir, 'output')
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    save_dir = os.path.join(nci_dir, 'save')
    copytree(os.path.join(eqrm_flags.output_dir,
                          '%s_event_set' % eqrm_flags.site_tag), save_dir)

    # Modify eqrm_flags directories for NCI
    eqrm_flags['input_dir'] = os.path.join('.', 'input')
    eqrm_flags['output_dir'] = os.path.join('.', 'output')
    eqrm_flags['data_array_storage'] = "getenv('PBS_JOBFS')"
    eqrm_flags['event_set_load_dir'] = os.path.join('.', 'save')

    # We always want a load job
    eqrm_flags['event_set_handler'] = "load"

    # Write new setdata file
    eqrm_flags_to_control_file(os.path.join(nci_dir, param_file), eqrm_flags)

    # Write NCI job file; 'with' guarantees the handle is closed even on error
    with open(os.path.join(nci_dir, '%s_job' % param_file), 'w') as job_file:
        job_file.write('#!/bin/bash\n')
        job_file.write('#PBS -wd\n')
        job_file.write('#PBS -q normal\n')
        job_file.write('#PBS -l ncpus=%s\n' % nodes)
        job_file.write('#PBS -l walltime=%s\n' % req_walltime)
        job_file.write('#PBS -l vmem=%sMB\n' % req_memory)
        job_file.write('#PBS -l jobfs=%sMB\n' % req_jobfs)
        job_file.write('\n')
        job_file.write('mpirun python %s\n' % param_file)

    log.info('')
    log.info('Now tar gzip %s and copy to NCI. e.g.' % nci_dir)
    log.info('tar czvf nci_job.tar.gz %s' % nci_dir)
    log.info('scp nci_job.tar.gz <username>@<nci_host>:/short/<project>/jobs/')
    log.info('')