Example #1
def get_timeseries():
    """Get time series data."""

    name = os.path.join(project.output_folder, project.scenario_name+'.sww')
    log.debug('get_timeseries: input SWW file=%s' % name)
    log.debug('get_timeseries: gauge file=%s' % project.gauges)
    anuga.sww2csv_gauges(name, project.gauges, quantities=project.layers_list,
                         verbose=False)
Example #2
def make_dir_zip(dirname, zipname):
    """Make a ZIP file from a directory.

    dirname  path to directory to zip up
    zipname  path to ZIP file to create
    """

    log.debug('zip -q -r %s %s' % (zipname, dirname))
    os.system('zip -q -r %s %s' % (zipname, dirname))
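
The os.system() call above assumes an external 'zip' binary is on the PATH. A portable sketch using only the standard library could look like the following; the function name make_dir_zip_portable is illustrative and not part of the original example.

import os
import zipfile

def make_dir_zip_portable(dirname, zipname):
    """Zip up 'dirname' recursively without relying on an external 'zip' tool."""

    zf = zipfile.ZipFile(zipname, 'w', zipfile.ZIP_DEFLATED)
    try:
        for (root, dirs, files) in os.walk(dirname):
            for fname in files:
                path = os.path.join(root, fname)
                # store entries relative to the parent of 'dirname', roughly
                # matching what 'zip -r <zipname> <dirname>' would record
                arcname = os.path.relpath(path, os.path.dirname(dirname))
                zf.write(path, arcname)
    finally:
        zf.close()
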
Example #3
def get_timeseries():
    """Get time series data"

    Returns a list of generated files.
    """

    # generate the result files
    name = os.path.join(project.output_folder, project.scenario+'.sww')
    log.debug('get_timeseries: input SWW file=%s' % name)
    log.debug('get_timeseries: gauge file=%s' % project.gauge_file)
    anuga.sww2csv_gauges(name, project.gauge_file, quantities=project.layers_list,
                         verbose=False)

    # since ANUGA code doesn't return a list of generated files,
    # look in output directory for 'gauge_*.csv' files.
    glob_mask = os.path.join(project.output_folder, 'gauge_*.csv')
    return glob.glob(glob_mask)
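
A small usage sketch, not from the original: since get_timeseries() returns the gauge CSV paths, the files can be inspected with the standard csv module. The exact column layout depends on the quantities passed to sww2csv_gauges(), so the header row is read rather than assumed.

import csv

for csv_name in get_timeseries():
    fd = open(csv_name, 'rb')           # 'rb' suits the Python 2 csv module
    reader = csv.reader(fd)
    header = next(reader)               # first row holds the column names
    rows = list(reader)
    fd.close()
    print('%s: %d columns, %d data rows' % (csv_name, len(header), len(rows)))
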
    def get_remote_from_mirrors(remote, local, auth, mirrors):
        '''Get 'remote' from one of 'mirrors', put in 'local'.'''

        # Get a unique date+time string to defeat caching.  The idea is to add
        # this to the end of any URL so proxy sees a different request.
        cache_defeat = '?' + time.strftime('%Y%m%d%H%M%S')

        # try each mirror when getting file
        for mirror in mirrors:
            log.debug('Fetching remote file %s from mirror %s'
                      % (remote, mirror))

            remote_url = mirror + remote + cache_defeat
            (result, auth) = get_web_file(remote_url, local, auth=auth)
            if result and not is_html(local):
                log.debug('Success fetching file %s' % remote)
                return (True, auth)
            log.debug('Failure fetching from %s' % mirror)
            auth = None

        log.debug('Failure fetching file %s' % remote)
        return (False, auth)            
def run_simulation(vtype, sim_obj):
    '''Run a simulation.

    Returns True if all went well, else False.
    '''
    
    # untar the object
    tar_path = os.path.join(Local_Data_Directory, sim_obj)
    log.info('Untarring %s in directory %s ...'
             % (tar_path, Local_Data_Directory))
    untar_file(tar_path, target_dir=Local_Data_Directory)

    # modify project.py template
    log.debug("Creating '%s' version of project.py" % vtype)
    fd = open('project_template.py', 'r')
    project = fd.readlines()
    fd.close()

    new_project = []
    for line in project:
        new_project.append(line.replace('#!SETUP!#', vtype.lower()))
            
    fd = open('project.py', 'w')
    fd.write(''.join(new_project))
    fd.close()
    
    # import new project.py
    import project

    # run the simulation, produce SWW file
    log.info('Running the simulation ...')
    cmd = 'python run_model.py > %s' % RUNMODEL_STDOUT
    log.debug("run_simulation: doing '%s'" % cmd)
    res = os.system(cmd)
    log.debug("run_simulation: res=%d" % res)

    # 'unimport' project.py
    del project

    # check result
    if res != 0:
        log.critical('Simulation failed, check log')

    return res == 0
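
One caveat with the import/del pattern above (a hedged note, not from the original script): `del project` only removes the local name, so a second run_simulation() call in the same process would still see the cached module rather than the regenerated project.py. A minimal sketch that forces a fresh import; the helper name is illustrative only.

import sys

def import_fresh_project():
    """Re-import project.py even if a 'project' module is already cached."""
    if 'project' in sys.modules:
        del sys.modules['project']      # drop the cached module object
    import project                      # re-executes the newly written project.py
    return project
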
Example #6
def run_model():
    """Run a tsunami simulation for a scenario."""

    log.info('@'*90)
    log.info('@ Running simulation')
    log.info('@'*90)

    # Read in boundary from ordered sts file
    event_sts = anuga.create_sts_boundary(project.event_sts)

    # Read the landward-defined points; this incorporates the original
    # clipping polygon minus the 100m contour
    landward_boundary = anuga.read_polygon(project.landward_boundary)

    # Combine sts polyline with landward points
    bounding_polygon_sts = event_sts + landward_boundary

    # Number of boundary segments
    num_ocean_segments = len(event_sts) - 1
    # Number of landward_boundary points
    num_land_points = anuga.file_length(project.landward_boundary)

    # Boundary tags refer to project.landward_boundary
    # 4 points equals 5 segments start at N
    boundary_tags={'back': range(num_ocean_segments+1,
                                 num_ocean_segments+num_land_points),
                   'side': [num_ocean_segments,
                            num_ocean_segments+num_land_points],
                   'ocean': range(num_ocean_segments)}

    # Build mesh and domain
    log.debug('bounding_polygon_sts=%s' % str(bounding_polygon_sts))
    log.debug('boundary_tags=%s' % str(boundary_tags))
    log.debug('project.bounding_maxarea=%s' % str(project.bounding_maxarea))
    log.debug('project.interior_regions=%s' % str(project.interior_regions))
    log.debug('project.meshes=%s' % str(project.meshes))

    domain = anuga.create_domain_from_regions(bounding_polygon_sts,
                                boundary_tags=boundary_tags,
                                maximum_triangle_area=project.bounding_maxarea,
                                interior_regions=project.interior_regions,
                                mesh_filename=project.meshes,
                                use_cache=False,
                                verbose=False)

    domain.geo_reference.zone = project.zone
    log.info('\n%s' % domain.statistics())

    domain.set_name(project.scenario_name)
    domain.set_datadir(project.output_folder)
    domain.set_minimum_storable_height(0.01)  # Don't store depth less than 1cm

    # Set the initial stage in the offcoast region only
    if project.land_initial_conditions:
        IC = anuga.Polygon_function(project.land_initial_conditions,
                                    default=project.tide,
                                    geo_reference=domain.geo_reference)
    else:
        IC = project.tide

    domain.set_quantity('stage', IC, use_cache=True, verbose=False)
    domain.set_quantity('friction', project.friction)
    domain.set_quantity('elevation',
                        filename=project.combined_elevation_filestem+'.pts',
                        use_cache=True, verbose=False, alpha=project.alpha)

    # Setup boundary conditions
    log.debug('Set boundary - available tags: %s' % domain.get_boundary_tags())

    Br = anuga.Reflective_boundary(domain)
    Bt = anuga.Transmissive_stage_zero_momentum_boundary(domain)
    Bd = anuga.Dirichlet_boundary([project.tide, 0, 0])
    Bf = anuga.Field_boundary(project.event_sts+'.sts',
                        domain, mean_stage=project.tide, time_thinning=1,
                        default_boundary=anuga.Dirichlet_boundary([0, 0, 0]),
                        boundary_polygon=bounding_polygon_sts,
                        use_cache=True, verbose=False)

    domain.set_boundary({'back': Br,
                         'side': Bt,
                         'ocean': Bf})

    # Evolve system through time
    t0 = time.time()
    for t in domain.evolve(yieldstep=project.yieldstep,
                           finaltime=project.finaltime,
                           skip_initial_step=False):
        log.info('\n%s' % domain.timestepping_statistics())
        log.info('\n%s' % domain.boundary_statistics(tags='ocean'))

    log.info('Simulation took %.2f seconds' % (time.time()-t0))
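
The boundary_tags index arithmetic above is easier to follow with concrete numbers; the counts below are illustrative only and are not taken from the original scenario.

# Say the ordered STS boundary has 4 points (3 ocean segments) and the
# landward polygon has 4 points: the combined bounding polygon then has
# 8 segments numbered 0..7, partitioned as
num_ocean_segments = 3
num_land_points = 4
ocean = range(num_ocean_segments)                                   # [0, 1, 2]
side = [num_ocean_segments, num_ocean_segments + num_land_points]   # [3, 7]
back = range(num_ocean_segments + 1,
             num_ocean_segments + num_land_points)                  # [4, 5, 6]
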
Example #7
def setup_model():
    """Perform sanity checks.

    The checks here can be simpler than for full-blown ANUGA as the directory
    structure is automatically generated.
    """

    # flag - we check many things and then don't proceed if anything wrong
    sanity_error = False               # checked at bottom of this file

    #####
    # check directory Structure
    #####

    if not os.path.exists(project.home):
        log.error("Sorry, data directory '%s' doesn't exist" % project.home)
        sanity_error = True

    if not os.path.exists(project.muxhome):
        log.error("Sorry, MUX directory '%s' doesn't exist" % project.muxhome)
        sanity_error = True

    if not os.path.exists(project.anuga_folder):
        log.error("Sorry, ANUGA directory '%s' doesn't exist"
                  % project.anuga_folder)
        sanity_error = True

    if not os.path.exists(project.topographies_folder):
        log.error("Sorry, topo directory '%s' doesn't exist"
                  % project.topographies_folder)
        sanity_error = True

    if not os.path.exists(project.polygons_folder):
        log.error("Sorry, polygon directory '%s' doesn't exist"
                  % project.polygons_folder)
        sanity_error = True

    if not os.path.exists(project.boundaries_folder):
        log.error("Sorry, boundaries directory '%s' doesn't exist"
                  % project.boundaries_folder)
        sanity_error = True

    if not os.path.exists(project.output_folder):
        log.error("Sorry, outputs directory '%s' doesn't exist"
                  % project.output_folder)
        sanity_error = True

    if not os.path.exists(project.gauges_folder):
        log.error("Sorry, gauges directory '%s' doesn't exist"
                  % project.gauges_folder)
        sanity_error = True

    if not os.path.exists(project.meshes_folder):
        log.error("Sorry, meshes directory '%s' doesn't exist"
                  % project.meshes_folder)
        sanity_error = True

    if not os.path.exists(project.mux_data_folder):
        log.error("Sorry, mux data directory '%s' doesn't exist"
                  % project.mux_data_folder)
        sanity_error = True

    # generate the event.lst file for the event
    get_multimux(project.event, project.multimux_folder, project.mux_input)

    # if multi_mux is True, check if multi-mux file exists
    if project.multi_mux:
        if not os.path.exists(project.mux_input):
            log.error("Sorry, MUX input file '%s' doesn't exist"
                      % project.mux_input)
            sanity_error = True

    if not os.path.exists(project.event_folder):
        log.error("Sorry, you must generate event %s with EventSelection."
                  % project.event)
        sanity_error = True

    #####
    # determine type of run, set some parameters depending on type
    #####

    if project.setup == 'trial':
        project.scale_factor = 100
        project.time_thinning = 96
        project.yieldstep = 240
    elif project.setup == 'basic':
        project.scale_factor = 4
        project.time_thinning = 12
        project.yieldstep = 120
    elif project.setup == 'final':
        project.scale_factor = 1
        project.time_thinning = 4
        project.yieldstep = 60
    else:
        log.error("Sorry, you must set the 'setup' variable to one of:"
                  '   trial - coarsest mesh, fast\n'
                  '   basic - coarse mesh\n'
                  '   final - fine mesh, slowest\n'
                  '\n'
                  "'setup' was set to '%s'" % project.setup)
        sanity_error = True

    #####
    # check for errors detected above.
    #####

    if sanity_error:
        msg = 'You must fix the above errors before continuing.'
        raise Exception(msg)

    #####
    # Reading polygons and creating interior regions
    #####

#    # Create list of land polygons with initial conditions
#    project.land_initial_conditions = []
#    for (filename, MSL) in project.land_initial_conditions_filename:
#        polygon = anuga.read_polygon(os.path.join(project.polygons_folder,
#                                                  filename))
#        project.land_initial_conditions.append([polygon, MSL])

    # Create list of interior polygons with scaling factor
    project.interior_regions = []
    for (filename, maxarea) in project.interior_regions_data:
        polygon = anuga.read_polygon(os.path.join(project.polygons_folder,
                                                  filename))
        project.interior_regions.append([polygon,
                                         maxarea*project.scale_factor])

    # Initial bounding polygon for data clipping
    project.bounding_polygon = anuga.read_polygon(os.path.join(
                                                      project.polygons_folder,
                                                      project.bounding_polygon))
    project.bounding_maxarea = project.bounding_polygon_maxarea \
                               * project.scale_factor

    # Estimate the number of triangles
    log.debug('number_mesh_triangles(%s, %s, %s)'
              % (str(project.interior_regions),
                 str(project.bounding_polygon),
                 str(project.bounding_maxarea)))
    triangle_min = number_mesh_triangles(project.interior_regions,
                                         project.bounding_polygon,
                                         project.bounding_maxarea)

    log.info('minimum estimated number of triangles=%d' % triangle_min)
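
The trial/basic/final branch above could equally be written as a lookup table; an illustrative sketch using the same values (the original script keeps the if/elif chain, and the project object is the one adorned earlier).

SETUP_PARAMETERS = {'trial': {'scale_factor': 100, 'time_thinning': 96, 'yieldstep': 240},
                    'basic': {'scale_factor': 4, 'time_thinning': 12, 'yieldstep': 120},
                    'final': {'scale_factor': 1, 'time_thinning': 4, 'yieldstep': 60}}

params = SETUP_PARAMETERS.get(project.setup)
if params is None:
    raise Exception("'setup' must be one of %s, not '%s'"
                    % (sorted(SETUP_PARAMETERS.keys()), project.setup))
for (name, value) in params.items():
    setattr(project, name, value)
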
Example #8
        weight_factor = 1.0
        mux_weights = weight_factor*num.ones(len(mux_filenames), num.Float)

        order_filename = project.urs_order

        # Create ordered sts file
        anuga.urs2sts(mux_filenames, basename_out=output_dir,
                      ordering_filename=order_filename,
                      weights=mux_weights, verbose=False)

    # report on progress so far
    sts_file = os.path.join(project.event_folder, project.scenario_name)
    log.info('URS boundary file=%s' % sts_file)

    (quantities, elevation, time) = get_sts_gauge_data(sts_file, verbose=False)
    log.debug('%d %d' % (len(elevation), len(quantities['stage'][0,:])))


def run_model():
    """Run a tsunami simulation for a scenario."""

    log.info('@'*90)
    log.info('@ Running simulation')
    log.info('@'*90)

    # Read in boundary from ordered sts file
    event_sts = anuga.create_sts_boundary(project.event_sts)

    # Read the landward-defined points; this incorporates the original
    # clipping polygon minus the 100m contour
    landward_boundary = anuga.read_polygon(project.landward_boundary)
Example #9
def get_sts_gauge_data(filename, verbose=False):
    """Get gauges (timeseries of index points).
    
    Returns a tuple containing:
        (quantities, elevation, time, gen_files)
    """

    log.debug('get_sts_gauge_data: filename=%s' % filename)

    # prepare list to return generated filenames in
    gen_files = []

    fid = NetCDFFile(filename+'.sts', 'r')      # Open existing file for read
    permutation = fid.variables['permutation'][:]
    x = fid.variables['x'][:] + fid.xllcorner   # x-coordinates of vertices
    y = fid.variables['y'][:] + fid.yllcorner   # y-coordinates of vertices
    points = num.transpose(num.asarray([x.tolist(), y.tolist()]))
    time = fid.variables['time'][:] + fid.starttime
    elevation = fid.variables['elevation'][:]

    basename = 'sts_gauge'
    quantity_names = ['stage', 'xmomentum', 'ymomentum']
    quantities = {}
    for i, name in enumerate(quantity_names):
        quantities[name] = fid.variables[name][:]

    #####
    # Get maximum wave height throughout timeseries at each index point
    #####

    maxname = os.path.join(project.output_folder, 'max_sts_stage.csv')
    gen_files.append(maxname)
    fid_max = open(maxname, 'w')
    fid_max.write('index, x, y, max_stage \n')
    for j in range(len(x)):
        index = permutation[j]
        stage = quantities['stage'][:,j]
        xmomentum = quantities['xmomentum'][:,j]
        ymomentum = quantities['ymomentum'][:,j]

        fid_max.write('%d, %.6f, %.6f, %.6f\n'
                      % (index, x[j], y[j], max(stage)))

    #####
    # Get minimum wave height throughout timeseries at each index point
    #####

    minname = os.path.join(project.output_folder, 'min_sts_stage.csv')
    gen_files.append(minname)
    fid_min = open(minname, 'w')
    fid_min.write('index, x, y, min_stage \n')
    for j in range(len(x)):
        index = permutation[j]
        stage = quantities['stage'][:,j]
        xmomentum = quantities['xmomentum'][:,j]
        ymomentum = quantities['ymomentum'][:,j]

        fid_min.write('%d, %.6f, %.6f, %.6f\n' %(index, x[j], y[j], min(stage)))

        out_file = os.path.join(project.output_folder,
                                basename+'_'+str(index)+'.csv')
        gen_files.append(out_file)
        fid_sts = open(out_file, 'w')
        fid_sts.write('time, stage, xmomentum, ymomentum \n')

        #####
        # End of the get gauges
        #####

        for k in range(len(time)-1):
            fid_sts.write('%.6f, %.6f, %.6f, %.6f\n'
                          % (time[k], stage[k], xmomentum[k], ymomentum[k]))

        fid_sts.close()
    fid.close()

    return (quantities, elevation, time, gen_files)
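
A usage sketch, not part of the original; 'sts_file' is assumed to be the STS filestem used by the caller. The max_sts_stage.csv file written above has the columns 'index, x, y, max_stage', so the worst-affected index point can be pulled out directly.

import csv

(quantities, elevation, time, gen_files) = get_sts_gauge_data(sts_file)
max_csv = [name for name in gen_files if name.endswith('max_sts_stage.csv')][0]

fd = open(max_csv, 'rb')
reader = csv.reader(fd)
next(reader)                        # skip the 'index, x, y, max_stage' header
rows = [(int(r[0]), float(r[3])) for r in reader]
fd.close()

(worst_index, worst_stage) = max(rows, key=lambda row: row[1])
print('highest stage %.6f at index point %d' % (worst_stage, worst_index))
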
Example #10
def run_tsudat(json_data):
    """Run ANUGA on the Amazon EC2.

    json_data  the path to the JSON data file
    """

    # plug our exception handler into the python system
    sys.excepthook = excepthook

    # get JSON data and adorn project object with its data
    adorn_project(json_data)

    # default certain values if not supplied in JSON data
    default_project_values()

    # set logfile to be in run output folder
    if project.debug:
        log.log_logging_level = log.DEBUG
    log.log_filename = os.path.join(project.output_folder, 'ui.log')
    if project.debug:
        dump_project_py()
        
    # do all required data generation before local run
    log.info('#'*80)
    log.info('# Preparing simulation')
    log.info('#'*80)

    log.info('Calling: setup_model()')
    setup_model()

    log.info('Calling: build_elevation()')
    build_elevation()

    log.info('Calling: build_urs_boundary()')
    # create .payload dictionary, 'hpgauges' files are copied up to EC2
    # and then returned in the resultant ZIP S3 file
    project.payload = {}
    gauges = build_urs_boundary(project.mux_input_filename, project.event_sts)
    project.payload['hpgauges'] = gauges

    # determine limits of AOI
    log.info('Calling: get_minmaxAOI()')
    get_minmaxAOI()

    # actually run the simulation
    youngest_input = get_youngest_input()
    sww_file = os.path.join(project.output_folder, project.scenario+'.sww')
    try:
        sww_ctime = os.path.getctime(sww_file)
    except OSError:
        sww_ctime = 0.0         # SWW file not there

    if project.force_run or youngest_input > sww_ctime:
        log.info('#'*80)
        log.info('# Running simulation')
        log.info('#'*80)
        run_model()
        log.info('End of simulation')
    else:
        log.info('#'*80)
        log.info('# Not running simulation')
        log.debug('# SWW file %s is younger than input data' % sww_file)
        log.info('# If you want to force a simulation run, select FORCE RUN')
        log.info('#'*80)

    log.info('#'*80)
    log.info('# Simulation finished')
    log.info('#'*80)

    # now do optional post-run extractions
    if project.get_results_max:
        log.info('~'*80)
        log.info('~ Running export_results_max()')
        log.info('~'*80)
        file_list = export_results_max()
        project.payload['results_max'] = file_list  # add files to output dict
        log.info('export_results_max() has finished')
    else:
        log.info('~'*80)
        log.info('~ Not running export_results_max() - not requested')
        log.info('~'*80)

    if project.get_timeseries:
        log.info('~'*80)
        log.info('~ Running get_timeseries()')
        log.info('~'*80)
        file_list = get_timeseries()
        project.payload['timeseries'] = file_list  # add files to output dict
        # generate plot files
        plot_list = []
        for filename in file_list:
            plot_file = make_stage_plot(filename)
            plot_list.append(plot_file)
        project.payload['timeseries_plot'] = plot_list  # add files to output dict

        log.info('get_timeseries() has finished')
    else:
        log.info('~'*80)
        log.info('~ Not running get_timeseries() - not requested')
        log.info('~'*80)

    # clean up the local filesystem
    dir_path = os.path.join(project.working_directory, project.user)
    log.debug('Deleting work directory: %s' % dir_path)
Example #11
def run_tsudat(json_data):
    """Run ANUGA on the Amazon EC2.

    json_data  the path to the JSON data file

    Returns the boto instance object for the running image.
    """

    # plug our exception handler into the python system
    sys.excepthook = excepthook

    # get JSON data and adorn project object with its data
    adorn_project(json_data)

    # default certain values if not supplied in JSON data
    default_project_values()

    # set logfile to be in run output folder
    if project.debug:
        log.log_logging_level = log.DEBUG
    log.log_filename = os.path.join(project.output_folder, 'ui.log')
    if project.debug:
        dump_project_py()

    # do all required data generation before EC2 run
    log.info('#'*90)
    log.info('# Preparing simulation')
    log.info('#'*90)

    log.info('Calling: setup_model()')
    setup_model()

    log.info('Calling: build_elevation()')
    build_elevation()

    log.info('Calling: build_urs_boundary()')
    # create .payload dictionary, 'hpgauges' files are copied up to EC2
    # and then returned in the resultant ZIP S3 file
    project.payload = {}
    gauges = build_urs_boundary(project.mux_input_filename, project.event_sts)
    project.payload['hpgauges'] = gauges

    log.info('Calling: get_minmaxAOI()')
    get_minmaxAOI()

    # copy all required python modules to scripts directory
    ec2_name = os.path.join(ScriptsDir, Ec2RunTsuDATOnEC2)
    log.debug("Copying EC2 run file '%s' to scripts directory '%s'."
              % (Ec2RunTsuDAT, ec2_name))
    shutil.copy(Ec2RunTsuDAT, ec2_name)

    for extra in RequiredFiles:
        log.info('Copying %s to S3 scripts directory' % extra)
        shutil.copy(extra, ScriptsDir)

    # dump the current 'projects' object back into JSON, put in 'scripts'
    json_file = os.path.join(ScriptsDir, JsonDataFilename)
    log.info('Dumping JSON to file %s' % json_file)
    dump_json_to_file(project, json_file)
    dump_project_py()

    # bundle up the working directory, put it into S3
    zipname = ('%s-%s-%s-%s.zip'
               % (project.user, project.project,
                  project.scenario, project.setup))
    zip_tmp_dir = tempfile.mkdtemp(prefix='tsudat2_zip_')
    zippath = os.path.join(zip_tmp_dir, zipname)
    log.info('Making zip %s from %s' % (zippath, project.working_directory))
    make_dir_zip(project.working_directory, zippath)
    os.system('ls -l %s' % zip_tmp_dir)

    s3_name = os.path.join(project.InputS3DataDir, zipname)
    try:
        s3 = s3_connect()
        bucket = s3.create_bucket(project.S3Bucket)
        key = bucket.new_key(s3_name)
        log.info('Creating S3 file: %s/%s' % (project.S3Bucket, s3_name))
        key.set_contents_from_filename(zippath)
        log.info('Done!')
        key.set_acl('public-read')
    except boto.exception.S3ResponseError, e:
        log.critical('S3 error: %s' % str(e))
        print('S3 error: %s' % str(e))
        sys.exit(10)
Example #12
        mux_weights = weight_factor*num.ones(len(mux_filenames), num.Float)

        order_filename = project.urs_order_file

        # Create ordered sts file
        anuga.urs2sts(mux_filenames, basename_out=output_dir,
                      ordering_filename=order_filename,
                      weights=mux_weights, verbose=False)

    # report on progress so far
    sts_file = os.path.join(project.event_folder, project.sts_filestem)
    log.info('STS filestem=%s' % sts_file)

    (quantities, elevation,
     time, gen_files) = get_sts_gauge_data(sts_file, verbose=False)
    log.debug('%d %d' % (len(elevation), len(quantities['stage'][0,:])))

    return gen_files

def define_default(name, default):
    """Check if a project attribute is defined, default it if not.

    name   name of attribute to check (string)
    default  default value if attribute isn't defined
    """

    try:
        eval('project.%s' % name)
    except AttributeError:
        setattr(project, name, default)
    else:
        pass        # attribute already defined, leave it alone

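An eval-free equivalent of define_default() (an illustrative sketch, not from the original script; the project object is the same one used above) relies on hasattr/setattr directly:

def define_default_alt(name, default):
    """Set project.<name> to 'default' only if the attribute isn't already defined."""
    if not hasattr(project, name):
        setattr(project, name, default)
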
def refresh_local_data(data_objects, target_dir, mirrors):
    '''Update local data objects from the server.

    data_objects:   list of files to refresh
    target_dir:     directory in which to put files
    mirrors:        list of mirror sites to use
    
    Each file has an associated *.digest file used to decide
    if the local file needs refreshing.
    
    Return True if all went well, else False.
    '''

    # decision function to decide if a file contains HTML
    def is_html(filename):
        '''Decide if given file contains HTML.'''
        
        fd = open(filename)
        data = fd.read(1024)
        fd.close()

        if 'DOCTYPE' in data:
            return True
        
        return False

    
    # local function to get remote file from one of mirrors
    def get_remote_from_mirrors(remote, local, auth, mirrors):
        '''Get 'remote' from one of 'mirrors', put in 'local'.'''

        # Get a unique date+time string to defeat caching.  The idea is to add
        # this to the end of any URL so proxy sees a different request.
        cache_defeat = '?' + time.strftime('%Y%m%d%H%M%S')

        # try each mirror when getting file
        for mirror in mirrors:
            log.debug('Fetching remote file %s from mirror %s'
                      % (remote, mirror))

            remote_url = mirror + remote + cache_defeat
            (result, auth) = get_web_file(remote_url, local, auth=auth)
            if result and not is_html(local):
                log.debug('Success fetching file %s' % remote)
                return (True, auth)
            log.debug('Failure fetching from %s' % mirror)
            auth = None

        log.debug('Failure fetching file %s' % remote)
        return (False, auth)            
                

    # local function to compare contents of two files
    def files_same(file_a, file_b):
        '''Compare two files to see if contents are the same.'''
        
        fd = open(file_a, 'r')
        data_a = fd.read()
        fd.close()

        fd = open(file_b, 'r')
        data_b = fd.read()
        fd.close()

        return data_a == data_b

        
    # local function to update one data object
    def refresh_object(obj, auth, mirrors):
        '''Update object 'obj' using authentication tuple 'auth'.
        
        Return (True, <updated_auth>) if all went well,
        else (False, <updated_auth>).
        '''

        # create local and remote file paths.
        obj_digest = obj + '.digest'
        
        remote_file = os.path.join(Remote_Data_Directory, obj)
        remote_digest = remote_file + '.digest'
        
        local_file = os.path.join(Local_Data_Directory, obj)
        local_digest = local_file + '.digest'
        
        # see if missing either digest or object .tgz
        if not os.path.exists(local_digest) or not os.path.exists(local_file):
            # no digest or no object, download both digest and object
            (res, auth) = get_remote_from_mirrors(obj_digest, local_digest, auth, mirrors)
            if res:
                (res, auth) = get_remote_from_mirrors(obj, local_file, auth, mirrors)
        else:
            # download object digest to remote data directory
            (res, auth) = get_remote_from_mirrors(obj_digest, remote_digest, auth, mirrors)
            if res:
                if not files_same(local_digest, remote_digest):
                    # digests differ, refresh object
                    shutil.move(remote_digest, local_digest)
                    (res, auth) = get_remote_from_mirrors(obj, local_file, auth, mirrors)

        return (res, auth)

    # create local data directory if required
    log.debug('Creating local directory: %s' % Local_Data_Directory)
    if not os.path.exists(Local_Data_Directory):
        os.mkdir(Local_Data_Directory)

    # clean out remote data copy directory
    log.debug('Cleaning remote directory: %s' % Remote_Data_Directory)
    shutil.rmtree(Remote_Data_Directory, ignore_errors=True)
    os.mkdir(Remote_Data_Directory)

    # success, refresh local files
    auth = None
    result = True
    for data_object in data_objects:
        log.info("Refreshing file '%s'" % data_object)
        log.debug('refresh_local_data: getting %s from mirrors, auth=%s'
                  % (data_object, str(auth)))
        (res, auth) = refresh_object(data_object, auth, mirrors)
        log.debug('refresh_local_data: returned (res,auth)=%s,%s'
                  % (str(res), str(auth)))
        if not res:
            log.info('Refresh of file %s failed.' % data_object)
            result = False
            # don't use possibly bad 'auth' again,
            # some proxies lock out on repeated failures.
            auth = None

    if result:
        log.critical('Local data has been refreshed.')
    else:
        log.critical('Local data has been refreshed, with one or more errors.')
    log.critical()
    return result
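
A minimal invocation sketch; the file name and mirror URLs below are placeholders, not values from the original script, and Local_Data_Directory and log are the module-level names used above.

data_objects = ['example_validation_data.tgz']          # placeholder file name
mirrors = ['http://mirror-one.example.org/data/',
           'http://mirror-two.example.org/data/']       # placeholder mirror URLs

if not refresh_local_data(data_objects, Local_Data_Directory, mirrors):
    log.critical('Could not refresh local data, giving up.')
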
def teardown():
    '''Clean up after validation run.'''

    log.debug('teardown: called')
def run_chennai(sim_id):
    project_root = os.path.abspath(os.path.dirname(__file__))
    if not os.path.exists(project_root):
        os.makedirs(project_root)
    print "project_root = " + project_root

    inputs_dir = '%s/inputs/' % project_root
    if not os.path.exists(inputs_dir):
        os.makedirs(inputs_dir)
    print "inputs_dir = " + inputs_dir

    working_dir = '%s/working/%s/' % (project_root, sim_id)
    if not os.path.exists(working_dir):
        os.makedirs(working_dir)
    print "working_dir = " + working_dir

    outputs_dir = '%s/outputs/%s' % (project_root, sim_id)
    if not os.path.exists(outputs_dir):
        os.makedirs(outputs_dir)
    print "outputs_dir = " + outputs_dir

    # get data
    print "downloading data..."
    urllib.urlretrieve(
        'http://chennaifloodmanagement.org/uploaded/layers/utm44_1arc_v3.tif',
        inputs_dir + 'utm44_1arc_v3.tif'
    )

    print os.listdir(inputs_dir)

    # configure logging TODO: get this working!
    log_location = project_root + '/' + sim_id + '.log'
    open(log_location, 'a').close()
    log.console_logging_level = log.INFO
    log.log_logging_level = log.DEBUG
    log.log_filename = log_location
    print "# log.log_filename is: " + log.log_filename
    print "# log_location is: " + log_location
    log.debug('A message at DEBUG level')
    log.info('Another message, INFO level')

    print "# starting"
    bounding_polygon_01 = [
        [303382.14647903712, 1488780.8996663219],
        [351451.89152459265, 1499834.3704521982],
        [378957.03975921532, 1493150.8764886451],
        [422656.80798244767, 1504204.3472745214],
        [433196.16384805075, 1471300.9923770288],
        [421885.63560203766, 1413463.0638462803],
        [408261.59021479468, 1372590.9276845511],
        [371245.31595511554, 1427344.16669366],
        [316492.0769460068, 1417833.0406686035],
        [303382.14647903712, 1488780.8996663219]
    ]
    boundary_tags_01 = {
        'inland': [0, 1, 2, 6, 7, 8],
        'ocean': [3, 4, 5]
    }
    print "# Create domain:"
    print "# mesh_filename = " + working_dir + 'mesh_01.msh'
    domain = anuga.create_domain_from_regions(bounding_polygon=bounding_polygon_01,
                                              boundary_tags=boundary_tags_01,
                                              mesh_filename=working_dir + 'mesh_01.msh',
                                              maximum_triangle_area=100000,
                                              verbose=True)
    domain.set_name(sim_id)
    domain.set_datadir(outputs_dir)
    poly_fun_pairs = [
            [
                'Extent',
                inputs_dir + 'utm44_1arc_v3.tif'
            ]
    ]
    print "# create topography_function"
    print "input raster = " + inputs_dir + 'utm44_1arc_v3.tif'
    topography_function = qs.composite_quantity_setting_function(
        poly_fun_pairs,
        domain,
        nan_treatment='exception',
    )
    print topography_function
    print "# set_quantity elevation"
    domain.set_quantity('elevation', topography_function)  # Use function for elevation
    domain.set_quantity('friction', 0.03)  # Constant friction
    domain.set_quantity('stage', 1)  # Constant initial stage

    print "# all quantities set"

    print "# Setup boundary conditions"
    Br = anuga.Reflective_boundary(domain)  # Solid reflective wall
    Bt = anuga.Transmissive_boundary(domain)  # Continue all values on boundary
    Bd = anuga.Dirichlet_boundary([-20, 0., 0.])  # Constant boundary values
    Bi = anuga.Dirichlet_boundary([10.0, 0, 0])  # Inflow
    Bw = anuga.Time_boundary(
        domain=domain,  # Time dependent boundary
        function=lambda t: [(10 * sin(t * 2 * pi) - 0.3) * exp(-t), 0.0, 0.0]
    )

    print "# Associate boundary tags with boundary objects"
    domain.set_boundary({'inland': Br, 'ocean': Bd})
    print domain.get_boundary_tags()

    catchmentrainfall = Rainfall(
        domain=domain,
        rate=0.2
    )
    # # Note need path to File in String.
    # # Else assumed in same directory
    domain.forcing_terms.append(catchmentrainfall)

    print "# Evolve system through time"
    counter_timestep = 0
    for t in domain.evolve(yieldstep=300, finaltime=6000):
        counter_timestep += 1
        print counter_timestep
        print domain.timestepping_statistics()

    asc_out_momentum = outputs_dir + '/' + sim_id + '_momentum.asc'
    asc_out_depth = outputs_dir + '/' + sim_id + '_depth.asc'

    anuga.sww2dem(outputs_dir + '/' + sim_id + '.sww',
                  asc_out_momentum,
                  quantity='momentum',
                  number_of_decimal_places=3,
                  cellsize=30,
                  reduction=max,
                  verbose=True)
    anuga.sww2dem(outputs_dir + '/' + sim_id + '.sww',
                  asc_out_depth,
                  quantity='depth',
                  number_of_decimal_places=3,
                  cellsize=30,
                  reduction=max,
                  verbose=True)

    outputs =[asc_out_depth, asc_out_momentum]

    for output in outputs:
        print "# Convert ASCII grid to GeoTiff so geonode can import it"
        src_ds = gdal.Open(output)
        dst_filename = (output[:-3] + 'tif')

        print "# Create gtif instance"
        driver = gdal.GetDriverByName("GTiff")

        print "# Output to geotiff"
        dst_ds = driver.CreateCopy(dst_filename, src_ds, 0)

        print "# Properly close the datasets to flush the disk"
        dst_ds = None
        src_ds = None

    print "Done. Nice work."
Example #16
def run_tsudat(json_data):
    """"Run ANUGA using data from a json data file."""

    print('Batemans Bay run_tsudat.py')

    def dump_project_py():
        """Debug routine - dump project attributes to the log."""

        # list all project.* attributes
        for key in dir(project):
            if not key.startswith('__'):
                try:
                    log.info('project.%s=%s' % (key, eval('project.%s' % key)))
                except AttributeError:
                    pass

    # plug our exception handler into the python system
    sys.excepthook = excepthook

    # get json data and adorn project object with its data
    adorn_project(json_data)

    # set logfile to be in run output folder
    if project.debug:
        log.log_logging_level = log.DEBUG
    log.log_filename = os.path.join(project.output_folder, 'tsudat.log')

    # run the tsudat simulation
    if project.debug:
        dump_project_py()

    youngest_input = get_youngest_input()
    sww_file = os.path.join(project.output_folder, project.scenario_name+'.sww')
    try:
        sww_ctime = os.path.getctime(sww_file)
    except OSError:
        sww_ctime = 0.0		# SWW file not there

    if project.force_run or youngest_input > sww_ctime:
        log.info('#'*90)
        log.info('# Running simulation')
        log.info('#'*90)
        setup_model()
#        build_elevation()
        build_urs_boundary(project.mux_input_filename, project.event_sts)
        run_model()
        log.info('End of simulation')
    else:
        log.info('#'*90)
        log.info('# Not running simulation')
        log.debug('# SWW file %s is younger than input data' % sww_file)
        log.info('# If you want to force a simulation run, select FORCE RUN')
        log.info('#'*90)

    # now do optional post-run extractions
    if project.get_results_max:
        log.info('~'*90)
        log.info('~ Running export_results_max()')
        log.info('~'*90)
        export_results_max()
        log.info('export_results_max() has finished')
    else:
        log.info('~'*90)
        log.info('~ Not running export_results_max() - not requested')
        log.info('~'*90)

    if project.get_timeseries:
        log.info('~'*90)
        log.info('~ Running get_timeseries()')
        log.info('~'*90)
        get_timeseries()
        log.info('get_timeseries() has finished')
    else:
        log.info('~'*90)
        log.info('~ Not running get_timeseries() - not requested')
        log.info('~'*90)

    log.info('#'*90)
    log.info('# Simulation finished')
    log.info('#'*90)
        return 1
    lines = fd.readlines()
    fd.close()

    output_directory = None
    for line in lines:
        if line.startswith(OUTDIR_PREFIX):
            output_directory = line.replace(OUTDIR_PREFIX, '', 1)
            output_directory = output_directory.strip()
            break
    if output_directory is None:
        log.critical("Couldn't find line starting with '%s' in file '%s'"
                     % (OUTDIR_PREFIX, RUNMODEL_STDOUT))
        return 1

    log.debug('check_that_output_is_as_expected: output_directory=%s'
              % output_directory)
    
    # compare SWW files here and there
    new_output_sww = os.path.join(output_directory, expected_sww)
    #cmd = 'python cmpsww.py %s %s > cmpsww.stdout' % (local_sww, new_output_sww)
    cmd = 'python compare_model_timeseries.py %s %s %e > compare_model_timeseries.stdout' %\
          (local_sww, new_output_sww, epsilon)
    print '-------------------------------------'
    print cmd
    print '-------------------------------------'    
    
    log.debug("check_that_output_is_as_expected: doing '%s'" % cmd)
    res = os.system(cmd)
    log.debug("check_that_output_is_as_expected: res=%d" % res)
    log.critical()
    print 'Result', res