Example no. 1
0
    def set_source(self, verbose=True):
        """Create eruptive source file

        Requires
        - input file
        - grain file
        - database file
        """

        executable = os.path.join(self.utilities_dir,
                                  'SetSrc', 'SetSrc.PUB.exe')

        logfile = self.logbasepath + '.SetSrc.log'

        if verbose:
            header('Creating eruptive source file (SetSrc)')


        cmd = '%s '*8 % (executable, logfile,
                         self.inputfile,
                         self.sourcefile,
                         self.grainfile,
                         self.databasefile,
                         'FALL3D',    # Taken from hardwired values in Script-SetSrc
                         'YES')


        self.runscript(cmd, 'SetSrc', logfile, lines=5,
                       verbose=verbose)
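
The command line above is assembled with the '%s ' * 8 % (...) idiom: the format token is repeated once per argument and the result is a space-separated command string (with a trailing space). The following is a minimal standalone sketch of that idiom only; the argument values are placeholders, not real SetSrc inputs.

# Sketch of the "'%s ' * N % (...)" command-building idiom used above.
# All values below are placeholders for illustration.
args = ('/opt/fall3d/Utilities/SetSrc/SetSrc.PUB.exe',  # placeholder executable path
        'scenario.SetSrc.log',                          # placeholder log file
        'scenario.inp', 'scenario.src',
        'scenario.grn', 'scenario.dbs.nc',
        'FALL3D', 'YES')

cmd = '%s ' * len(args) % args
# cmd == '/opt/fall3d/.../SetSrc.PUB.exe scenario.SetSrc.log scenario.inp ... FALL3D YES '
# Note the trailing space; an equivalent without it:
assert cmd.rstrip() == ' '.join(args)
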
Example no. 2
0
def run_nc2prof(windfield_directory, verbose=True):
    """Run nc2prof - extract wind profiles from NCEP data

    Requires
        - input file
        - NCEP wind files
           TMP.nc
           HGT.nc
           UGRD.nc
           VGRD.nc
    """

    # FIXME: Perhaps include into AIM class (somehow)

    Fall3d_dir = get_fall3d_home()
    utilities_dir = os.path.join(Fall3d_dir, 'Utilities')
    executable = os.path.join(utilities_dir, 'nc2prof', 'nc2prof.exe')

    if verbose:
        header('Running nc2prof in %s' % windfield_directory)

    cmd = 'cd %s; %s ' % (windfield_directory, executable)

    logfile = 'run_nc2prof.log'
    run(cmd, verbose=verbose, stdout=logfile, stderr='/dev/null')
def install_ubuntu_packages():    
    """Get required Ubuntu packages for riab_server.
       It is OK if they are already installed
    """

    header('Installing Ubuntu packages')     
    
    s = 'apt-get clean'
    run(s, verbose=True)
    
    
    for package in ['python-setuptools']:

                    
        s = 'apt-get -y install %s' % package
        
        log_base = '%s_install' % package
        try:
            run(s,
                stdout=log_base + '.out',
                stderr=log_base + '.err',                  
                verbose=True)
        except:
            msg = 'Installation of package %s failed. ' % package
            msg += 'See log file %s.out and %s.err for details' % (log_base, log_base)
            raise Exception(msg)
Example no. 4
0
    def set_database(self, verbose=True):
        """Create meteorological database

        Requires
        - input file
        - topography
        - wind profile
        """


        dbsfilename = self.scenario_name + '.dbs.nc'
        if dbsfilename in os.listdir('.'):
            print 'DBS file found - will not run SetDbs'
            s = 'cp %s %s' % (dbsfilename, self.databasefile)
            run(s)
            return


        executable = os.path.join(self.utilities_dir,
                                  'SetDbs', 'SetDbs.PUB.exe')

        logfile = self.logbasepath + '.SetDbs.log'

        if verbose:
            header('Building meteorological database (SetDbs)')


        cmd = '%s '*7 % (executable, logfile,
                         self.inputfile, self.wind_profile,
                         self.databasefile,
                         self.topography, self.meteorological_model)


        self.runscript(cmd, 'SetDbs', logfile, lines=5,
                       verbose=verbose)
Example no. 5
0
def install_ubuntu_packages():    
    """Get required Ubuntu packages for geoserver.
       It is OK if they are already installed
    """

    header('Installing Ubuntu packages')     
    
    s = 'apt-get clean'
    run(s, verbose=True)
    
    #s = 'apt-get update'
    #run(s, verbose=True) 

    for package in ['build-essential', 'libxaw7-dev']:
        # Possibly also 'netcdfg-dev'
        
        s = 'apt-get -y install %s' % package
        
        log_base = '%s_install' % package
        try:
            run(s,
                stdout=log_base + '.out',
                stderr=log_base + '.err',                  
                verbose=True)
        except:
            msg = 'Installation of package %s failed. ' % package
            msg += 'See log file %s.out and %s.err for details' % (log_base, log_base)
            raise Exception(msg)
Example no. 6
0
    def set_database(self, verbose=True):
        """Create meteorological database

        Requires
        - input file
        - topography
        - wind profile
        """

        dbsfilename = self.scenario_name + '.dbs.nc'
        if dbsfilename in os.listdir('.'):
            print 'DBS file found - will not run SetDbs'
            s = 'cp %s %s' % (dbsfilename, self.databasefile)
            run(s)
            return

        executable = os.path.join(self.utilities_dir, 'SetDbs',
                                  'SetDbs.PUB.exe')

        logfile = self.logbasepath + '.SetDbs.log'

        if verbose:
            header('Building meteorological database (SetDbs)')

        cmd = '%s ' * 7 % (executable, logfile, self.inputfile,
                           self.wind_profile, self.databasefile,
                           self.topography, self.meteorological_model)

        self.runscript(cmd, 'SetDbs', logfile, lines=5, verbose=verbose)
Example no. 7
0
    def set_source(self, verbose=True):
        """Create eruptive source file

        Requires
        - input file
        - grain file
        - database file
        """

        executable = os.path.join(self.utilities_dir, 'SetSrc',
                                  'SetSrc.PUB.exe')

        logfile = self.logbasepath + '.SetSrc.log'

        if verbose:
            header('Creating eruptive source file (SetSrc)')

        cmd = '%s ' * 8 % (
            executable,
            logfile,
            self.inputfile,
            self.sourcefile,
            self.grainfile,
            self.databasefile,
            'FALL3D',  # Taken from hardwired values in Script-SetSrc
            'YES')

        self.runscript(cmd, 'SetSrc', logfile, lines=5, verbose=verbose)
Example no. 8
0
    def set_granum(self, verbose=True):
        """Create grainsize profile

        Requires
        - input file
        """

        grainfilename = self.scenario_name + '.grn'
        if grainfilename in os.listdir('.'):
            print 'Grainfile found - will not run SetGrn'
            s = 'cp %s %s' % (grainfilename, self.grainfile)
            run(s)
            return

        executable = os.path.join(self.utilities_dir, 'SetGrn',
                                  'SetGrn.PUB.exe')

        logfile = self.logbasepath + '.SetGrn.log'

        if verbose:
            header('Setting grain size (SetGrn)')

        cmd = '%s %s %s %s' % (executable, logfile, self.inputfile,
                               self.grainfile)

        self.runscript(cmd, 'SetGrn', logfile, lines=4, verbose=verbose)
def install_ubuntu_packages():    
    """Get required Ubuntu packages for geoserver.
       It is OK if they are already installed
    """

    header('Installing Ubuntu packages')     
    
    s = 'apt-get clean'
    run(s, verbose=True)
    
    #s = 'apt-get update'
    #run(s, verbose=True) 

    for package in ['apache2', 'libxml2', 'libxml2-dev', 'libxslt1-dev', 'postgresql', 'postgis']:
        
        s = 'apt-get -y install %s' % package
        
        log_base = '%s_install' % package
        try:
            run(s,
                stdout=log_base + '.out',
                stderr=log_base + '.err',                  
                verbose=True)
        except:
            msg = 'Installation of package %s failed. ' % package
            msg += 'See log file %s.out and %s.err for details' % (log_base, log_base)
            raise Exception(msg)
Example no. 10
0
def run_nc2prof(windfield_directory, verbose=True):
    """Run nc2prof - extract wind profiles from NCEP data

    Requires
        - input file
        - NCEP wind files
           TMP.nc
           HGT.nc
           UGRD.nc
           VGRD.nc
    """

    # FIXME: Perhaps include into AIM class (somehow)

    Fall3d_dir = get_fall3d_home()
    utilities_dir = os.path.join(Fall3d_dir, 'Utilities')
    executable = os.path.join(utilities_dir, 'nc2prof', 'nc2prof.exe')

    if verbose:
        header('Running nc2prof in %s' % windfield_directory)


    cmd = 'cd %s; %s ' % (windfield_directory, executable)

    logfile = 'run_nc2prof.log'
    run(cmd, verbose=verbose, stdout=logfile, stderr='/dev/null')
Example no. 11
0
    def set_granum(self, verbose=True):
        """Create grainsize profile

        Requires
        - input file
        """

        grainfilename = self.scenario_name + '.grn'
        if grainfilename in os.listdir('.'):
            print 'Grainfile found - will not run SetGrn'
            s = 'cp %s %s' % (grainfilename, self.grainfile)
            run(s)
            return


        executable = os.path.join(self.utilities_dir,
                                  'SetGrn', 'SetGrn.PUB.exe')

        logfile = self.logbasepath + '.SetGrn.log'

        if verbose:
            header('Setting grain size (SetGrn)')

        cmd = '%s %s %s %s' % (executable, logfile,
                               self.inputfile, self.grainfile)

        self.runscript(cmd, 'SetGrn', logfile, lines=4,
                       verbose=verbose)
Example no. 12
0
    def run_fall3d(self, verbose=True):
        """Run Fall3d (serial)

        Requires
        - input file
        - source file
        - grain file
        - database file
        """

        executable = os.path.join(self.Fall3d_dir, 'Fall3d_ser.PUB.exe')

        logfile = self.logbasepath + '.Fall3d.log'

        if verbose:
            header('Running ash model (Fall3d)')


        cmd = '%s '*7 % (executable,
                         self.inputfile,
                         self.sourcefile,
                         self.grainfile,
                         self.databasefile,
                         logfile,
                         self.resultfile)

        self.runscript(cmd, 'Fall3d', logfile, lines=2,
                       verbose=verbose)
Example no. 13
0
    def Xgenerate_contours(self, interval=1, verbose=True):
        """Contour NetCDF grids directly
        """
        # FIXME (Ole): This does not work - probably due to the GDAL NetCDF driver ignoring coordinate system

        if verbose:
            header('Contouring NetCDF thickness grids')

        for filename in os.listdir(self.output_dir):
            if filename.endswith('.res.nc'):

                pathname = os.path.join(self.output_dir, filename)
                if verbose: print '  ', pathname

                basename, ext = os.path.splitext(pathname)

                tiffile = basename + '.tif'
                shpfile = basename + '.shp'
                kmlfile = basename + '.kml'
                prjfile = basename + '.prj'



                # Generate GeoTIFF raster
                netcdf_subdata = 'NETCDF:"%s":THICKNESS' % pathname
                s = 'gdal_translate -of GTiff -b 1 %s %s' % (netcdf_subdata, tiffile) # FIXME: Band is hardwired
                run_with_errorcheck(s, tiffile,
                                    verbose=verbose)


                # Generate contours as shapefiles
                s = '/bin/rm -rf %s' % shpfile # Clear the way
                run(s, verbose=False)

                s = 'gdal_contour -i %f %s %s' % (interval, tiffile, shpfile)
                run_with_errorcheck(s, shpfile,
                                    verbose=verbose)


                # Generate KML
                if self.WKT_projection:
                    # Create associated projection file
                    fid = open(prjfile, 'w')
                    fid.write(self.WKT_projection)
                    fid.close()

                    s = 'ogr2ogr -f KML -t_srs EPSG:4623 -s_srs %s %s %s' % (prjfile, kmlfile, shpfile)
                else:
                    s = 'ogr2ogr -f KML -t_srs EPSG:4623 %s %s' % (kmlfile, shpfile)

                run_with_errorcheck(s, kmlfile,
                                    verbose=verbose)
def create_subversion_repository(project):
    """Create and configure Subversion
    """
    header('Creating Subversion configuration for %s' % project)    

    # Create svn home dir if it doesn't exist and change to it
    makedir(svn_home)
        
    # Create repository 
    project_dir = os.path.join(svn_home, project)
    s = 'svnadmin create %s' % project_dir
    run(s)
    
    s = 'chown -R www-data:www-data %s' % project_dir
    run(s)
    
    s = 'chmod -R 755 %s' % project_dir
    run(s)    
    
    
    # Add information to the Apache web server
    fid = open_log('/etc/apache2/mods-enabled/dav_svn.conf', 'a')
    fid.write('\n%s%s\n' % (svn_header, project))
    fid.write('<Location /svn/%s>\n' % project)
    fid.write('  DAV svn\n')
    fid.write('  SVNPath %s\n' % project_dir)
    fid.write('  AuthType Basic\n')
    fid.write('  AuthName "Subversion Repository"\n')
    fid.write('  AuthUserFile %s\n' % password_filename) 
    fid.write('  AuthzSVNAccessFile %s\n' % auth_filename)
    fid.write('  Require valid-user\n')
    fid.write('</Location>\n')
    fid.close()

    # Make sure authentication file is available
    # FIXME (Ole): Groups are hardwired for now
    if not os.path.isfile(auth_filename):
        fid = open_log(auth_filename, 'w')
        fid.write('[groups]\n')
        fid.write('aifdr =\n')
        fid.write('guests =\n')
        fid.close()
         
    # Add project to authorization file
    fid = open_log(auth_filename, 'a')
    fid.write('\n')
    fid.write('[%s:/]\n' % project)
    fid.write('@aifdr = rw\n')
    fid.write('@guests = r\n')
    fid.close()    
Example no. 15
0
    def Xgenerate_contours(self, interval=1, verbose=True):
        """Contour NetCDF grids directly
        """
        # FIXME (Ole): This does not work - probably due to the GDAL NetCDF driver ignoring coordinate system

        if verbose:
            header('Contouring NetCDF thickness grids')

        for filename in os.listdir(self.output_dir):
            if filename.endswith('.res.nc'):

                pathname = os.path.join(self.output_dir, filename)
                if verbose: print '  ', pathname

                basename, ext = os.path.splitext(pathname)

                tiffile = basename + '.tif'
                shpfile = basename + '.shp'
                kmlfile = basename + '.kml'
                prjfile = basename + '.prj'

                # Generate GeoTIFF raster
                netcdf_subdata = 'NETCDF:"%s":THICKNESS' % pathname
                s = 'gdal_translate -of GTiff -b 1 %s %s' % (
                    netcdf_subdata, tiffile)  # FIXME: Band is hardwired
                run_with_errorcheck(s, tiffile, verbose=verbose)

                # Generate contours as shapefiles
                s = '/bin/rm -rf %s' % shpfile  # Clear the way
                run(s, verbose=False)

                s = 'gdal_contour -i %f %s %s' % (interval, tiffile, shpfile)
                run_with_errorcheck(s, shpfile, verbose=verbose)

                # Generate KML
                if self.WKT_projection:
                    # Create associated projection file
                    fid = open(prjfile, 'w')
                    fid.write(self.WKT_projection)
                    fid.close()

                    s = 'ogr2ogr -f KML -t_srs EPSG:4623 -s_srs %s %s %s' % (
                        prjfile, kmlfile, shpfile)
                else:
                    s = 'ogr2ogr -f KML -t_srs EPSG:4623 %s %s' % (kmlfile,
                                                                   shpfile)

                run_with_errorcheck(s, kmlfile, verbose=verbose)
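
For reference, the three external commands built in this method expand as follows for a hypothetical result file. The THICKNESS subdataset, the band number and the EPSG code are taken from the code above; the paths and the interval are placeholders, and the no-projection branch of the ogr2ogr call is shown.

# Worked example of the command strings assembled in Xgenerate_contours
# (placeholder paths; interval=1 as in the default argument).
pathname = '/outputs/merapi.res.nc'
tiffile = '/outputs/merapi.res.tif'
shpfile = '/outputs/merapi.res.shp'
kmlfile = '/outputs/merapi.res.kml'
interval = 1

netcdf_subdata = 'NETCDF:"%s":THICKNESS' % pathname
translate = 'gdal_translate -of GTiff -b 1 %s %s' % (netcdf_subdata, tiffile)
contour = 'gdal_contour -i %f %s %s' % (interval, tiffile, shpfile)
to_kml = 'ogr2ogr -f KML -t_srs EPSG:4623 %s %s' % (kmlfile, shpfile)

# translate -> 'gdal_translate -of GTiff -b 1 NETCDF:"/outputs/merapi.res.nc":THICKNESS /outputs/merapi.res.tif'
# contour   -> 'gdal_contour -i 1.000000 /outputs/merapi.res.tif /outputs/merapi.res.shp'
# to_kml    -> 'ogr2ogr -f KML -t_srs EPSG:4623 /outputs/merapi.res.kml /outputs/merapi.res.shp'
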
Example no. 16
0
    def nc2grd(self, verbose=True):
        """Run nc2grd - post processing tool

        Requires
        - input file
        - source file
        - grain file
        - database file
        """

        executable = os.path.join(self.utilities_dir, 'nc2grd', 'nc2grd.exe')

        logfile = self.logbasepath + '.nc2grd.log'

        if verbose:
            header('Running nc2grd')


        cmd = '%s '*5 % (executable,
                         logfile,
                         self.inputfile,
                         self.resultfile + '.nc',
                         self.grdfile)

        self.runscript(cmd, 'nc2grd', logfile, lines=2,
                       verbose=verbose)


        # Fix the filenames up (FIXME: Hopefully this is a temporary measure)
        #print 'Post processing generated the following files:'
        for filename in os.listdir(self.output_dir):
            if filename.endswith('.grd'):
                fields = filename.split('.')

                # Ditch date and minutes
                hour, _ = fields[3].split(':')

                new_filename = fields[0] + '.' + hour + 'h.' + fields[-2] + '.' + fields[-1]


                s = 'cd %s; mv %s %s' % (self.output_dir, filename, new_filename)
                os.system(s)
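
The renaming loop above can be illustrated with a hypothetical Fall3d GRD filename of the form scenario.res.<date>.<HH:MM>.<quantity>.grd; the exact naming convention is an assumption made for this sketch.

# Illustration of the filename rewrite in nc2grd (hypothetical input name).
filename = 'merapi.res.20101105.06:00.load.grd'

fields = filename.split('.')
# fields == ['merapi', 'res', '20101105', '06:00', 'load', 'grd']

hour, _ = fields[3].split(':')   # Ditch date and minutes -> '06'
new_filename = fields[0] + '.' + hour + 'h.' + fields[-2] + '.' + fields[-1]

assert new_filename == 'merapi.06h.load.grd'
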
Example no. 17
0
def run_hazardmap(model_output_directory, verbose=True):
    """Run HazardMapping.exe

    Requires
        - input file
        - Directory with FALL3D model outputs
    """

    # FIXME: Perhaps include into AIM class (somehow)

    Fall3d_dir = get_fall3d_home()
    utilities_dir = os.path.join(Fall3d_dir, 'Utilities')
    executable = os.path.join(utilities_dir, 'HazardMaps', 'HazardMapping.exe')

    if verbose:
        header('Running hazard mapping in %s' % model_output_directory)

    cmd = 'cd %s; %s ' % (model_output_directory, executable)

    logfile = 'run_hazardmapping.log'
    run(cmd, verbose=verbose, stdout=logfile, stderr='/dev/null')
Example no. 18
0
def run_hazardmap(model_output_directory, verbose=True):
    """Run HazardMapping.exe

    Requires
        - input file
        - Directory with FALL3D model outputs
    """

    # FIXME: Perhaps include into AIM class (somehow)

    Fall3d_dir = get_fall3d_home()
    utilities_dir = os.path.join(Fall3d_dir, 'Utilities')
    executable = os.path.join(utilities_dir, 'HazardMaps', 'HazardMapping.exe')

    if verbose:
        header('Running hazard mapping in %s' % model_output_directory)

    cmd = 'cd %s; %s ' % (model_output_directory, executable)

    logfile = 'run_hazardmapping.log'
    run(cmd, verbose=verbose, stdout=logfile, stderr='/dev/null')
Example no. 19
0
    def generate_contours(self, verbose=True):
        """Contour ASCII grids into shp and kml files

        The function uses model parameters Load_contours, Thickness_contours and Thickness_units.
        """

        if verbose:
            header('Contouring ASCII grids to SHP and KML files')

        for filename in os.listdir(self.output_dir):
            if filename.endswith('.asc'):

                if verbose: print 'Processing %s:\t' % filename
                fields = filename.split('.')

                if fields[-2] == 'load':
                    units = 'kg/m^2'
                    contours = self.params['load_contours']
                    # NOTE: gdal_contour no longer supports special characters in labels. This used to work in around 2011.
                    #attribute_name = 'Load[%s]' % units
                    attribute_name = 'Load'
                elif fields[-2] == 'thickness':
                    units = self.params['thickness_units'].lower()
                    contours = self.params['thickness_contours']
                    #attribute_name = 'Thickness[%s]' % units
                    attribute_name = 'Thickness'
                else:
                    attribute_name = fields[-2]  #'Value'
                    units = 'default'  # Unit is implied by .inp file
                    contours = True  # Default is fixed number of contours

                _generate_contours(
                    filename,
                    contours,
                    units,
                    attribute_name,
                    output_dir=self.output_dir,
                    meteorological_model=self.meteorological_model,
                    WKT_projection=self.WKT_projection,
                    verbose=verbose)
Example no. 20
0
    def convert_ncgrids_to_asciigrids(self, verbose=True):
        """Convert (selected) NC data layers to ASC files

        One ASCII file is generated for each timestep (assumed to be in hours).

        The purposes of the ASCII files are
        * They can be ingested by ESRI and other GIS tools.
        * They have an associated projection file that allows georeferencing.
        * They form the inputs for the contouring
        """

        if verbose:
            header('Converting NetCDF data to ASCII grids')


        for filename in os.listdir(self.output_dir):
            if filename.endswith('.res.nc'):
                if verbose: print '  ', filename
                for subdataset in ['LOAD', 'THICKNESS', 'C_FL050', 'C_FL100', 'C_FL150', 'C_FL200', 'C_FL250', 'C_FL300']:
                    nc2asc(os.path.join(self.output_dir, filename),
                           subdataset=subdataset,
                           projection=self.WKT_projection)
Example no. 21
0
    def convert_surfergrids_to_asciigrids(self, verbose=True):
        """Convert GRD files to ASC files

        The purposes of the ASCII files are
        * They can be ingested by ESRI and other GIS tools.
        * They have an associated projection file that allows georeferencing.
        * They form the inputs for the contouring
        """

        # FIXME (Ole): This function is probably obsolete in Fall3d, version 6
        #grd = self.params['Output_results_in_GRD_format'].lower()
        #if verbose and grd == 'yes':
        #    header('Converting GRD files to ASCII grids')

        if verbose:
            header('Converting grd files to ASCII')

        for filename in os.listdir(self.output_dir):
            if filename.endswith('.grd'):
                if verbose: print '%s -> %s' % (filename, filename[:-4] + '.asc')
                grd2asc(os.path.join(self.output_dir, filename),
                        projection=self.WKT_projection)
Example no. 22
0
    def run_fall3d(self, verbose=True):
        """Run Fall3d (serial)

        Requires
        - input file
        - source file
        - grain file
        - database file
        """

        executable = os.path.join(self.Fall3d_dir, 'Fall3d_ser.PUB.exe')

        logfile = self.logbasepath + '.Fall3d.log'

        if verbose:
            header('Running ash model (Fall3d)')

        cmd = '%s ' * 7 % (executable, self.inputfile, self.sourcefile,
                           self.grainfile, self.databasefile, logfile,
                           self.resultfile)

        self.runscript(cmd, 'Fall3d', logfile, lines=2, verbose=verbose)
Example no. 23
0
    def generate_contours(self, verbose=True):
        """Contour ASCII grids into shp and kml files

        The function uses model parameters Load_contours, Thickness_contours and Thickness_units.
        """


        if verbose:
            header('Contouring ASCII grids to SHP and KML files')

        for filename in os.listdir(self.output_dir):
            if filename.endswith('.asc'):

                if verbose: print 'Processing %s:\t' % filename
                fields = filename.split('.')

                if fields[-2] == 'load':
                    units = 'kg/m^2'
                    contours = self.params['load_contours']
                    # NOTE: gdal_contour no longer supports special characters in labels. This used to work in around 2011.
                    #attribute_name = 'Load[%s]' % units
                    attribute_name = 'Load'
                elif fields[-2] == 'thickness':
                    units = self.params['thickness_units'].lower()
                    contours = self.params['thickness_contours']
                    #attribute_name = 'Thickness[%s]' % units
                    attribute_name = 'Thickness'
                else:
                    attribute_name = fields[-2] #'Value'
                    units = 'default' # Unit is implied by .inp file
                    contours = True # Default is fixed number of contours


                _generate_contours(filename, contours, units, attribute_name,
                                   output_dir=self.output_dir,
                                   meteorological_model=self.meteorological_model,
                                   WKT_projection=self.WKT_projection,
                                   verbose=verbose)
Example no. 24
0
    def convert_surfergrids_to_asciigrids(self, verbose=True):
        """Convert GRD files to ASC files

        The purposes of the ASCII files are
        * They can be ingested by ESRI and other GIS tools.
        * They have an associated projection file that allows georeferencing.
        * They form the inputs for the contouring
        """

        # FIXME (Ole): This function is probably obsolete in Fall3d, version 6
        #grd = self.params['Output_results_in_GRD_format'].lower()
        #if verbose and grd == 'yes':
        #    header('Converting GRD files to ASCII grids')

        if verbose:
            header('Converting grd files to ASCII')

        for filename in os.listdir(self.output_dir):
            if filename.endswith('.grd'):
                if verbose:
                    print '%s -> %s' % (filename, filename[:-4] + '.asc')
                grd2asc(os.path.join(self.output_dir, filename),
                        projection=self.WKT_projection)
Example no. 25
0
    def nc2grd(self, verbose=True):
        """Run nc2grd - post processing tool

        Requires
        - input file
        - source file
        - grain file
        - database file
        """

        executable = os.path.join(self.utilities_dir, 'nc2grd', 'nc2grd.exe')

        logfile = self.logbasepath + '.nc2grd.log'

        if verbose:
            header('Running nc2grd')

        cmd = '%s ' * 5 % (executable, logfile, self.inputfile,
                           self.resultfile + '.nc', self.grdfile)

        self.runscript(cmd, 'nc2grd', logfile, lines=2, verbose=verbose)

        # Fix the filenames up (FIXME: Hopefully this is a temporary measure)
        #print 'Post processing generated the following files:'
        for filename in os.listdir(self.output_dir):
            if filename.endswith('.grd'):
                fields = filename.split('.')

                # Ditch date and minutes
                hour, _ = fields[3].split(':')

                new_filename = fields[0] + '.' + hour + 'h.' + fields[
                    -2] + '.' + fields[-1]

                s = 'cd %s; mv %s %s' % (self.output_dir, filename,
                                         new_filename)
                os.system(s)
Example no. 26
0
    def convert_ncgrids_to_asciigrids(self, verbose=True):
        """Convert (selected) NC data layers to ASC files

        One ASCII file is generated for each timestep (assumed to be in hours).

        The purposes of the ASCII files are
        * They can be ingested by ESRI and other GIS tools.
        * They have an associated projection file that allows georeferencing.
        * They form the inputs for the contouring
        """

        if verbose:
            header('Converting NetCDF data to ASCII grids')

        for filename in os.listdir(self.output_dir):
            if filename.endswith('.res.nc'):
                if verbose: print '  ', filename
                for subdataset in [
                        'LOAD', 'THICKNESS', 'C_FL050', 'C_FL100', 'C_FL150',
                        'C_FL200', 'C_FL250', 'C_FL300'
                ]:
                    nc2asc(os.path.join(self.output_dir, filename),
                           subdataset=subdataset,
                           projection=self.WKT_projection)
def install_ubuntu_packages():    
    """Get required Ubuntu packages for geoserver.
       It is OK if they are already installed
    """

    header('Installing Ubuntu packages')     
    
    s = 'apt-get clean'
    run(s, verbose=True)

    for package in ['apache2', 'openjdk-6-jre-lib', 'gdal-bin', 'curl', 'python-pycurl', 'python-gdal', 'python-setuptools']:

        s = 'apt-get -y install %s' % package
        
        log_base = workdir + '/' + '%s_install' % package
        try:
            run(s,
                stdout=log_base + '.out',
                stderr=log_base + '.err',                  
                verbose=True)
        except:
            msg = 'Installation of package %s failed. ' % package
            msg += 'See log file %s.out and %s.err for details' % (log_base, log_base)
            raise Exception(msg)
Example no. 28
0
def run_multiple_windfields(scenario,
                            windfield_directory=None,
                            hazard_output_folder=None,
                            dircomment=None,
                            echo=False,
                            verbose=True):
    """Run volcanic ash impact model for multiple wind fields.

    The wind fields are assumed to be in subfolder specified by windfield_directory,
    have the extension *.profile and follow the format use with scenarios.

    This function makes use of Open MPI and Pypar to execute in parallel but can also run sequentially.
    """

    try:
        import pypar
    except:
        P = 1
        p = 0
        processor_name = os.uname()[1]

        print 'Pypar could not be imported. Running sequentially on node %s' % processor_name,
    else:
        time.sleep(1)
        P = pypar.size()
        p = pypar.rank()
        processor_name = pypar.get_processor_name()

        print 'Processor %d initialised on node %s' % (p, processor_name)

        pypar.barrier()


    if p == 0:

        # Put logs along with the results
        logdir = os.path.join(hazard_output_folder, 'logs')
        makedir(logdir)

        header('Hazard modelling using multiple wind fields')
        print '*  Wind profiles obtained from: %s' % windfield_directory
        print '*  Scenario results stored in:  %s' %  hazard_output_folder
        print '*  Log files:'

        t_start = time.time()

        # Communicate hazard output directory name to all nodes to ensure they have exactly the same time stamp.
        for i in range(P):
            pypar.send((hazard_output_folder), i)
    else:
        # Receive correctly timestamped output directory names
        hazard_output_folder = pypar.receive(0)
        logdir = os.path.join(hazard_output_folder, 'logs')


    try:
        name = os.path.splitext(scenario)[0]
    except:
        name = 'run'


    # Wait until log dir has been created
    pypar.barrier()

    params = get_scenario_parameters(scenario)

    # Start processes staggered to avoid race conditions for disk access (otherwise it is slow to get started)
    time.sleep(2*p)

    # Logging
    s = 'Proc %i' % p
    print '     %s -' % string.ljust(s, 8),
    AIM_logfile = os.path.join(logdir, 'P%i.log' % p)
    start_logging(filename=AIM_logfile, echo=False)

    # Get cracking
    basename, _ = os.path.splitext(scenario)
    count_local = 0
    count_all = 0
    for i, file in enumerate(os.listdir(windfield_directory)):

        count_all += 1

        # Distribute jobs cyclically to processors
        if i%P == p:

            if not file.endswith('.profile'):
                continue

            count_local += 1

            windfield = '%s/%s' % (windfield_directory, file)
            windname, _ = os.path.splitext(file)
            header('Computing event %i on processor %i using wind field: %s' % (i, p, windfield))



            if dircomment is None:
                dircomment = params['eruption_comment']

            # Override or create parameters derived from native Fall3d wind field
            params['wind_profile'] = windfield
            params['wind_altitudes'] = get_layers_from_windfield(windfield) # FIXME: Try to comment this out.
            params['Meteorological_model'] = 'profile'

            if hazard_output_folder is None:
                hazard_output_folder = basename + '_hazard_outputs'

            if p == 0:
                print 'Storing multiple outputs in directory: %s' % hazard_output_folder

            # Run scenario
            aim = _run_scenario(params,
                                timestamp_output=True,
                                dircomment=dircomment + '_run%i_proc%i' % (i, p))

            # Make sure folder is present and can be shared by group
            makedir(hazard_output_folder)
            s = 'chmod -R g+w %s' % hazard_output_folder
            run(s)

            # Copy result file to output folder
            result_file = aim.scenario_name + '.res.nc'
            newname = aim.scenario_name + '.%s.res.nc' % windname # Name after wind file
            s = 'cp %s/%s %s/%s' % (aim.output_dir, result_file, hazard_output_folder, newname)
            run(s)

            # Create projectionfile in hazard output
            if i == 0:
                s = 'cp %s %s/%s' % (aim.projection_file, hazard_output_folder, 'HazardMaps.res.prj')
                run(s)

            # Clean up outputs from this scenario
            print 'P%i: Cleaning up %s' % (p, aim.output_dir)
            s = '/bin/rm -rf %s' % aim.output_dir
            run(s)

    print 'Processor %i done %i windfields' % (p, count_local)
    print 'Outputs available in directory: %s' % hazard_output_folder

    pypar.barrier()
    if p == 0:
        print 'Parallel simulation finished %i windfields in %i seconds' % (count_all, time.time() - t_start)


    pypar.finalize()
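
The job allocation above is a plain cyclic distribution: wind field number i is handled by the processor whose rank equals i modulo the number of processors. The following standalone sketch shows only that scheme, independently of Pypar; the file names and processor count are placeholders.

# Cyclic distribution of jobs to processors, as used in run_multiple_windfields.
# P = number of processors (placeholder); each rank p takes jobs with i % P == p.
P = 4
windfields = ['w%02d.profile' % i for i in range(10)]   # hypothetical file list

assignment = {}
for p in range(P):
    assignment[p] = [f for i, f in enumerate(windfields) if i % P == p]

# assignment[0] == ['w00.profile', 'w04.profile', 'w08.profile']
# assignment[1] == ['w01.profile', 'w05.profile', 'w09.profile'], etc.
assert sum(len(v) for v in assignment.values()) == len(windfields)
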
def backup_all():
    svn_projects = os.listdir(svn_home)

    header('Backing up Subversion/TRAC projects: %s' % time.asctime())
    for project in svn_projects:    
        backup(project)
Example no. 30
0
def _run_scenario(scenario, dircomment=None,
                  store_locally=False,
                  timestamp_output=True,
                  output_dir=None,
                  verbose=True):
    """Run volcanic ash impact scenario

    The argument scenario can be either
    * A Python script
    or
    * A Dictionary

    In any case scenario must specify all required
    volcanological parameters as stated in the file required_parameters.txt.

    If any parameters are missing or if additional parameters are
    specified an exception will be raised.

    Optional parameters:
      dircomment: will be added to output dir for easy identification.
      store_locally: if True, don't use TEPHRAHOME for outputs
      timestamp_output: If True, add timestamp to output dir
                        If False overwrite previous output with same name



    """

    t_start = time.time()

    params = get_scenario_parameters(scenario)

    if dircomment is None:
        dircomment = params['eruption_comment']

    # Determine if any of the parameters provide are a tuple
    # in which case each combination is run separately
    for name in params:
        p = params[name]
        if type(p) is tuple:
            # Unpack tuple and run scenario for each parameter value
            # This recursion will continue until no parameters
            # have tuples as values
            params_unpacked = params.copy()
            for value in p:
                params_unpacked[name] = value
                aim = run_scenario(params_unpacked, dircomment=dircomment + '_%s_%s' % (name, value),
                                   store_locally=store_locally,
                                   timestamp_output=timestamp_output,
                                   verbose=verbose)

            return

    # Instantiate model object
    aim = AIM(params,
              dircomment=dircomment,
              store_locally=store_locally,
              timestamp_output=timestamp_output,
              output_dir=output_dir,
              verbose=verbose)

    if not aim.postprocessing:
        # Store scenario script, input data files and
        # actual parameters to provide a complete audit trail
        aim.store_inputdata(verbose=verbose)

        # Generate input file for Fall3d-6
        aim.write_input_file(verbose=verbose)

        # Generate input data files in Fall3D format
        aim.generate_topography(verbose=verbose)

        # Run scripts for Fall3d
        aim.set_granum(verbose=verbose)
        aim.set_database(verbose=verbose)
        aim.set_source(verbose=verbose)
        aim.run_fall3d(verbose=verbose)

    # Fall3d postprocessing nc2grd
    #aim.nc2grd()

    # AIM post processing
    aim.convert_ncgrids_to_asciigrids(verbose=verbose)
    #aim.convert_surfergrids_to_asciigrids()
    aim.generate_contours(verbose=verbose)

    aim.organise_output()

    # Done
    if verbose:
        header('Simulation finished in %.2f seconds, output data are in %s'
                   % (time.time() - t_start,
                      aim.output_dir))

        # FIXME (Ole): Commented out due to parallelisation
        #try:
        #    target = os.readlink(aim.symlink)
        #except:
        #    header('WARNING: Shortcut %s does not appear to be working. Use real directory instead.' % aim.symlink)
        #    #print 'Error message was', e
        #else:
        #
        #    if target == aim.output_dir:
        #        header('Shortcut to output data is: %s -> %s' % (aim.symlink, target))
        #    else:
        #        header('WARNING: Shortcut %s has been changed by more recent run to: %s' % (aim.symlink, target))
        #
        print

    # Return object in case user wants access to it
    # (e.g. for further postprocessing)
    return aim
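
The tuple handling at the top of _run_scenario amounts to a one-parameter-at-a-time sweep: if any parameter value is a tuple, the scenario is re-run once per element, and the recursion bottoms out when no tuple values remain. Below is a minimal standalone sketch of that expansion; the parameter names and values are invented for illustration.

# Standalone sketch of the tuple expansion used in _run_scenario:
# expands tuple-valued parameters recursively into concrete parameter sets.
def expand(params):
    for name, value in params.items():
        if type(value) is tuple:
            expanded = []
            for v in value:
                unpacked = params.copy()
                unpacked[name] = v
                expanded.extend(expand(unpacked))
            return expanded
    return [params]          # No tuples left - one concrete run

runs = expand({'eruption_column_height': (10000, 15000),   # hypothetical parameters
               'mass_eruption_rate': 1e6})
assert len(runs) == 2        # one concrete parameter set per column height
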
Example no. 31
0
    def __init__(self,
                 params,
                 timestamp_output=True,
                 store_locally=False,
                 dircomment=None,
                 output_dir=None,
                 echo=True,
                 verbose=True):
        """Create AIM instance, common file names


        Optional arguments:
        timestamp_output: If True, create unique output directory with timestamp
                          If False, overwrite output at every run
        store_locally: If True, store in same directory where scenario scripts
                                are stored
                       If False, use environment variable TEPHRADATA for output.
        dircomment (string or None): Optional comment added to output dir
        echo (True or False): Optionally print output to screen as well as log file. Default True.
        verbose: (True, False) determine if diagnostic output is to be printed
        """

        params = params.copy()  # Ensure modifications are kept local

        #---------------------------------
        # AIM names, files and directories
        #---------------------------------

        # AIM names and directories
        self.scenario_name = scenario_name = params['scenario_name']

        import sys
        if len(sys.argv) > 1:
            # Assume that only postprocessing is requested using data in provided directory.
            self.postprocessing = True

            output_dir = sys.argv[1]
        else:
            # Create output dir

            self.postprocessing = False

            if output_dir is None:
                output_dir = build_output_dir(
                    tephra_output_dir=tephra_output_dir,
                    type_name='scenarios',
                    scenario_name=scenario_name,
                    dircomment=dircomment,
                    store_locally=store_locally,
                    timestamp_output=timestamp_output)

        # Base filename for all files in this scenario
        logpath = os.path.join(output_dir, 'logs')

        # Create dirs
        makedir(output_dir)
        makedir(logpath)

        # Record dirs and basefilenames
        self.output_dir = output_dir
        self.logbasepath = os.path.join(logpath, scenario_name)
        self.basepath = os.path.join(output_dir, scenario_name)

        if verbose:
            header('Running AIM/Fall3d scenario %s' % self.scenario_name)
            print 'Writing to %s' % output_dir

        # Get name of topographic grid
        self.topography_grid = params['topography_grid']

        # Derive projection file name
        basename, ext = os.path.splitext(self.topography_grid)
        self.projection_file = basename + '.prj'

        # Read projection if available
        self.WKT_projection = None  # Default - no projection
        self.projection = None  # Default - no projection

        # Take note of projection file if present
        try:
            infile = open(self.projection_file)
        except:
            msg = 'Projection file %s could not be opened. '\
                % self.projection_file
            msg += 'The topography file must have a projection file with '
            msg += 'extension .prj to georeference the model outputs '
            msg += 'correctly. The projection file is assumed to be '
            msg += 'ESRI WKT projection file '
            msg += 'named %s.' % self.projection_file
            raise Exception(msg)

        # Read in projection file
        self.WKT_projection = infile.read()

        # This section extracts projection details
        srs = osr.SpatialReference()
        srs.ImportFromWkt(self.WKT_projection)
        proj4 = srs.ExportToProj4()
        fields = proj4.split()

        zone = proj = datum = units = None

        if '+south' in fields:
            hemisphere = 'S'
        else:
            hemisphere = 'N'

        for field in fields:
            #print field

            res = field.split('=')
            if len(res) == 2:
                x, y = res
                if x == '+zone': zone = y
                if x == '+proj': proj = y
                if x == '+ellps': datum = y
                if x == '+units': units = y

        if verbose:
            header('Got georeferencing: %s' % str(proj4))

        self.projection = {}
        self.projection['zone'] = zone
        self.projection['hemisphere'] = hemisphere
        self.projection['proj'] = proj
        self.projection['datum'] = datum
        self.projection['units'] = units
        #print zone, hemisphere, proj, datum, units

        # Determine if topography is an AIM input file
        msg = 'AIM topography grid %s must have extension .txt' % self.topography_grid
        assert ext == '.txt', msg

        # FIXME: Deprecate native_AIM_topo option
        try:
            fid = open(self.topography_grid)
        except:
            self.native_AIM_topo = False
        else:
            fid.close()
            self.native_AIM_topo = True

        # Check wind profile
        msg = 'Keyword wind_profile must be present in AIM script and point to file containing wind data or to an ACCESS web site'
        assert 'wind_profile' in params, msg

        # If wind profile is an ACCESS web site: download, generate profile and point AIM to it
        if params['wind_profile'].find('://') > -1:
            # This looks like a web address - get the file list, generate profile and redefine 'wind_profile'

            vent_location = (params['x_coordinate_of_vent'],
                             params['y_coordinate_of_vent'], zone, hemisphere)
            params['wind_profile'] = get_profile_from_web(
                params['wind_profile'], vent_location, verbose=verbose)

        # Register wind profile
        wind_basename, wind_ext = os.path.splitext(params['wind_profile'])

        msg = 'Unknown format for wind field: %s. Allowed is .profile (the native FALL3D wind profile format)' % params[
            'wind_profile']
        assert wind_ext == '.profile', msg

        self.wind_profile = wind_basename + '.profile'  # Native FALL3D wind profile
        self.meteorological_model = params[
            'Meteorological_model'] = 'profile'  # Do NCEP later if needed

        #--------------------------------------
        # Fall3d specific files and directories
        #--------------------------------------

        # Fall3d directories
        self.Fall3d_dir = Fall3d_dir = get_fall3d_home()
        self.utilities_dir = os.path.join(Fall3d_dir, 'Utilities')

        # Fall3d input files
        self.inputfile = self.basepath + '.inp'
        self.grainfile = self.basepath + '.grn'
        self.sourcefile = self.basepath + '.src'

        # Topographic surfer grid generated from scenario_topography.txt
        self.topography = self.basepath + '.top'

        # Output database file
        self.databasefile = self.basepath + '.dbs.nc'

        # Output result file (Fall3d adds another .nc to this)
        self.resultfile = self.basepath + '.res'

        # Output Surfer grid file
        self.grdfile = self.basepath + '.grd'

        #----------------------------
        # Precomputations, checks etc
        #----------------------------

        # Verify that the right parameters have been provided
        #check_presence_of_required_parameters(params)

        # Derive implied spatial and modelling parameters
        derive_implied_parameters(self.topography_grid, self.projection,
                                  params)

        # Check that parameters are physically compatible
        check_parameter_ranges(params)
        self.params = params
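
The projection handling in __init__ boils down to splitting a PROJ.4 string on whitespace and reading a handful of key=value fields. The sketch below isolates that parsing step with a hypothetical UTM string; osr is only needed in the real code to obtain the string from the WKT projection file.

# Standalone sketch of the PROJ.4 parsing done in AIM.__init__
# (the example string is hypothetical).
proj4 = '+proj=utm +zone=49 +south +ellps=WGS84 +units=m +no_defs'
fields = proj4.split()

zone = proj = datum = units = None
hemisphere = 'S' if '+south' in fields else 'N'

for field in fields:
    res = field.split('=')
    if len(res) == 2:
        x, y = res
        if x == '+zone': zone = y
        if x == '+proj': proj = y
        if x == '+ellps': datum = y
        if x == '+units': units = y

projection = {'zone': zone, 'hemisphere': hemisphere,
              'proj': proj, 'datum': datum, 'units': units}
# projection == {'zone': '49', 'hemisphere': 'S', 'proj': 'utm',
#                'datum': 'WGS84', 'units': 'm'}
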
Example no. 32
0
def _run_scenario(scenario,
                  dircomment=None,
                  store_locally=False,
                  timestamp_output=True,
                  output_dir=None,
                  verbose=True):
    """Run volcanic ash impact scenario

    The argument scenario can be either
    * A Python script
    or
    * A Dictionary

    In any case scenario must specify all required
    volcanological parameters as stated in the file required_parameters.txt.

    If any parameters are missing or if additional parameters are
    specified an exception will be raised.

    Optional parameters:
      dircomment: will be added to output dir for easy identification.
      store_locally: if True, don't use TEPHRAHOME for outputs
      timestamp_output: If True, add timestamp to output dir
                        If False overwrite previous output with same name



    """

    t_start = time.time()

    params = get_scenario_parameters(scenario)

    if dircomment is None:
        dircomment = params['eruption_comment']

    # Determine if any of the parameters provide are a tuple
    # in which case each combination is run separately
    for name in params:
        p = params[name]
        if type(p) is tuple:
            # Unpack tuple and run scenario for each parameter value
            # This recursion will continue until no parameters
            # have tuples as values
            params_unpacked = params.copy()
            for value in p:
                params_unpacked[name] = value
                aim = run_scenario(params_unpacked,
                                   dircomment=dircomment + '_%s_%s' %
                                   (name, value),
                                   store_locally=store_locally,
                                   timestamp_output=timestamp_output,
                                   verbose=verbose)

            return

    # Instantiate model object
    aim = AIM(params,
              dircomment=dircomment,
              store_locally=store_locally,
              timestamp_output=timestamp_output,
              output_dir=output_dir,
              verbose=verbose)

    if not aim.postprocessing:
        # Store scenario script, input data files and
        # actual parameters to provide a complete audit trail
        aim.store_inputdata(verbose=verbose)

        # Generate input file for Fall3d-6
        aim.write_input_file(verbose=verbose)

        # Generate input data files in Fall3D format
        aim.generate_topography(verbose=verbose)

        # Run scripts for Fall3d
        aim.set_granum(verbose=verbose)
        aim.set_database(verbose=verbose)
        aim.set_source(verbose=verbose)
        aim.run_fall3d(verbose=verbose)

    # Fall3d postprocessing nc2grd
    #aim.nc2grd()

    # AIM post processing
    aim.convert_ncgrids_to_asciigrids(verbose=verbose)
    #aim.convert_surfergrids_to_asciigrids()
    aim.generate_contours(verbose=verbose)

    aim.organise_output()

    # Done
    if verbose:
        header('Simulation finished in %.2f seconds, output data are in %s' %
               (time.time() - t_start, aim.output_dir))

        # FIXME (Ole): Commented out due to parallelisation
        #try:
        #    target = os.readlink(aim.symlink)
        #except:
        #    header('WARNING: Shortcut %s does not appear to be working. Use real directory instead.' % aim.symlink)
        #    #print 'Error message was', e
        #else:
        #
        #    if target == aim.output_dir:
        #        header('Shortcut to output data is: %s -> %s' % (aim.symlink, target))
        #    else:
        #        header('WARNING: Shortcut %s has been changed by more recent run to: %s' % (aim.symlink, target))
        #
        print

    # Return object in case user wants access to it
    # (e.g. for further postprocessing)
    return aim
Example no. 33
0
    if modified:
        print 'Bash configuration file ~/.bashrc has been modified'
        print 'You can change it manually if you wish.'
        print
        
        
    
    #---------------------
    # Determine FALL3DHOME
    #---------------------
    if 'FALL3DHOME' in os.environ:
        FALL3DHOME = os.environ['FALL3DHOME']
    else:
        FALL3DHOME = os.getcwd()
        
    header('Fall3d will be installed in %s' % FALL3DHOME)
        
    makedir(FALL3DHOME)
    os.chdir(FALL3DHOME)
                
    #----------------
    # Download Fall3d version 6 (public version)
    # http://www.bsc.es/projects/earthscience/fall3d/Downloads/Fall3d-PUB.tar.gz
    #----------------

    path = os.path.join(url, tarball)

    if not os.path.isfile(tarball): 
        # FIXME: Should also check integrity of tgz file.
        cmd = 'wget ' + path
        run(cmd, verbose=True)
Example no. 34
0
    def __init__(self, params,
                 timestamp_output=True,
                 store_locally=False,
                 dircomment=None,
                 output_dir=None,
                 echo=True,
                 verbose=True):
        """Create AIM instance, common file names


        Optional arguments:
        timestamp_output: If True, create unique output directory with timestamp
                          If False, overwrite output at every run
        store_locally: If True, store in same directory where scenario scripts
                                are stored
                       If False, use environment variable TEPHRADATA for output.
        dircomment (string or None): Optional comment added to output dir
        echo (True or False): Optionally print output to screen as well as log file. Default True.
        verbose: (True, False) determine if diagnostic output is to be printed
        """

        params = params.copy() # Ensure modifications are kept local

        #---------------------------------
        # AIM names, files and directories
        #---------------------------------

        # AIM names and directories
        self.scenario_name = scenario_name = params['scenario_name']

        import sys
        if len(sys.argv) > 1:
            # Assume that only postprocessing is requested using data in provided directory.
            self.postprocessing = True

            output_dir = sys.argv[1]
        else:
            # Create output dir

            self.postprocessing = False


            if output_dir is None:
                output_dir = build_output_dir(tephra_output_dir=tephra_output_dir,
                                              type_name='scenarios',
                                              scenario_name=scenario_name,
                                              dircomment=dircomment,
                                              store_locally=store_locally,
                                              timestamp_output=timestamp_output)


        # Base filename for all files in this scenario
        logpath = os.path.join(output_dir, 'logs')

        # Create dirs
        makedir(output_dir)
        makedir(logpath)

        # Record dirs and basefilenames
        self.output_dir = output_dir
        self.logbasepath =  os.path.join(logpath, scenario_name)
        self.basepath = os.path.join(output_dir, scenario_name)

        if verbose:
            header('Running AIM/Fall3d scenario %s' % self.scenario_name)
            print 'Writing to %s' % output_dir

        # Get name of topographic grid
        self.topography_grid = params['topography_grid']

        # Derive projection file name
        basename, ext = os.path.splitext(self.topography_grid)
        self.projection_file = basename + '.prj'

        # Read projection if available
        self.WKT_projection = None # Default - no projection
        self.projection = None # Default - no projection

        # Take note of projection file if present
        try:
            infile = open(self.projection_file)
        except:
            msg = 'Projection file %s could not be opened. '\
                % self.projection_file
            msg += 'The topography file must have a projection file with '
            msg += 'extension .prj to georeference the model outputs '
            msg += 'correctly. The projection file is assumed to be '
            msg += 'ESRI WKT projection file '
            msg += 'named %s.' % self.projection_file
            raise Exception(msg)

        # Read in projection file
        self.WKT_projection = infile.read()

        # This section extracts projection details
        srs = osr.SpatialReference()
        srs.ImportFromWkt(self.WKT_projection)
        proj4 = srs.ExportToProj4()
        fields = proj4.split()

        zone = proj = datum = units = None

        if '+south' in fields:
            hemisphere = 'S'
        else:
            hemisphere = 'N'

        for field in fields:
            #print field

            res = field.split('=')
            if len(res) == 2:
                x, y = res
                if x == '+zone': zone = y
                if x == '+proj': proj = y
                if x == '+ellps': datum = y
                if x == '+units': units = y

        if verbose:
            header('Got georeferencing: %s' % str(proj4))

        self.projection = {}
        self.projection['zone'] = zone
        self.projection['hemisphere'] = hemisphere
        self.projection['proj'] = proj
        self.projection['datum'] = datum
        self.projection['units'] = units
        #print zone, hemisphere, proj, datum, units


        # Determine if topography is an AIM input file
        msg = 'AIM topography grid %s must have extension .txt' % self.topography_grid
        assert ext == '.txt', msg


        # FIXME: Deprecate native_AIM_topo option
        try:
            fid = open(self.topography_grid)
        except:
            self.native_AIM_topo = False
        else:
            fid.close()
            self.native_AIM_topo = True


        # Check wind profile
        msg = 'Keyword wind_profile must be present in AIM script and point to file containing wind data or to an ACCESS web site'
        assert 'wind_profile' in params, msg

        # If wind profile is an ACCESS web site: download, generate profile and point AIM to it
        if params['wind_profile'].find('://') > -1:
            # This looks like a web address - get the file list, generate profile and redefine 'wind_profile'

            vent_location = (params['x_coordinate_of_vent'],
                             params['y_coordinate_of_vent'],
                             zone, hemisphere)
            params['wind_profile'] = get_profile_from_web(params['wind_profile'], vent_location, verbose=verbose)



        # Register wind profile
        wind_basename, wind_ext = os.path.splitext(params['wind_profile'])

        msg = 'Unknown format for wind field: %s. Allowed is .profile (the native FALL3D wind profile format)' % params['wind_profile']
        assert wind_ext == '.profile', msg

        self.wind_profile = wind_basename + '.profile' # Native FALL3D wind profile
        self.meteorological_model = params['Meteorological_model'] = 'profile' # Do NCEP later if needed


        #--------------------------------------
        # Fall3d specific files and directories
        #--------------------------------------

        # Fall3d directories
        self.Fall3d_dir = Fall3d_dir = get_fall3d_home()
        self.utilities_dir = os.path.join(Fall3d_dir, 'Utilities')

        # Fall3d input files
        self.inputfile = self.basepath + '.inp'
        self.grainfile = self.basepath + '.grn'
        self.sourcefile = self.basepath + '.src'

        # Topographic surfer grid generated from scenario_topography.txt
        self.topography = self.basepath + '.top'

        # Output database file
        self.databasefile = self.basepath + '.dbs.nc'

        # Output result file (Fall3d adds another .nc to this)
        self.resultfile = self.basepath + '.res'

        # Output Surfer grid file
        self.grdfile = self.basepath + '.grd'


        #----------------------------
        # Precomputations, checks etc
        #----------------------------

        # Verify that the right parameters have been provided
        #check_presence_of_required_parameters(params)

        # Derive implied spatial and modelling parameters
        derive_implied_parameters(self.topography_grid, self.projection, params)

        # Check that parameters are physically compatible
        check_parameter_ranges(params)
        self.params = params
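

# A minimal sketch of the WKT -> proj4 parsing used above, assuming the GDAL
# Python bindings (osgeo.osr) are available. EPSG:32755 (WGS 84 / UTM zone 55S)
# is only an illustrative choice and is not taken from any scenario file.
from osgeo import osr

srs = osr.SpatialReference()
srs.ImportFromEPSG(32755)                     # Hypothetical example projection
proj4 = srs.ExportToProj4()                   # e.g. '+proj=utm +zone=55 +south ...'

projection = {'zone': None, 'proj': None, 'datum': None, 'units': None}
fields = proj4.split()
projection['hemisphere'] = 'S' if '+south' in fields else 'N'
for field in fields:
    res = field.split('=')
    if len(res) == 2:
        key, val = res
        if key == '+zone': projection['zone'] = val
        if key == '+proj': projection['proj'] = val
        if key == '+ellps': projection['datum'] = val   # Ellipsoid recorded under 'datum', mirroring the code above
        if key == '+units': projection['units'] = val

print projection
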
def create_trac_environment(project, administrator=None):
    """Create and configure TRAC
    """
    header('Creating TRAC configuration for %s' % project)        

    # Create trac home dir if it doesn't exist
    makedir(trac_home)

    project_home = os.path.join(trac_home, project)
    # Create environment 
    s = 'trac-admin %s initenv ' % project_home
    s += '%s ' % project  # Project name
    s += '%s ' % 'sqlite:db/trac.db' # Database connection string
    s += '%s ' % 'svn'    # Repository type
    s += '%s ' % os.path.join(svn_home, project) # Path to repository

    # Temporary fix to reflect changes from TRAC 0.10.4 to 0.11.1
    v = get_TRAC_version()
    if v not in ['0.11.1', '0.11.4']:
        # Templates directory (Only in TRAC 0.10.4, gone in 0.11.1)
        s += '/usr/share/trac/templates'

    s += ' > initenv.log'
    s += ' 2> initenv.err'
    err = run(s)
    if err != 0:
        msg = 'TRAC initenv failed to complete. See initenv.log and initenv.err for details'
        raise Exception(msg)
    # Clean up log files
    os.remove('initenv.log')
    os.remove('initenv.err')
    
    s = 'chown -R www-data:www-data %s' % project_home
    run(s)
    
    s = 'chmod -R 755 %s' % project_home
    run(s)        
    
    # Add information to the Apache web server
    fid = open_log('/etc/apache2/httpd.conf', 'a')
    fid.write('\n%s%s\n' % (trac_header, project))
    fid.write('<Location /projects/%s>\n' % project)
    fid.write('   SetHandler mod_python\n')
    fid.write('   PythonInterpreter main_interpreter\n')
    fid.write('   PythonHandler trac.web.modpython_frontend\n') 
    fid.write('   PythonOption TracEnv %s\n' % project_home)
    fid.write('   PythonOption TracUriRoot /projects/%s\n' % project)
    #fid.write('   PythonDebug on\n')
    fid.write('</Location>\n\n')
    
    fid.write('<Location /projects/%s/login>\n' % project)
    fid.write('   AuthType Basic\n')
    fid.write('   AuthName "%s"\n' % project)
    fid.write('   AuthUserFile %s\n' % password_filename)
    fid.write('   Require valid-user\n')
    fid.write('</Location>\n')
    
    fid.close()

    # Set default TRAC permissions
    os.chdir('%s' % project_home)
    s = "trac-admin . permission remove '*' '*'"
    run(s)
    #s = "trac-admin . permission add anonymous WIKI_VIEW"
    #run(s)
    #s = "trac-admin . permission add authenticated WIKI_ADMIN"
    #run(s)
    #s = "trac-admin . permission add authenticated TICKET_ADMIN"    
    #run(s)
    s = "trac-admin . permission add authenticated WIKI_VIEW"
    run(s)
    

    if administrator is not None:
        s = "trac-admin . permission add %s TRAC_ADMIN" % administrator   
        run(s)        

    # Patch trac.ini to avoid the annoying 'missing header_logo' warning
    filename = os.path.join(project_home, 'conf', 'trac.ini')
    
    replace_string_in_file(filename, 
                           'alt = (please configure the [header_logo] section in trac.ini)',
                           'alt = ')
    replace_string_in_file(filename, 
                           'src = site/your_project_logo.png',
                           'src =')
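
# The trac.ini patching above relies on a replace_string_in_file helper that is
# not shown in this example. A minimal sketch of what such a helper might look
# like (an assumption, not necessarily the project's actual implementation):
def replace_string_in_file(filename, old, new):
    """Replace every occurrence of 'old' with 'new' in the given text file."""
    fid = open(filename)
    text = fid.read()
    fid.close()

    fid = open(filename, 'w')
    fid.write(text.replace(old, new))
    fid.close()
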
Esempio n. 36
0
def download_wind_data(url, verbose=True):
    """Download data files
    """

    # Make sure work area exists
    makedir(work_area)

    # Get available files
    fid = urllib2.urlopen(url)
    #print dir(fid)    # Debugging output

    # Select files to download
    files = []
    timestamps = {}
    for line in fid.readlines():
        fields = line.split()
        filename = fields[-1]

        fields = filename.split('.')

        if fields[0] == 'IDY25300':
            msg = 'File %s obtained from %s does not look like an ACCESS file. I expected suffix .pressure.nc4' % (filename, url)
            assert filename.endswith('.pressure.nc4'), msg

            # Record each unique timestamp
            current_timestamp = fields[4]
            timestamps[current_timestamp] = None

            if fields[2] == 'all-flds' and fields[3] == 'all_lvls':
                hour = int(fields[5])
                if hour <= last_hour:
                    files.append(filename)


    if len(files) == 0:
        msg = 'Did not get any suitable ACCESS wind files from %s' % url
        raise Exception(msg)


    # Keep only those with the latest timestamp - in cases where more than one exist
    cur_t = time.mktime(time.strptime(current_timestamp, '%Y%m%d%H'))
    for timestamp in timestamps.keys():
        t = time.mktime(time.strptime(timestamp, '%Y%m%d%H'))

        if t > cur_t:
            current_timestamp = timestamp
            cur_t = t

    # Clear out files different from this batch (i.e. older)
    if verbose: print 'Selecting files with timestamp: %s' % current_timestamp
    for filename in os.listdir(work_area):

        if filename.endswith('.pressure.nc4'):
            timestamp = filename.split('.')[4]    # Timestamp field, consistent with the indexing used above

            if timestamp != current_timestamp:
                if verbose: print 'Deleting %s' % filename
                cmd = 'cd %s; /bin/rm -f %s' % (work_area, filename)
                run(cmd, verbose=False)

    # Download the latest files (if they already exist it won't take any bandwidth)
    for filename in files:

        timestamp = filename.split('.')[4]
        if timestamp == current_timestamp:
            if verbose: header('Downloading %s from %s' % (filename, url))
            cmd = 'cd %s; wget -c %s/%s' % (work_area, url, filename) # -c option requests wget to continue partial downloads
            run(cmd, verbose=verbose)
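
# A small sketch of the dot-separated field layout assumed by the parsing above.
# The file name below is made up purely for illustration (the second field is a
# placeholder); only the field positions matter:
#
#   IDY25300 . xxx . all-flds . all_lvls . 2010051800 . 006 . pressure . nc4
#   fields[0]  [1]   [2]        [3]        [4]          [5]
#
# so fields[4] carries the base timestamp (YYYYMMDDHH) and fields[5] the forecast hour.
example = 'IDY25300.xxx.all-flds.all_lvls.2010051800.006.pressure.nc4'
fields = example.split('.')
assert fields[2] == 'all-flds' and fields[3] == 'all_lvls'
print 'Timestamp: %s, forecast hour: %i' % (fields[4], int(fields[5]))
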
Esempio n. 37
0
def run_multiple_windfields(scenario,
                            windfield_directory=None,
                            hazard_output_folder=None,
                            dircomment=None,
                            echo=False,
                            verbose=True):
    """Run volcanic ash impact model for multiple wind fields.

    The wind fields are assumed to be in the subfolder specified by windfield_directory,
    have the extension *.profile and follow the format used with scenarios.

    This function makes use of Open MPI and Pypar to execute in parallel but can also run sequentially.
    """

    try:
        import pypar
    except ImportError:
        pypar = None    # Flag sequential mode (no MPI available)
        P = 1
        p = 0
        processor_name = os.uname()[1]

        print 'Pypar could not be imported. Running sequentially on node %s' % processor_name
    else:
        time.sleep(1)
        P = pypar.size()
        p = pypar.rank()
        processor_name = pypar.get_processor_name()

        print 'Processor %d initialised on node %s' % (p, processor_name)

        pypar.barrier()

    if p == 0:

        # Put logs along with the results
        logdir = os.path.join(hazard_output_folder, 'logs')
        makedir(logdir)

        header('Hazard modelling using multiple wind fields')
        print '*  Wind profiles obtained from: %s' % windfield_directory
        print '*  Scenario results stored in:  %s' % hazard_output_folder
        print '*  Log files:'

        t_start = time.time()

        # Communicate hazard output directory name to all nodes to ensure they have exactly the same time stamp.
        for i in range(1, P):
            pypar.send(hazard_output_folder, i)
    else:
        # Receive correctly timestamped output directory names
        hazard_output_folder = pypar.receive(0)
        logdir = os.path.join(hazard_output_folder, 'logs')

    try:
        name = os.path.splitext(scenario)[0]
    except:
        name = 'run'

    # Wait until log dir has been created
    if pypar is not None:
        pypar.barrier()

    params = get_scenario_parameters(scenario)

    # Start processes staggered to avoid race conditions for disk access (otherwise it is slow to get started)
    time.sleep(2 * p)

    # Logging
    s = 'Proc %i' % p
    print '     %s -' % string.ljust(s, 8),
    AIM_logfile = os.path.join(logdir, 'P%i.log' % p)
    start_logging(filename=AIM_logfile, echo=False)

    # Get cracking
    basename, _ = os.path.splitext(scenario)
    count_local = 0
    count_all = 0
    for i, file in enumerate(os.listdir(windfield_directory)):

        count_all += 1

        # Distribute jobs cyclically to processors
        if i % P == p:

            if not file.endswith('.profile'):
                continue

            count_local += 1

            windfield = '%s/%s' % (windfield_directory, file)
            windname, _ = os.path.splitext(file)
            header('Computing event %i on processor %i using wind field: %s' %
                   (i, p, windfield))

            if dircomment is None:
                dircomment = params['eruption_comment']

            # Override or create parameters derived from native Fall3d wind field
            params['wind_profile'] = windfield
            params['wind_altitudes'] = get_layers_from_windfield(
                windfield)  # FIXME: Try to comment this out.
            params['Meteorological_model'] = 'profile'

            if hazard_output_folder is None:
                hazard_output_folder = basename + '_hazard_outputs'

            if p == 0:
                print 'Storing multiple outputs in directory: %s' % hazard_output_folder

            # Run scenario
            aim = _run_scenario(params,
                                timestamp_output=True,
                                dircomment=dircomment + '_run%i_proc%i' %
                                (i, p))

            # Make sure folder is present and can be shared by group
            makedir(hazard_output_folder)
            s = 'chmod -R g+w %s' % hazard_output_folder
            run(s)

            # Copy result file to output folder
            result_file = aim.scenario_name + '.res.nc'
            newname = aim.scenario_name + '.%s.res.nc' % windname  # Name after wind file
            s = 'cp %s/%s %s/%s' % (aim.output_dir, result_file,
                                    hazard_output_folder, newname)
            run(s)

            # Create projectionfile in hazard output
            if i == 0:
                s = 'cp %s %s/%s' % (aim.projection_file, hazard_output_folder,
                                     'HazardMaps.res.prj')
                run(s)

            # Clean up outputs from this scenario
            print 'P%i: Cleaning up %s' % (p, aim.output_dir)
            s = '/bin/rm -rf %s' % aim.output_dir
            run(s)

    print 'Processor %i processed %i wind fields' % (p, count_local)
    print 'Outputs available in directory: %s' % hazard_output_folder

    if pypar is not None:
        pypar.barrier()
    if p == 0:
        print 'Parallel simulation finished %i wind fields in %i seconds' % (
            count_all, time.time() - t_start)

    if pypar is not None:
        pypar.finalize()
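
# A minimal sketch of the cyclic work distribution used above (i % P == p),
# runnable with or without pypar. The wind field names are made up for
# illustration; under MPI this would typically be launched via mpirun.
try:
    import pypar
    P, p = pypar.size(), pypar.rank()
except ImportError:
    pypar = None
    P, p = 1, 0                                 # Sequential fallback

windfields = ['windfield_%02i.profile' % i for i in range(8)]   # Hypothetical names
for i, filename in enumerate(windfields):
    if i % P == p:                              # Each processor takes every P-th job
        print 'Processor %i would run wind field %s' % (p, filename)

if pypar is not None:
    pypar.barrier()
    pypar.finalize()
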
        
        if user_exists:
            # Make sure user is authenticated
            update_authentication_file(administrator)
        else:
            print 'Requested administrator %s does not exist as Subversion/TRAC user' % administrator
            print 'Please run the script: create_user.py %s first' % administrator 
            import sys; sys.exit()
    else: 
        administrator = None


    check_existence_of_project(project)        
    create_subversion_repository(project)
    create_trac_environment(project, administrator)    
    
    # Restart web server
    run('/etc/init.d/apache2 restart')

    print
    header('Repository %s created.' % project)
    print 'The Subversion URL is http://<web server>/svn/%s' % project
    print 'The TRAC pages are available at http://<web server>/projects/%s' % project
    print
    print 'Files modified:'
    print filenames_updated.keys()
    print 
    print 'Possibly modify file %s for access details' % auth_filename
    #os.system('cat %s' % auth_filename)