Example no. 1
    def checkconsistency(self, md, solution, analyses):  # {{{
        md = checkfield(md, 'fieldname', 'mask.groundedice_levelset', 'size',
                        [md.mesh.numberofvertices])
        md = checkfield(md, 'fieldname', 'mask.ice_levelset', 'size',
                        [md.mesh.numberofvertices])
        md = checkfield(md, 'fieldname', 'mask.ocean_levelset', 'size',
                        [md.mesh.numberofvertices])
        md = checkfield(md, 'fieldname', 'mask.land_levelset', 'size',
                        [md.mesh.numberofvertices])
        isice = (md.mask.ice_levelset <= 0)
        if sum(isice) == 0:
            print('no ice present in the domain')

        if max(md.mask.ice_levelset) < 0:
            print('no ice front provided')

        elements = md.mesh.elements - 1
        elements = elements.astype(np.int32, copy=False)
        icefront = np.sum(md.mask.ice_levelset[elements] == 0, axis=1)
        if (max(icefront) == 3 and m.strcmp(md.mesh.elementtype(), 'Tria')) or (
                max(icefront) == 6 and m.strcmp(md.mesh.elementtype(), 'Penta')):
            raise RuntimeError(
                'At least one element has all nodes on ice front, change md.mask.ice_levelset to fix it'
            )

        return md
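A minimal standalone sketch of the ice-front test above, on a hypothetical two-triangle mesh (names and values are illustrative): counting, for each element, how many of its nodes carry a zero levelset value flags elements that lie entirely on the ice front.

    import numpy as np

    # hypothetical 4-vertex, 2-triangle mesh (1-based connectivity, as in ISSM)
    elements = np.array([[1, 2, 3], [2, 3, 4]])
    ice_levelset = np.array([0., 0., 0., -1.])  # 0 = ice front, <0 = ice

    # per-element count of nodes sitting on the ice front
    icefront = np.sum(ice_levelset[elements - 1] == 0, axis=1)
    print(icefront)            # [3 2]: the first triangle has all 3 nodes on the front
    print(max(icefront) == 3)  # True, which would trigger the error raised above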
Example no. 2
def displayunit(offset, name, characterization, comment):  # {{{

    #take care of name
    if len(name) > 23:
        name = "%s..." % name[:20]

    #take care of characterization
    if m.strcmp(characterization, "''") or m.strcmp(
            characterization, '""') or m.strcmpi(characterization, 'nan'):
        characterization = "N/A"

    if len(characterization) > 15:
        characterization = "%s..." % characterization[:12]

    #print
    if not comment:
        string = "%s%-23s: %-15s" % (offset, name, characterization)
    else:
        if isinstance(comment, str):
            string = "%s%-23s: %-15s -- %s" % (offset, name, characterization,
                                               comment)
        elif isinstance(comment, list):
            string = "%s%-23s: %-15s -- %s" % (offset, name, characterization,
                                               comment[0])
            for commenti in comment:
                string += "\n%s%-23s  %-15s    %s" % (offset, '', '', commenti)
        else:
            raise RuntimeError(
                "fielddisplay error message: format for comment not supported yet"
            )

    return string
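A small usage sketch of displayunit (assuming it is importable from the module above, presumably fielddisplay.py; the field name, value and comment are illustrative):

    # the name is padded to 23 characters, the characterization to 15,
    # and the optional comment is appended after ' -- '
    print(displayunit('   ', 'rheology_B', '2.1e+08', 'ice rigidity'))

    # characterizations equal to "''", '""' or 'nan' are shown as N/A,
    # and names longer than 23 characters are truncated with '...'
    print(displayunit('   ', 'a_very_long_field_name_indeed', "''", ''))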
Example no. 3
def waitonlock(md):
    """
	WAITONLOCK - wait for a file
 
	   This routine returns when the run's .lock file is written to disk.
	   If the time limit md.settings.waitonlock is exceeded, it returns 0.
 
	   Usage:
	      flag=waitonlock(md)
	"""

    #Get filename (lock file) and options
    executionpath = md.cluster.executionpath
    cluster = md.cluster.name
    login = md.cluster.login
    port = md.cluster.port
    timelimit = md.settings.waitonlock
    filename = os.path.join(executionpath, md.private.runtimename,
                            md.miscellaneous.name + '.lock')

    #waitonlock will work if the lock is on the same machine only:
    if not m.strcmpi(gethostname(), cluster):

        print('solution launched on remote cluster. log in to detect job completion.')
        choice = input('Is the job successfully completed? (y/n) ')
        if not m.strcmp(choice, 'y'):
            print('Results not loaded... exiting')
            flag = 0
        else:
            flag = 1

    #job is running on the same machine
    else:

        if 'interactive' in vars(md.cluster) and md.cluster.interactive:
            #We are in interactive mode, no need to check for job completion
            flag = 1
            return flag
        #initialize time and file presence test flag
        etime = 0
        ispresent = 0
        print "waiting for '%s' hold on... (Ctrl+C to exit)" % filename

        #loop till file .lock exist or time is up
        while ispresent == 0 and etime < timelimit:
            ispresent = os.path.exists(filename)
            time.sleep(1)
            etime += 1. / 60  #etime is counted in minutes

        #build output
        if not ispresent:
            print('Time limit exceeded. Increase md.settings.waitonlock')
            print('The results must be loaded manually with md=loadresultsfromcluster(md).')
            flag = 0
        else:
            flag = 1

    return flag
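A hedged usage sketch: waitonlock is normally called by the solve sequence, but it can also be invoked directly after a job has been launched, to block until the .lock file shows up (md is assumed to be a model whose run has already been submitted):

    flag = waitonlock(md)
    if flag:
        md = loadresultsfromcluster(md)   # lock file found: fetch and load the results
    else:
        print('job not finished; load the results later with loadresultsfromcluster(md)')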
Example no. 4
    def checkconsistency(self, md, solution, analyses):  # {{{
        if (not self.riftstruct) or numpy.any(isnans(self.riftstruct)):
            numrifts = 0
        else:
            numrifts = len(self.riftstruct)

        if numrifts:
            if not m.strcmp(md.mesh.domaintype(), '2Dhorizontal'):
                md.checkmessage(
                    "models with rifts are only supported in 2d for now!")
            if not isinstance(self.riftstruct, list):
                md.checkmessage("rifts.riftstruct should be a structure!")
            if numpy.any(md.mesh.segmentmarkers >= 2):
                #We have segments with rift markers, but no rift structure!
                md.checkmessage(
                    "model should be processed for rifts (run meshprocessrifts)!"
                )
            for i, rift in enumerate(self.riftstruct):
                md = checkfield(
                    md, 'fieldname', "rifts.riftstruct[%d]['fill']" % i,
                    'values',
                    [WaterEnum(),
                     AirEnum(),
                     IceEnum(),
                     MelangeEnum()])
        else:
            if self.riftstruct and numpy.any(
                    numpy.logical_not(isnans(self.riftstruct))):
                md.checkmessage(
                    "riftstruct should be NaN since numrifts is 0!")

        return md
Example no. 5
def list_display(offset, name, field, comment):  # {{{

    #initialization
    if isinstance(field, list):
        sbeg = '['
        send = ']'
    elif isinstance(field, tuple):
        sbeg = '('
        send = ')'
    string = sbeg

    #go through the cell and fill string
    if len(field) < 5:
        for fieldi in field:
            if isinstance(fieldi, str):
                string += "'%s'," % fieldi
            elif isinstance(fieldi, (bool, int, float)):
                string += "%s," % str(fieldi)
            else:
                string = sbeg
                break

    if m.strcmp(string, sbeg):
        string = "%s%dx1%s" % (sbeg, len(field), send)
    else:
        string = string[:-1] + send

    #call displayunit
    return displayunit(offset, name, string, comment)
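A minimal usage sketch of list_display (assuming it is imported alongside displayunit above): lists of fewer than five str/bool/int/float entries are spelled out, anything else is summarized as 'Nx1'.

    print(list_display('   ', 'steps', [1, 2, 3], 'requested steps'))
    # characterization becomes "[1,2,3]"

    print(list_display('   ', 'outputs', ['Vx', 'Vy', 'Vel', 'Pressure', 'Thickness'], ''))
    # five entries or more -> characterization becomes "[5x1]"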
Example no. 6
def loadresultsfromdisk(md,filename):
	"""
	LOADRESULTSFROMDISK - load results of solution sequence from disk file "filename"            
 
	   Usage:
	      md=loadresultsfromdisk(md,filename)
	"""

	#check number of inputs/outputs
	if not md or not filename:
		raise ValueError("loadresultsfromdisk: error message.")

	if not md.qmu.isdakota:

		#Check that file exists
		if not os.path.exists(filename):
			raise OSError("binary file '%s' not found." % filename)

		#initialize md.results if not a structure yet
		if not isinstance(md.results,results):
			md.results=results()

		#load results onto model
		structure=parseresultsfromdisk(filename,not md.settings.io_gather)
		if not len(structure):
			raise RuntimeError("No result found in binary file '%s'. Check for solution crash." % filename)
		setattr(md.results,structure[0].SolutionType,structure)

		#recover solution_type from results
		md.private.solution=structure[0].SolutionType

		#read log files onto fields
		if os.path.exists(md.miscellaneous.name+'.errlog'):
			with open(md.miscellaneous.name+'.errlog','r') as f:
				setattr(getattr(md.results,structure[0].SolutionType)[0],'errlog',[line[:-1] for line in f])
		else:
			setattr(getattr(md.results,structure[0].SolutionType)[0],'errlog',[])

		if os.path.exists(md.miscellaneous.name+'.outlog'):
			with open(md.miscellaneous.name+'.outlog','r') as f:
				setattr(getattr(md.results,structure[0].SolutionType)[0],'outlog',[line[:-1] for line in f])
		else:
			setattr(getattr(md.results,structure[0].SolutionType)[0],'outlog',[])

		if len(getattr(md.results,structure[0].SolutionType)[0].errlog):
			print ("loadresultsfromcluster info message: error during solution. Check your errlog and outlog model fields.")

		#if only one solution, extract it from list for user friendliness
		if len(structure) == 1 and not m.strcmp(structure[0].SolutionType,'TransientSolution'):
			setattr(md.results,structure[0].SolutionType,structure[0])

	#post processes qmu results if necessary
	else:

		if not isinstance(md.private.solution,str):
			[md.private.solution]=EnumToString(md.private.solution)
		md=postqmu(md)
		os.chdir('..')

	return md
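A short usage sketch; the '.outbin' extension is assumed here (the routine above takes whatever file name it is given):

    # after a run has completed, read the results back onto md.results
    md = loadresultsfromdisk(md, md.miscellaneous.name + '.outbin')  # extension assumed
    print(md.private.solution)   # e.g. 'StressbalanceSolution'; actual fields depend on the run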
Example no. 7
	def perform(self,string):    # {{{

		flag=False

		#Some checks
		if not isinstance(string,str):
			raise TypeError("Step provided should be a string")
		if not m.strcmp(string,string.strip()) or len(string.split()) > 1:
			raise TypeError("Step provided should not have any white space")
		if self._currentstep>0 and string in [step['string'] for step in self.steps]:
			raise RuntimeError("Step '%s' already present. Change name" % string) 

		#Add step
		self.steps.append(OrderedDict())
		self.steps[-1]['id']=len(self.steps)
		self.steps[-1]['string']=string
		self._currentstep+=1

		#if requestedsteps = 0, print all steps in self 
		if 0 in self.requestedsteps:
			if self._currentstep==1:
				print "   prefix: %s" % self.prefix
			print "   step #%i : %s" % (self.steps[self._currentstep-1]['id'],self.steps[self._currentstep-1]['string'])

		#Ok, now if _currentstep is a member of steps, return true
		if self._currentstep in self.requestedsteps:
			print "\n   step #%i : %s\n" % (self.steps[self._currentstep-1]['id'],self.steps[self._currentstep-1]['string'])
			bool=True

		#assign self back to calling workspace
		# (no need, since Python modifies class instance directly)

		return flag
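A hedged sketch of the runme-style loop perform is designed for (the organizer constructor shown in a later example takes repository, prefix and steps; the names and steps here are illustrative):

    org = organizer(repository='./Models', prefix='Pig.', steps=[1])  # run only step 1 ('Mesh')

    if org.perform('Mesh'):
        md = triangle(model(), 'DomainOutline.exp', 10000.)
        # ...save md under the organizer prefix, typically via savemodel(org, md)

    if org.perform('Parameterization'):
        # ...reload the previous step's model, parameterize it, save again
        pass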
Example no. 8
def triangle(md,domainname,*args):
	"""
	TRIANGLE - create model mesh using the triangle package

	   This routine creates a model mesh using TriMesh and a domain outline, to within a certain resolution
	   where md is a @model object, domainname is the name of an Argus domain outline file, 
	   and resolution is a characteristic length for the mesh (same unit as the domain outline
	   unit). Riftname is an optional argument (Argus domain outline) describing rifts.

	   Usage:
	      md=triangle(md,domainname,resolution)
	   or md=triangle(md,domainname, resolution, riftname)

	   Examples:
	      md=triangle(md,'DomainOutline.exp',1000);
	      md=triangle(md,'DomainOutline.exp',1000,'Rifts.exp');
	"""

	#Figure out a characteristic area. Resolution is a node-oriented concept
	#(e.g. a 1000 m resolution node corresponds to a 1000 m x 1000 m area).

	if len(args)==1:
		resolution=args[0]
		riftname=''
	if len(args)==2:
		riftname=args[0]
		resolution=args[1]

	#Check that mesh was not already run, and warn user: 
	if md.mesh.numberofelements:
		choice = input('This model already has a mesh. Are you sure you want to go ahead? (y/n)')
		if not m.strcmp(choice,'y'):
			print('no meshing done ... exiting')
			return None

	area = resolution**2

	#Mesh using TriMesh
	md.mesh=mesh2d()
	[md.mesh.elements,md.mesh.x,md.mesh.y,md.mesh.segments,md.mesh.segmentmarkers]=TriMesh(domainname,riftname,area)
	md.mesh.elements=md.mesh.elements.astype(int)
	md.mesh.segments=md.mesh.segments.astype(int)
	md.mesh.segmentmarkers=md.mesh.segmentmarkers.astype(int)

	#Fill in rest of fields:
	md.mesh.numberofelements = numpy.size(md.mesh.elements,axis=0)
	md.mesh.numberofvertices = numpy.size(md.mesh.x)
	md.mesh.vertexonboundary = numpy.zeros(md.mesh.numberofvertices,bool)
	md.mesh.vertexonboundary[md.mesh.segments[:,0:2]-1] = True

	#Now, build the connectivity tables for this mesh.
	[md.mesh.vertexconnectivity] = NodeConnectivity(md.mesh.elements, md.mesh.numberofvertices)
	[md.mesh.elementconnectivity] = ElementConnectivity(md.mesh.elements, md.mesh.vertexconnectivity)

	return md
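A brief usage sketch (assuming model, mesh2d and TriMesh are importable as in the routine above; the file name and resolution are illustrative):

    md = triangle(model(), 'DomainOutline.exp', 1000.)
    print(md.mesh.numberofvertices, md.mesh.numberofelements)
    print(md.mesh.elements.shape)   # (numberofelements, 3); vertex indices are 1-based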
Example no. 9
    def __init__(self, **kwargs):  # {{{
        self._currentstep = 0
        self.repository = './'
        self.prefix = 'model.'
        self.trunkprefix = ''
        self.steps = []
        self.requestedsteps = [0]

        #process options
        options = pairoptions.pairoptions(**kwargs)

        #Get prefix
        prefix = options.getfieldvalue('prefix', 'model.')
        if not isinstance(prefix, str):
            raise TypeError("prefix is not a string")
        if not m.strcmp(prefix, prefix.strip()) or len(prefix.split()) > 1:
            raise TypeError("prefix should not have any white space")
        self.prefix = prefix

        #Get repository
        repository = options.getfieldvalue('repository', './')
        if not isinstance(repository, str):
            raise TypeError("repository is not a string")
        if not os.path.isdir(repository):
            raise IOError("Directory '%s' not found" % repository)
        self.repository = repository

        #Get steps
        self.requestedsteps = options.getfieldvalue('steps', [0])

        #Get trunk prefix (only if provided by user)
        if options.exist('trunkprefix'):
            trunkprefix = options.getfieldvalue('trunkprefix', '')
            if not isinstance(trunkprefix, str):
                raise TypeError("trunkprefix is not a string")
            if not m.strcmp(trunkprefix, trunkprefix.strip()) or len(
                    trunkprefix.split()) > 1:
                raise TypeError("trunkprefix should not have any white space")
            self.trunkprefix = trunkprefix
Example no. 10
	def checkconsistency(self,md,solution,analyses):    # {{{

		#Early return
		if (StressbalanceAnalysisEnum() not in analyses and StressbalanceSIAAnalysisEnum() not in analyses) or (solution==TransientSolutionEnum() and not md.transient.isstressbalance):
			return md

		md = checkfield(md,'fieldname','flowequation.isSIA','numel',[1],'values',[0,1])
		md = checkfield(md,'fieldname','flowequation.isSSA','numel',[1],'values',[0,1])
		md = checkfield(md,'fieldname','flowequation.isL1L2','numel',[1],'values',[0,1])
		md = checkfield(md,'fieldname','flowequation.isHO','numel',[1],'values',[0,1])
		md = checkfield(md,'fieldname','flowequation.isFS','numel',[1],'values',[0,1])
		md = checkfield(md,'fieldname','flowequation.fe_SSA','values',['P1','P1bubble','P1bubblecondensed','P2','P2bubble'])
		md = checkfield(md,'fieldname','flowequation.fe_HO' ,'values',['P1','P1bubble','P1bubblecondensed','P1xP2','P2xP1','P2','P2bubble','P1xP3','P2xP4'])
		md = checkfield(md,'fieldname','flowequation.fe_FS' ,'values',['P1P1','P1P1GLS','MINIcondensed','MINI','TaylorHood','XTaylorHood','OneLayerP4z','CrouzeixRaviart'])
		md = checkfield(md,'fieldname','flowequation.borderSSA','size',[md.mesh.numberofvertices],'values',[0,1])
		md = checkfield(md,'fieldname','flowequation.borderHO','size',[md.mesh.numberofvertices],'values',[0,1])
		md = checkfield(md,'fieldname','flowequation.borderFS','size',[md.mesh.numberofvertices],'values',[0,1])
		md = checkfield(md,'fieldname','flowequation.augmented_lagrangian_r','numel',[1],'>',0.)
		md = checkfield(md,'fieldname','flowequation.augmented_lagrangian_rhop','numel',[1],'>',0.)
		md = checkfield(md,'fieldname','flowequation.augmented_lagrangian_rlambda','numel',[1],'>',0.)
		md = checkfield(md,'fieldname','flowequation.augmented_lagrangian_rholambda','numel',[1],'>',0.)
		md = checkfield(md,'fieldname','flowequation.XTH_theta','numel',[1],'>=',0.,'<',.5)
		if m.strcmp(md.mesh.domaintype(),'2Dhorizontal'):
			md = checkfield(md,'fieldname','flowequation.vertex_equation','size',[md.mesh.numberofvertices],'values',[1,2])
			md = checkfield(md,'fieldname','flowequation.element_equation','size',[md.mesh.numberofelements],'values',[1,2])
		elif m.strcmp(md.mesh.domaintype(),'3D'):
			md = checkfield(md,'fieldname','flowequation.vertex_equation','size',[md.mesh.numberofvertices],'values',numpy.arange(0,8+1))
			md = checkfield(md,'fieldname','flowequation.element_equation','size',[md.mesh.numberofelements],'values',numpy.arange(0,8+1))
		else:
			raise RuntimeError('mesh type not supported yet')
		if not (self.isSIA or self.isSSA or self.isL1L2 or self.isHO or self.isFS):
			md.checkmessage("no element types set for this model")

		if StressbalanceSIAAnalysisEnum() in analyses:
			if any(self.element_equation==1):
				if numpy.any(numpy.logical_and(self.vertex_equation,md.mask.groundedice_levelset)):
					print("\n !!! Warning: SIA's model is not consistent on ice shelves !!!\n")

		return md
Example no. 11
    def checkconsistency(self, md, solution, analyses):  # {{{

        md = checkfield(md, 'fieldname', 'groundingline.migration', 'values', [
            'None', 'AggressiveMigration', 'SoftMigration',
            'SubelementMigration', 'SubelementMigration2', 'Contact',
            'GroundingOnly'
        ])

        if not m.strcmp(self.migration, 'None'):
            if np.any(np.isnan(md.geometry.bed)):
                md.checkmessage(
                    "requesting grounding line migration, but bathymetry is absent!"
                )
            pos = np.nonzero(md.mask.groundedice_levelset > 0.)[0]
            if any(
                    np.abs(md.geometry.base[pos] -
                           md.geometry.bed[pos]) > 10**-10):
                md.checkmessage("base not equal to bed on grounded ice!")
            if any(md.geometry.bed - md.geometry.base > 10**-9):
                md.checkmessage("bed superior to base on floating ice!")

        return md
Example no. 12
def FlagElements(md, region):
    """
	FLAGELEMENTS - flag the elements in an region

	   The region can be given with an exp file, a list of elements or vertices

	   Usage: 
	      flag=FlagElements(md,region);

	   Example:
	      flag=FlagElements(md,'all');
	      flag=FlagElements(md,'');
	      flag=FlagElements(md,'Domain.exp');
	      flag=FlagElements(md,'~Domain.exp');
	"""

    if isinstance(region, str):
        if not region:
            flag = np.zeros(md.mesh.numberofelements, bool)
            invert = 0
        elif m.strcmpi(region, 'all'):
            flag = np.ones(md.mesh.numberofelements, bool)
            invert = 0
        else:
            #make sure that we actually don't want the elements outside the domain outline!
            if m.strcmpi(region[0], '~'):
                region = region[1:]
                invert = 1
            else:
                invert = 0

            #does the region domain outline exist or do we have to look for xlim,ylim in basinzoom?
            if not os.path.exists(region):
                if len(region) > 3 and not m.strcmp(region[-4:], '.exp'):
                    raise IOError("Error: File 'region' not found!" % region)
                raise RuntimeError(
                    "FlagElements.py calling basinzoom.py is not complete.")
                xlim, ylim = basinzoom('basin', region)
                flag_nodes = p.logical_and_n(md.mesh.x < xlim[1],
                                             md.mesh.x > xlim[0],
                                             md.mesh.y < ylim[1],
                                             md.mesh.y > ylim[0])
                flag = np.prod(flag_nodes[md.mesh.elements],
                               axis=1).astype(bool)
            else:
                #ok, flag elements
                flag = ContourToMesh(md.mesh.elements[:,
                                                      0:3].copy(), md.mesh.x,
                                     md.mesh.y, region, 'element', 1)
                flag = flag.astype(bool)

        if invert:
            flag = np.logical_not(flag)

    elif isinstance(region, np.ndarray) or isinstance(region, bool):
        if np.size(region, 0) == md.mesh.numberofelements:
            flag = region
        elif np.size(region, 0) == md.mesh.numberofvertices:
            flag = (np.sum(region[md.mesh.elements - 1] > 0,
                           axis=1) == np.size(md.mesh.elements, 1))
        else:
            raise TypeError(
                "Flaglist for region must be of same size as number of elements in model."
            )

    else:
        raise TypeError("Invalid region option")

    return flag
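A brief usage sketch of FlagElements (the domain file name is illustrative):

    flag_all   = FlagElements(md, 'all')          # every element flagged
    flag_none  = FlagElements(md, '')             # no element flagged
    flag_in    = FlagElements(md, 'Domain.exp')   # elements inside the outline
    flag_out   = FlagElements(md, '~Domain.exp')  # elements outside the outline

    # a per-vertex array is also accepted: an element is flagged only
    # when all of its vertices are flagged
    flag_north = FlagElements(md, md.mesh.y > 0.)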
Example no. 13
File: bamg.py Project: pf4d/issm
def bamg(md, **kwargs):
    """
	BAMG - mesh generation

	   Available options (for more details see ISSM website http://issm.jpl.nasa.gov/):

	   - domain :            followed by an ARGUS file that prescribes the domain outline
	   - hmin :              minimum edge length (default is 10^-100)
	   - hmax :              maximum edge length (default is 10^100)
	   - hVertices :         imposed edge length for each vertex (geometry or mesh)
	   - hminVertices :      minimum edge length for each vertex (mesh)
	   - hmaxVertices :      maximum edge length for each vertex (mesh)

	   - anisomax :          maximum ratio between the smallest and largest edges (default is 10^30)
	   - coeff :             coefficient applied to the metric (2-> twice as many elements, default is 1)
	   - cutoff :            scalar used to compute the metric when metric type 2 or 3 are applied
	   - err :               error used to generate the metric from a field
	   - errg :              geometric error (default is 0.1)
	   - field :             field of the model that will be used to compute the metric
	                         to apply several fields, use one column per field
	   - gradation :         maximum ratio between two adjacent edges
	   - Hessiantype :       0 -> use double P2 projection (default)
	                         1 -> use Green formula
	   - KeepVertices :      try to keep initial vertices when adaptation is done on an existing mesh (default 1)
	   - MaxCornerAngle :    maximum angle of corners in degree (default is 10)
	   - maxnbv :            maximum number of vertices used to allocate memory (default is 10^6)
	   - maxsubdiv :         maximum subdivision of existing elements (default is 10)
	   - metric :            matrix (numberofnodes x 3) used as a metric
	   - Metrictype :        1 -> absolute error          c/(err coeff^2) * Abs(H)        (default)
	                         2 -> relative error          c/(err coeff^2) * Abs(H)/max(s,cutoff*max(s))
	                         3 -> rescaled absolute error c/(err coeff^2) * Abs(H)/(smax-smin)
	   - nbjacobi :          correction used by Hessiantype=1 (default is 1)
	   - nbsmooth :          number of metric smoothing procedure (default is 3)
	   - omega :             relaxation parameter of the smoothing procedure (default is 1.8)
	   - power :             power applied to the metric (default is 1)
	   - splitcorners :      split triangles which have 3 vertices on the outline (default is 1)
	   - geometricalmetric : take the geometry into account to generate the metric (default is 0)
	   - verbose :           level of verbosity (default is 1)

	   - rifts :             followed by an ARGUS file that prescribes the rifts
	   - toltip :            tolerance to move tip on an existing point of the domain outline
	   - tracks :            followed by an ARGUS file that prescribes the tracks that the mesh will stick to
	   - RequiredVertices :  mesh vertices that are required. [x,y,ref]; ref is optional
	   - tol :               if the distance between 2 points of the domain outline is less than tol, they
	                         will be merged

	   Examples:
	      md=bamg(md,'domain','DomainOutline.exp','hmax',3000);
	      md=bamg(md,'field',[md.inversion.vel_obs md.geometry.thickness],'hmax',20000,'hmin',1000);
	      md=bamg(md,'metric',A,'hmin',1000,'hmax',20000,'gradation',3,'anisomax',1);
	"""

    #process options
    options = pairoptions(**kwargs)
    #	options=deleteduplicates(options,1);

    #initialize the structures required as input of Bamg
    bamg_options = OrderedDict()
    bamg_geometry = bamggeom()
    bamg_mesh = bamgmesh()

    # Bamg Geometry parameters {{{
    if options.exist('domain'):

        #Check that file exists
        domainfile = options.getfieldvalue('domain')
        if not os.path.exists(domainfile):
            raise IOError("bamg error message: file '%s' not found" %
                          domainfile)
        domain = expread(domainfile)

        #Build geometry
        count = 0
        for i, domaini in enumerate(domain):

            #Check that the domain is closed
            if (domaini['x'][0] != domaini['x'][-1]
                    or domaini['y'][0] != domaini['y'][-1]):
                raise RuntimeError(
                    "bamg error message: all contours provided in ''domain'' should be closed"
                )

            #Check that all holes are INSIDE the principal domain outline
            if i:
                flags = ContourToNodes(domaini['x'], domaini['y'], domainfile,
                                       0)
                if numpy.any(numpy.logical_not(flags)):
                    raise RuntimeError(
                        "bamg error message: All holes should be strictly inside the principal domain"
                    )

            #Add all points to bamg_geometry
            nods = domaini['nods'] - 1  #the contour is closed: the last point duplicates the first, so drop it
            bamg_geometry.Vertices = numpy.vstack(
                (bamg_geometry.Vertices,
                 numpy.hstack(
                     (domaini['x'][0:nods].reshape(-1, 1),
                      domaini['y'][0:nods].reshape(-1, 1), numpy.ones(
                          (nods, 1))))))
            bamg_geometry.Edges = numpy.vstack(
                (bamg_geometry.Edges,
                 numpy.hstack(
                     (numpy.arange(count + 1, count + nods + 1).reshape(-1, 1),
                      numpy.hstack((numpy.arange(count + 2, count + nods + 1),
                                    count + 1)).reshape(-1,
                                                        1), 1. * numpy.ones(
                                                            (nods, 1))))))
            if i:
                bamg_geometry.SubDomains = numpy.vstack(
                    (bamg_geometry.SubDomains, [2, count + 1, 1, 1]))

            #update counter
            count += nods

        #take care of rifts
        if options.exist('rifts'):

            #Check that file exists
            riftfile = options.getfieldvalue('rifts')
            if not os.path.exists(riftfile):
                raise IOError("bamg error message: file '%s' not found" %
                              riftfile)
            rift = expread(riftfile)

            for i, rifti in enumerate(rift):

                #detect whether all points of the rift are inside the domain
                flags = ContourToNodes(rifti['x'], rifti['y'], domain[0], 0)
                if numpy.all(numpy.logical_not(flags)):
                    raise RuntimeError(
                        "one rift has all its points outside of the domain outline"
                    )

                elif numpy.any(numpy.logical_not(flags)):
                    #We have LOTS of work to do
                    print(
                        "Rift tip outside of or on the domain has been detected and is being processed..."
                    )

                    #check that only one point is outside (for now)
                    if numpy.sum(numpy.logical_not(flags).astype(int)) != 1:
                        raise RuntimeError(
                            "bamg error message: only one point outside of the domain is supported yet"
                        )

                    #Move tip outside to the first position
                    if not flags[0]:
                        #OK, first point is outside (do nothing),
                        pass
                    elif not flags[-1]:
                        rifti['x'] = numpy.flipud(rifti['x'])
                        rifti['y'] = numpy.flipud(rifti['y'])
                    else:
                        raise RuntimeError(
                            "bamg error message: only a rift tip can be outside of the domain"
                        )

                    #Get coordinate of intersection point
                    x1 = rifti['x'][0]
                    y1 = rifti['y'][0]
                    x2 = rifti['x'][1]
                    y2 = rifti['y'][1]
                    for j in range(0, numpy.size(domain[0]['x']) - 1):
                        if SegIntersect(
                                numpy.array([[x1, y1], [x2, y2]]),
                                numpy.array(
                                    [[domain[0]['x'][j], domain[0]['y'][j]],
                                     [
                                         domain[0]['x'][j + 1],
                                         domain[0]['y'][j + 1]
                                     ]])):

                            #Get position of the two nodes of the edge in domain
                            i1 = j
                            i2 = j + 1

                            #rift is crossing edge [i1 i2] of the domain
                            #Get coordinate of intersection point (http://mathworld.wolfram.com/Line-LineIntersection.html)
                            x3 = domain[0]['x'][i1]
                            y3 = domain[0]['y'][i1]
                            x4 = domain[0]['x'][i2]
                            y4 = domain[0]['y'][i2]
                            #							x=det([det([x1 y1; x2 y2])  x1-x2;det([x3 y3; x4 y4])  x3-x4])/det([x1-x2 y1-y2;x3-x4 y3-y4]);
                            #							y=det([det([x1 y1; x2 y2])  y1-y2;det([x3 y3; x4 y4])  y3-y4])/det([x1-x2 y1-y2;x3-x4 y3-y4]);
                            x = numpy.linalg.det(
                                numpy.array([[
                                    numpy.linalg.det(
                                        numpy.array([[x1, y1], [x2, y2]])),
                                    x1 - x2
                                ],
                                             [
                                                 numpy.linalg.det(
                                                     numpy.array([[x3, y3],
                                                                  [x4, y4]])),
                                                 x3 - x4
                                             ]])) / numpy.linalg.det(
                                                 numpy.array([[
                                                     x1 - x2, y1 - y2
                                                 ], [x3 - x4, y3 - y4]]))
                            y = numpy.linalg.det(
                                numpy.array([[
                                    numpy.linalg.det(
                                        numpy.array([[x1, y1], [x2, y2]])),
                                    y1 - y2
                                ],
                                             [
                                                 numpy.linalg.det(
                                                     numpy.array([[x3, y3],
                                                                  [x4, y4]])),
                                                 y3 - y4
                                             ]])) / numpy.linalg.det(
                                                 numpy.array([[
                                                     x1 - x2, y1 - y2
                                                 ], [x3 - x4, y3 - y4]]))

                            segdis = sqrt((x4 - x3)**2 + (y4 - y3)**2)
                            tipdis = numpy.array([
                                sqrt((x - x3)**2 + (y - y3)**2),
                                sqrt((x - x4)**2 + (y - y4)**2)
                            ])

                            if numpy.min(
                                    tipdis) / segdis < options.getfieldvalue(
                                        'toltip', 0):
                                print("moving tip-domain intersection point")

                                #Get position of the closer point
                                if tipdis[0] > tipdis[1]:
                                    pos = i2
                                else:
                                    pos = i1

                                #This point is only in Vertices (number pos).
                                #OK, now we can add our own rift
                                nods = rifti['nods'] - 1
                                bamg_geometry.Vertices = numpy.vstack(
                                    (bamg_geometry.Vertices,
                                     numpy.hstack(
                                         (rifti['x'][1:].reshape(-1, 1),
                                          rifti['y'][1:].reshape(-1, 1),
                                          numpy.ones((nods, 1))))))
                                bamg_geometry.Edges=numpy.vstack((bamg_geometry.Edges,\
                                 numpy.array([[pos,count+1,(1+i)]]),\
                                 numpy.hstack((numpy.arange(count+1,count+nods).reshape(-1,1),numpy.arange(count+2,count+nods+1).reshape(-1,1),(1+i)*numpy.ones((nods-1,1))))))
                                count += nods

                                break

                            else:
                                #Add intersection point to Vertices
                                bamg_geometry.Vertices = numpy.vstack(
                                    (bamg_geometry.Vertices,
                                     numpy.array([[x, y, 1]])))
                                count += 1

                                #Decompose the crossing edge into 2 subedges
                                pos = numpy.nonzero(
                                    numpy.logical_and(
                                        bamg_geometry.Edges[:, 0] == i1,
                                        bamg_geometry.Edges[:, 1] == i2))[0]
                                if not pos:
                                    raise RuntimeError(
                                        "bamg error message: a problem occurred..."
                                    )
                                bamg_geometry.Edges=numpy.vstack((bamg_geometry.Edges[0:pos-1,:],\
                                 numpy.array([[bamg_geometry.Edges[pos,0],count                     ,bamg_geometry.Edges[pos,2]]]),\
                                 numpy.array([[count                     ,bamg_geometry.Edges[pos,1],bamg_geometry.Edges[pos,2]]]),\
                                 bamg_geometry.Edges[pos+1:,:]))

                                #OK, now we can add our own rift
                                nods = rifti['nods'] - 1
                                bamg_geometry.Vertices = numpy.vstack(
                                    (bamg_geometry.Vertices,
                                     numpy.hstack(
                                         (rifti['x'][1:].reshape(-1, 1),
                                          rifti['y'][1:].reshape(-1, 1),
                                          numpy.ones((nods, 1))))))
                                bamg_geometry.Edges=numpy.vstack((bamg_geometry.Edges,\
                                 numpy.array([[count,count+1,2]]),\
                                 numpy.hstack((numpy.arange(count+1,count+nods).reshape(-1,1),numpy.arange(count+2,count+nods+1).reshape(-1,1),(1+i)*numpy.ones((nods-1,1))))))
                                count += nods

                                break

                else:
                    nods = rifti['nods'] - 1
                    bamg_geometry.Vertices = numpy.vstack(
                        (bamg_geometry.Vertices,
                         numpy.hstack(
                             (rifti['x'].reshape(-1, 1),
                              rifti['y'].reshape(-1, 1),
                              numpy.ones((nods + 1, 1))))))
                    bamg_geometry.Edges = numpy.vstack(
                        (bamg_geometry.Edges,
                         numpy.hstack(
                             (numpy.arange(count + 1,
                                           count + nods + 1).reshape(-1, 1),
                              numpy.arange(count + 2,
                                           count + nods + 2).reshape(-1, 1),
                              i * numpy.ones((nods, 1))))))
                    count += nods + 1

        #Deal with tracks
        if options.exist('tracks'):

            #read tracks
            track = options.getfieldvalue('tracks')
            if isinstance(track, str):
                A = expread(track)
                track = numpy.hstack((A.x.reshape(-1, 1), A.y.reshape(-1, 1)))
            else:
                track = numpy.asarray(track, dtype=float)  #for some reason, it is of class "single"
            if numpy.size(track, axis=1) == 2:
                track = numpy.hstack((track, 3. * numpy.ones(
                    (numpy.size(track, axis=0), 1))))

            #only keep those inside
            flags = ContourToNodes(track[:, 0], track[:, 1], domainfile, 0)[0]
            track = track[numpy.nonzero(flags)[0], :]

            #Add all points to bamg_geometry
            nods = numpy.size(track, axis=0)
            bamg_geometry.Vertices = numpy.vstack(
                (bamg_geometry.Vertices, track))
            bamg_geometry.Edges = numpy.vstack(
                (bamg_geometry.Edges,
                 numpy.hstack(
                     (numpy.arange(count + 1, count + nods).reshape(-1, 1),
                      numpy.arange(count + 2, count + nods + 1).reshape(-1, 1),
                      3. * numpy.ones((nods - 1, 1))))))

            #update counter
            count += nods

        #Deal with vertices that need to be kept by mesher
        if options.exist('RequiredVertices'):

            #recover RequiredVertices
            requiredvertices = options.getfieldvalue(
                'RequiredVertices')  #for some reason, it is of class "single"
            if numpy.size(requiredvertices, axis=1) == 2:
                requiredvertices = numpy.hstack(
                    (requiredvertices, 4. * numpy.ones(
                        (numpy.size(requiredvertices, axis=0), 1))))

            #only keep those inside
            flags = ContourToNodes(requiredvertices[:, 0],
                                   requiredvertices[:, 1], domainfile, 0)[0]
            requiredvertices = requiredvertices[numpy.nonzero(flags)[0], :]

            #Add all points to bamg_geometry
            nods = numpy.size(requiredvertices, axis=0)
            bamg_geometry.Vertices = numpy.vstack(
                (bamg_geometry.Vertices, requiredvertices))

            #update counter
            count += nods

        #process geom
        #bamg_geometry=processgeometry(bamg_geometry,options.getfieldvalue('tol',float(nan)),domain[0])

    elif isinstance(md.private.bamg, dict) and 'geometry' in md.private.bamg:
        bamg_geometry = bamggeom(md.private.bamg['geometry'].__dict__)
    else:
        #do nothing...
        pass
    #}}}
    # Bamg Mesh parameters {{{
    if not options.exist('domain') and md.mesh.numberofvertices and m.strcmp(
            md.mesh.elementtype(), 'Tria'):

        if isinstance(md.private.bamg, dict) and 'mesh' in md.private.bamg:
            bamg_mesh = bamgmesh(md.private.bamg['mesh'].__dict__)
        else:
            bamg_mesh.Vertices = numpy.hstack(
                (md.mesh.x.reshape(-1, 1), md.mesh.y.reshape(-1, 1),
                 numpy.ones((md.mesh.numberofvertices, 1))))
            bamg_mesh.Triangles = numpy.hstack(
                (md.mesh.elements, numpy.ones((md.mesh.numberofelements, 1))))

        if isinstance(md.rifts.riftstruct, dict):
            raise TypeError(
                "bamg error message: rifts not supported yet. Do meshprocessrift AFTER bamg"
            )
    #}}}
    # Bamg Options {{{
    bamg_options['Crack'] = options.getfieldvalue('Crack', 0)
    bamg_options['anisomax'] = options.getfieldvalue('anisomax', 10.**30)
    bamg_options['coeff'] = options.getfieldvalue('coeff', 1.)
    bamg_options['cutoff'] = options.getfieldvalue('cutoff', 10.**-5)
    bamg_options['err'] = options.getfieldvalue('err', numpy.array([[0.01]]))
    bamg_options['errg'] = options.getfieldvalue('errg', 0.1)
    bamg_options['field'] = options.getfieldvalue('field', numpy.empty((0, 1)))
    bamg_options['gradation'] = options.getfieldvalue('gradation', 1.5)
    bamg_options['Hessiantype'] = options.getfieldvalue('Hessiantype', 0)
    bamg_options['hmin'] = options.getfieldvalue('hmin', 10.**-100)
    bamg_options['hmax'] = options.getfieldvalue('hmax', 10.**100)
    bamg_options['hminVertices'] = options.getfieldvalue(
        'hminVertices', numpy.empty((0, 1)))
    bamg_options['hmaxVertices'] = options.getfieldvalue(
        'hmaxVertices', numpy.empty((0, 1)))
    bamg_options['hVertices'] = options.getfieldvalue('hVertices',
                                                      numpy.empty((0, 1)))
    bamg_options['KeepVertices'] = options.getfieldvalue('KeepVertices', 1)
    bamg_options['MaxCornerAngle'] = options.getfieldvalue(
        'MaxCornerAngle', 10.)
    bamg_options['maxnbv'] = options.getfieldvalue('maxnbv', 10**6)
    bamg_options['maxsubdiv'] = options.getfieldvalue('maxsubdiv', 10.)
    bamg_options['metric'] = options.getfieldvalue('metric', numpy.empty(
        (0, 1)))
    bamg_options['Metrictype'] = options.getfieldvalue('Metrictype', 0)
    bamg_options['nbjacobi'] = options.getfieldvalue('nbjacobi', 1)
    bamg_options['nbsmooth'] = options.getfieldvalue('nbsmooth', 3)
    bamg_options['omega'] = options.getfieldvalue('omega', 1.8)
    bamg_options['power'] = options.getfieldvalue('power', 1.)
    bamg_options['splitcorners'] = options.getfieldvalue('splitcorners', 1)
    bamg_options['geometricalmetric'] = options.getfieldvalue(
        'geometricalmetric', 0)
    bamg_options['random'] = options.getfieldvalue('rand', True)
    bamg_options['verbose'] = options.getfieldvalue('verbose', 1)
    #}}}

    #call Bamg
    [bamgmesh_out,
     bamggeom_out] = BamgMesher(bamg_mesh.__dict__, bamg_geometry.__dict__,
                                bamg_options)

    # plug results onto model
    md.private.bamg = OrderedDict()
    md.private.bamg['mesh'] = bamgmesh(bamgmesh_out)
    md.private.bamg['geometry'] = bamggeom(bamggeom_out)
    md.mesh = mesh2d()
    md.mesh.x = bamgmesh_out['Vertices'][:, 0].copy()
    md.mesh.y = bamgmesh_out['Vertices'][:, 1].copy()
    md.mesh.elements = bamgmesh_out['Triangles'][:, 0:3].astype(int)
    md.mesh.edges = bamgmesh_out['IssmEdges'].astype(int)
    md.mesh.segments = bamgmesh_out['IssmSegments'][:, 0:3].astype(int)
    md.mesh.segmentmarkers = bamgmesh_out['IssmSegments'][:, 3].astype(int)

    #Fill in rest of fields:
    md.mesh.numberofelements = numpy.size(md.mesh.elements, axis=0)
    md.mesh.numberofvertices = numpy.size(md.mesh.x)
    md.mesh.numberofedges = numpy.size(md.mesh.edges, axis=0)
    md.mesh.vertexonboundary = numpy.zeros(md.mesh.numberofvertices, bool)
    md.mesh.vertexonboundary[md.mesh.segments[:, 0:2] - 1] = True
    md.mesh.elementconnectivity = md.private.bamg['mesh'].ElementConnectivity
    md.mesh.elementconnectivity[numpy.nonzero(
        numpy.isnan(md.mesh.elementconnectivity))] = 0
    md.mesh.elementconnectivity = md.mesh.elementconnectivity.astype(int)

    #Check for orphan
    if numpy.any(
            numpy.logical_not(
                numpy.in1d(numpy.arange(1, md.mesh.numberofvertices + 1),
                           md.mesh.elements.flat))):
        raise RuntimeError(
            "Output mesh has orphans. Decrease MaxCornerAngle to prevent outside points (ex: 0.01)"
        )

    return md
Example no. 14
    def checkconsistency(self, md, solution, analyses):  # {{{

        #Early return
        if StressbalanceAnalysisEnum() not in analyses:
            return md

        md = checkfield(md, 'fieldname', 'stressbalance.spcvx', 'Inf', 1,
                        'timeseries', 1)
        md = checkfield(md, 'fieldname', 'stressbalance.spcvy', 'Inf', 1,
                        'timeseries', 1)
        if m.strcmp(md.mesh.domaintype(), '3D'):
            md = checkfield(md, 'fieldname', 'stressbalance.spcvz', 'Inf', 1,
                            'timeseries', 1)
        md = checkfield(md, 'fieldname', 'stressbalance.restol', 'size', [1],
                        '>', 0)
        md = checkfield(md, 'fieldname', 'stressbalance.reltol', 'size', [1])
        md = checkfield(md, 'fieldname', 'stressbalance.abstol', 'size', [1])
        md = checkfield(md, 'fieldname', 'stressbalance.isnewton', 'numel',
                        [1], 'values', [0, 1, 2])
        md = checkfield(md, 'fieldname', 'stressbalance.FSreconditioning',
                        'size', [1], 'NaN', 1, 'Inf', 1)
        md = checkfield(md, 'fieldname', 'stressbalance.viscosity_overshoot',
                        'size', [1], 'NaN', 1, 'Inf', 1)
        md = checkfield(md, 'fieldname', 'stressbalance.maxiter', 'size', [1],
                        '>=', 1)
        md = checkfield(md, 'fieldname', 'stressbalance.referential', 'size',
                        [md.mesh.numberofvertices, 6])
        md = checkfield(md, 'fieldname', 'stressbalance.loadingforce', 'size',
                        [md.mesh.numberofvertices, 3])
        md = checkfield(md, 'fieldname', 'stressbalance.requested_outputs',
                        'stringrow', 1)

        #singular solution
        #		if ~any((~isnan(md.stressbalance.spcvx)+~isnan(md.stressbalance.spcvy))==2),
        if not numpy.any(
                numpy.logical_and(
                    numpy.logical_not(numpy.isnan(md.stressbalance.spcvx)),
                    numpy.logical_not(numpy.isnan(md.stressbalance.spcvy)))):
            print(
                "\n !!! Warning: no spc applied, model might not be well posed if no basal friction is applied, check for solution crash\n"
            )
        #CHECK THAT EACH LINE CONTAINS EITHER ONLY NAN VALUES OR NO NAN VALUES
        #		if any(sum(isnan(md.stressbalance.referential),2)~=0 & sum(isnan(md.stressbalance.referential),2)~=6),
        if numpy.any(
                numpy.logical_and(
                    numpy.sum(numpy.isnan(md.stressbalance.referential),
                              axis=1) != 0,
                    numpy.sum(numpy.isnan(md.stressbalance.referential),
                              axis=1) != 6)):
            md.checkmessage(
                "Each line of stressbalance.referential should contain either only NaN values or no NaN values"
            )
        #CHECK THAT THE TWO VECTORS PROVIDED ARE ORTHOGONAL
        #		if any(sum(isnan(md.stressbalance.referential),2)==0),
        if numpy.any(
                numpy.sum(numpy.isnan(md.stressbalance.referential), axis=1) ==
                0):
            pos = [
                i for i, item in enumerate(
                    numpy.sum(numpy.isnan(md.stressbalance.referential),
                              axis=1)) if item == 0
            ]
            #			numpy.inner (and numpy.dot) calculate all the dot product permutations, resulting in a full matrix multiply
            #			if numpy.any(numpy.abs(numpy.inner(md.stressbalance.referential[pos,0:2],md.stressbalance.referential[pos,3:5]).diagonal())>sys.float_info.epsilon):
            #				md.checkmessage("Vectors in stressbalance.referential (columns 1 to 3 and 4 to 6) must be orthogonal")
            for item in md.stressbalance.referential[pos, :]:
                if numpy.abs(numpy.inner(item[0:3],
                                         item[3:6])) > sys.float_info.epsilon:
                    md.checkmessage(
                        "Vectors in stressbalance.referential (columns 1 to 3 and 4 to 6) must be orthogonal"
                    )
        #CHECK THAT NO rotation specified for FS Grounded ice at base
        if m.strcmp(md.mesh.domaintype(), '3D') and md.flowequation.isFS:
            pos = numpy.nonzero(
                numpy.logical_and(md.mask.groundedice_levelset,
                                  md.mesh.vertexonbase))
            if numpy.any(
                    numpy.logical_not(
                        numpy.isnan(md.stressbalance.referential[pos, :]))):
                md.checkmessage(
                    "no referential should be specified for basal vertices of grounded ice"
                )

        return md
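A standalone illustration of the orthogonality test above, on a hypothetical two-row referential (columns 1 to 3 and 4 to 6 hold the two direction vectors of the local frame):

    import sys
    import numpy as np

    referential = np.array([[1., 0., 0., 0., 1., 0.],    # x and y axes: orthogonal
                            [1., 0., 0., 1., 1., 0.]])   # not orthogonal
    for row in referential:
        print(np.abs(np.inner(row[0:3], row[3:6])) <= sys.float_info.epsilon)  # True, then False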
Example no. 15
File: expread.py Project: pf4d/issm
def expread(filename):

	"""

	EXPREAD - read a file exp and build a Structure

	   This routine reads a file .exp and builds a list of dicts containing the 
	   fields x and y corresponding to the coordinates, one for the filename of
	   the exp file, for the density, for the nodes, and a field closed to 
	   indicate if the domain is closed. 
	   The first argument is the .exp file to be read and the second one (optional) 
	   indicate if the last point shall be read (1 to read it, 0 not to).

	   Usage:
	      contours=expread(filename)

	   Example:
	      contours=expread('domainoutline.exp')
	      contours=expread('domainoutline.exp')

	   See also EXPDOC, EXPWRITEASVERTICES

	"""
	#some checks
	if not os.path.exists(filename):
		raise OSError("expread error message: file '%s' not found!" % filename)

	#initialize number of profile
	contours=[]
	#open file
	fid=open(filename,'r')
	#loop over the number of profiles
	while True:
		#update number of profiles
		contour=OrderedDict()
		#Get file name
		A=fid.readline()
		while A=='\n':
			A=fid.readline()
		if not A:
			break
		A=A.split(None,1)
		if not (len(A) == 2 and m.strcmp(A[0],'##') and m.strncmp(A[1],'Name:',5)):
			break

		if len(A[1])>5: 
			contour['name']=A[1][5:-1]
		else:
			contour['name']=''

		#Get Icon
		A=fid.readline().split(None,1)
		if not (len(A) == 2 and m.strcmp(A[0],'##') and m.strncmp(A[1],'Icon:',5)):
			break
		#Get Info
		A=fid.readline().split()
		if not (len(A) == 4 and m.strcmp(A[0],'#') and m.strcmp(A[1],'Points')):
			break

		#Get number of nodes and density
		A=fid.readline().split()
		contour['nods']=int(A[0])
		contour['density']=float(A[1])

		#Get Info
		A=fid.readline().split()
		if not (len(A) == 5 and m.strcmp(A[0],'#') and m.strcmp(A[1],'X') and m.strcmp(A[2],'pos') 
						and m.strcmp(A[3],'Y') and m.strcmp(A[4],'pos')):
			break
		#Get Coordinates
		contour['x']=np.empty(contour['nods'])
		contour['y']=np.empty(contour['nods'])
		for i in range(int(contour['nods'])):
			A=fid.readline().split()
			contour['x'][i]=float(A[0])
			contour['y'][i]=float(A[1])

		#Check if closed
		if (contour['nods'] > 1) and \
		   (contour['x'][-1] == contour['x'][0]) and \
		   (contour['y'][-1] == contour['y'][0]):
			contour['closed']=True
		else:
			contour['closed']=False

		contours.append(contour)
	#close file
	fid.close()
	return contours
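For reference, a minimal Argus .exp file that the parser above accepts (a closed square contour; the profile name is illustrative):

    ## Name:DomainOutline
    ## Icon:0
    # Points Count Value
    5 1.
    # X pos Y pos
    0. 0.
    1000. 0.
    1000. 1000.
    0. 1000.
    0. 0.

Calling expread on such a file returns a one-element list whose dict has name 'DomainOutline', nods 5, density 1.0, and closed True.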
Example no. 16
File: model.py Project: pf4d/issm
    def extract(md, area):  # {{{
        """
		extract - extract a model according to an Argus contour or flag list

		   This routine extracts a submodel from a bigger model with respect to a given contour
		   md must be followed by the corresponding exp file or flags list
		   It can either be a domain file (argus type, .exp extension), or an array of element flags. 
		   If the user wants every element outside the domain to be
		   extracted instead, add '~' to the name of the domain file (e.g. '~HO.exp')
		   an empty string '' will be considered as an empty domain
		   a string 'all' will be considered as the entire domain

		   Usage:
		      md2=extract(md,area)

		   Examples:
		      md2=extract(md,'Domain.exp')

		   See also: EXTRUDE, COLLAPSE
		"""

        #copy model
        md1 = copy.deepcopy(md)

        #get elements that are inside area
        flag_elem = FlagElements(md1, area)
        if not np.any(flag_elem):
            raise RuntimeError("extracted model is empty")

        #kick out all elements with 3 dirichlets
        spc_elem = np.nonzero(np.logical_not(flag_elem))[0]
        spc_node = np.unique(md1.mesh.elements[spc_elem, :]) - 1
        flag = np.ones(md1.mesh.numberofvertices)
        flag[spc_node] = 0
        pos = np.nonzero(
            np.logical_not(np.sum(flag[md1.mesh.elements - 1], axis=1)))[0]
        flag_elem[pos] = 0

        #extracted elements and nodes lists
        pos_elem = np.nonzero(flag_elem)[0]
        pos_node = np.unique(md1.mesh.elements[pos_elem, :]) - 1

        #keep track of some fields
        numberofvertices1 = md1.mesh.numberofvertices
        numberofelements1 = md1.mesh.numberofelements
        numberofvertices2 = np.size(pos_node)
        numberofelements2 = np.size(pos_elem)
        flag_node = np.zeros(numberofvertices1)
        flag_node[pos_node] = 1

        #Create Pelem and Pnode (transform old nodes in new nodes and same thing for the elements)
        Pelem = np.zeros(numberofelements1, int)
        Pelem[pos_elem] = np.arange(1, numberofelements2 + 1)
        Pnode = np.zeros(numberofvertices1, int)
        Pnode[pos_node] = np.arange(1, numberofvertices2 + 1)

        #renumber the elements (some node won't exist anymore)
        elements_1 = copy.deepcopy(md1.mesh.elements)
        elements_2 = elements_1[pos_elem, :]
        elements_2[:, 0] = Pnode[elements_2[:, 0] - 1]
        elements_2[:, 1] = Pnode[elements_2[:, 1] - 1]
        elements_2[:, 2] = Pnode[elements_2[:, 2] - 1]
        if md1.mesh.__class__.__name__ == 'mesh3dprisms':
            elements_2[:, 3] = Pnode[elements_2[:, 3] - 1]
            elements_2[:, 4] = Pnode[elements_2[:, 4] - 1]
            elements_2[:, 5] = Pnode[elements_2[:, 5] - 1]

        #OK, now create the new model!

        #take every field from model
        md2 = copy.deepcopy(md1)

        #automatically modify fields

        #loop over model fields
        model_fields = vars(md1)
        for fieldi in model_fields:
            #get field
            field = getattr(md1, fieldi)
            fieldsize = np.shape(field)
            if hasattr(field, '__dict__') and not m.ismember(
                    fieldi, ['results'])[0]:  #recursive call
                object_fields = vars(field)
                for fieldj in object_fields:
                    #get field
                    field = getattr(getattr(md1, fieldi), fieldj)
                    fieldsize = np.shape(field)
                    if len(fieldsize):
                        #size = number of nodes * n
                        if fieldsize[0] == numberofvertices1:
                            setattr(getattr(md2, fieldi), fieldj,
                                    field[pos_node])
                        elif fieldsize[0] == numberofvertices1 + 1:
                            setattr(getattr(md2, fieldi), fieldj,
                                    np.vstack((field[pos_node], field[-1, :])))
                        #size = number of elements * n
                        elif fieldsize[0] == numberofelements1:
                            setattr(getattr(md2, fieldi), fieldj,
                                    field[pos_elem])
            else:
                if len(fieldsize):
                    #size = number of nodes * n
                    if fieldsize[0] == numberofvertices1:
                        setattr(md2, fieldi, field[pos_node])
                    elif fieldsize[0] == numberofvertices1 + 1:
                        setattr(md2, fieldi,
                                np.hstack((field[pos_node], field[-1, :])))
                    #size = number of elements * n
                    elif fieldsize[0] == numberofelements1:
                        setattr(md2, fieldi, field[pos_elem])

        #modify some specific fields

        #Mesh
        md2.mesh.numberofelements = numberofelements2
        md2.mesh.numberofvertices = numberofvertices2
        md2.mesh.elements = elements_2

        #mesh.uppervertex mesh.lowervertex
        if md1.mesh.__class__.__name__ == 'mesh3dprisms':
            md2.mesh.uppervertex = md1.mesh.uppervertex[pos_node]
            pos = np.where(~np.isnan(md2.mesh.uppervertex))[0]
            md2.mesh.uppervertex[pos] = Pnode[
                md2.mesh.uppervertex[pos].astype(int) - 1]

            md2.mesh.lowervertex = md1.mesh.lowervertex[pos_node]
            pos = np.where(~np.isnan(md2.mesh.lowervertex))[0]
            md2.mesh.lowervertex[pos] = Pnode[
                md2.mesh.lowervertex[pos].astype(int) - 1]

            md2.mesh.upperelements = md1.mesh.upperelements[pos_elem]
            pos = np.where(~np.isnan(md2.mesh.upperelements))[0]
            md2.mesh.upperelements[pos] = Pelem[
                md2.mesh.upperelements[pos].astype(int) - 1]

            md2.mesh.lowerelements = md1.mesh.lowerelements[pos_elem]
            pos = np.where(~np.isnan(md2.mesh.lowerelements))[0]
            md2.mesh.lowerelements[pos] = Pelem[
                md2.mesh.lowerelements[pos].astype(int) - 1]

        #Initial 2d mesh
        if md1.mesh.__class__.__name__ == 'mesh3dprisms':
            flag_elem_2d = flag_elem[np.arange(0, md1.mesh.numberofelements2d)]
            pos_elem_2d = np.nonzero(flag_elem_2d)[0]
            flag_node_2d = flag_node[np.arange(0, md1.mesh.numberofvertices2d)]
            pos_node_2d = np.nonzero(flag_node_2d)[0]

            md2.mesh.numberofelements2d = np.size(pos_elem_2d)
            md2.mesh.numberofvertices2d = np.size(pos_node_2d)
            md2.mesh.elements2d = md1.mesh.elements2d[pos_elem_2d, :]
            md2.mesh.elements2d[:, 0] = Pnode[md2.mesh.elements2d[:, 0] - 1]
            md2.mesh.elements2d[:, 1] = Pnode[md2.mesh.elements2d[:, 1] - 1]
            md2.mesh.elements2d[:, 2] = Pnode[md2.mesh.elements2d[:, 2] - 1]

            md2.mesh.x2d = md1.mesh.x[pos_node_2d]
            md2.mesh.y2d = md1.mesh.y[pos_node_2d]

        #Edges
        if m.strcmp(md1.mesh.domaintype(), '2Dhorizontal'):
            if np.ndim(md2.mesh.edges) > 1 and np.size(
                    md2.mesh.edges, axis=1
            ) > 1:  #do not use ~isnan because there are some np.nans...
                #renumber first two columns
                pos = np.nonzero(md2.mesh.edges[:, 3] != -1)[0]
                md2.mesh.edges[:, 0] = Pnode[md2.mesh.edges[:, 0] - 1]
                md2.mesh.edges[:, 1] = Pnode[md2.mesh.edges[:, 1] - 1]
                md2.mesh.edges[:, 2] = Pelem[md2.mesh.edges[:, 2] - 1]
                md2.mesh.edges[pos, 3] = Pelem[md2.mesh.edges[pos, 3] - 1]
                #remove edges when the 2 vertices are not in the domain.
                md2.mesh.edges = md2.mesh.edges[np.nonzero(
                    np.logical_and(md2.mesh.edges[:, 0], md2.mesh.edges[:, 1])
                )[0], :]
                #Replace all zeros by -1 in the last two columns
                pos = np.nonzero(md2.mesh.edges[:, 2] == 0)[0]
                md2.mesh.edges[pos, 2] = -1
                pos = np.nonzero(md2.mesh.edges[:, 3] == 0)[0]
                md2.mesh.edges[pos, 3] = -1
                #Where the third column is -1, swap it with the last column (and swap the first two columns accordingly)
                pos = np.nonzero(md2.mesh.edges[:, 2] == -1)[0]
                md2.mesh.edges[pos, 2] = md2.mesh.edges[pos, 3]
                md2.mesh.edges[pos, 3] = -1
                values = md2.mesh.edges[pos, 1]
                md2.mesh.edges[pos, 1] = md2.mesh.edges[pos, 0]
                md2.mesh.edges[pos, 0] = values
                #Finally remove edges that do not belong to any element
                pos = np.nonzero(
                    np.logical_and(md2.mesh.edges[:, 1] == -1,
                                   md2.mesh.edges[:, 2] == -1))[0]
                md2.mesh.edges = np.delete(md2.mesh.edges, pos, axis=0)

        #Penalties
        if np.any(np.logical_not(np.isnan(md2.stressbalance.vertex_pairing))):
            for i in xrange(np.size(md1.stressbalance.vertex_pairing, axis=0)):
                md2.stressbalance.vertex_pairing[i, :] = Pnode[
                    md1.stressbalance.vertex_pairing[i, :]]
            md2.stressbalance.vertex_pairing = md2.stressbalance.vertex_pairing[
                np.nonzero(md2.stressbalance.vertex_pairing[:, 0])[0], :]
        if np.any(np.logical_not(np.isnan(md2.masstransport.vertex_pairing))):
            for i in xrange(np.size(md1.masstransport.vertex_pairing, axis=0)):
                md2.masstransport.vertex_pairing[i, :] = Pnode[
                    md1.masstransport.vertex_pairing[i, :]]
            md2.masstransport.vertex_pairing = md2.masstransport.vertex_pairing[
                np.nonzero(md2.masstransport.vertex_pairing[:, 0])[0], :]

        #recreate segments
        if md1.mesh.__class__.__name__ == 'mesh2d':
            md2.mesh.vertexconnectivity = NodeConnectivity(
                md2.mesh.elements, md2.mesh.numberofvertices)[0]
            md2.mesh.elementconnectivity = ElementConnectivity(
                md2.mesh.elements, md2.mesh.vertexconnectivity)[0]
            md2.mesh.segments = contourenvelope(md2)
            md2.mesh.vertexonboundary = np.zeros(numberofvertices2, bool)
            md2.mesh.vertexonboundary[md2.mesh.segments[:, 0:2] - 1] = True
        else:
            #First do the connectivity for the contourenvelope in 2d
            md2.mesh.vertexconnectivity = NodeConnectivity(
                md2.mesh.elements2d, md2.mesh.numberofvertices2d)[0]
            md2.mesh.elementconnectivity = ElementConnectivity(
                md2.mesh.elements2d, md2.mesh.vertexconnectivity)[0]
            segments = contourenvelope(md2)
            md2.mesh.vertexonboundary = np.zeros(
                numberofvertices2 // md2.mesh.numberoflayers, bool)
            md2.mesh.vertexonboundary[segments[:, 0:2] - 1] = True
            md2.mesh.vertexonboundary = np.tile(md2.mesh.vertexonboundary,
                                                md2.mesh.numberoflayers)
            #Then do it for 3d as usual
            md2.mesh.vertexconnectivity = NodeConnectivity(
                md2.mesh.elements, md2.mesh.numberofvertices)[0]
            md2.mesh.elementconnectivity = ElementConnectivity(
                md2.mesh.elements, md2.mesh.vertexconnectivity)[0]

        #Boundary conditions: Dirichlets on new boundary
        #Catch the elements that have not been extracted
        orphans_elem = np.nonzero(np.logical_not(flag_elem))[0]
        orphans_node = np.unique(md1.mesh.elements[orphans_elem, :]) - 1
        #Figure out which nodes are on the boundary between md2 and md1
        nodestoflag1 = np.intersect1d(orphans_node, pos_node)
        nodestoflag2 = Pnode[nodestoflag1].astype(int) - 1
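        #nodestoflag1 holds these interface vertices in md1 (0-based) numbering; nodestoflag2 is the same set renumbered into md2 (0-based)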
        if np.size(md1.stressbalance.spcvx) > 1 and np.size(
                md1.stressbalance.spcvy) > 2 and np.size(
                    md1.stressbalance.spcvz) > 2:
            if np.size(md1.inversion.vx_obs) > 1 and np.size(
                    md1.inversion.vy_obs) > 1:
                md2.stressbalance.spcvx[nodestoflag2] = md2.inversion.vx_obs[
                    nodestoflag2]
                md2.stressbalance.spcvy[nodestoflag2] = md2.inversion.vy_obs[
                    nodestoflag2]
            else:
                md2.stressbalance.spcvx[nodestoflag2] = np.nan
                md2.stressbalance.spcvy[nodestoflag2] = np.nan
                print "\n!! extract warning: spc values should be checked !!\n\n"
            #put 0 for vz
            md2.stressbalance.spcvz[nodestoflag2] = 0
        if np.any(np.logical_not(np.isnan(md1.thermal.spctemperature))):
            md2.thermal.spctemperature[nodestoflag2] = 1

        #Results fields
        if md1.results:
            md2.results = results()
            for solutionfield, field in md1.results.__dict__.iteritems():
                if isinstance(field, list):
                    setattr(md2.results, solutionfield, [])
                    #get time step
                    for i, fieldi in enumerate(field):
                        if isinstance(fieldi, results) and fieldi:
                            getattr(md2.results,
                                    solutionfield).append(results())
                            fieldr = getattr(md2.results, solutionfield)[i]
                            #get subfields
                            for solutionsubfield, subfield in fieldi.__dict__.iteritems(
                            ):
                                if np.size(subfield) == numberofvertices1:
                                    setattr(fieldr, solutionsubfield,
                                            subfield[pos_node])
                                elif np.size(subfield) == numberofelements1:
                                    setattr(fieldr, solutionsubfield,
                                            subfield[pos_elem])
                                else:
                                    setattr(fieldr, solutionsubfield, subfield)
                        else:
                            getattr(md2.results, solutionfield).append(None)
                elif isinstance(field, results):
                    setattr(md2.results, solutionfield, results())
                    if isinstance(field, results) and field:
                        fieldr = getattr(md2.results, solutionfield)
                        #get subfields
                        for solutionsubfield, subfield in field.__dict__.iteritems(
                        ):
                            if np.size(subfield) == numberofvertices1:
                                setattr(fieldr, solutionsubfield,
                                        subfield[pos_node])
                            elif np.size(subfield) == numberofelements1:
                                setattr(fieldr, solutionsubfield,
                                        subfield[pos_elem])
                            else:
                                setattr(fieldr, solutionsubfield, subfield)

        #Keep track of pos_node and pos_elem
        md2.mesh.extractedvertices = pos_node + 1
        md2.mesh.extractedelements = pos_elem + 1

        return md2
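#A minimal, self-contained sketch (not part of the routine above; the names
#'keep' and 'old_elements' are illustrative) of the 1-based renumbering idea
#used throughout the extraction code: Pnode maps every original vertex number
#to its new number, with 0 marking dropped vertices, so the connectivity can
#be renumbered with a single fancy-indexing pass.
import numpy as np

numberofvertices1 = 6
keep = np.array([0, 2, 3, 5])                    #0-based indices of vertices kept
Pnode = np.zeros(numberofvertices1, dtype=int)   #0 means "vertex removed"
Pnode[keep] = np.arange(1, keep.size + 1)        #new 1-based numbering

old_elements = np.array([[1, 3, 4],              #1-based connectivity of kept elements
                         [3, 4, 6]])
new_elements = Pnode[old_elements - 1]           #same operation as elements_2[:, k] = Pnode[...]
print(new_elements)                              #[[1 2 3] [2 3 4]]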
Esempio n. 17
0
def parseresultsfromdiskioserial(filename):    # {{{
	"""
	PARSERESULTSFROMDISK - ...
	 
	    Usage:
	       results=parseresultsfromdiskioserial(filename)
	"""

	#Open file
	try:
		fid=open(filename,'rb')
	except IOError as e:
		raise IOError("loadresultsfromdisk error message: could not open '%s' for binary reading." % filename)

	#initialize results: 
	results=[]
	results.append(None)

	#Read fields until the end of the file.
	result=ReadData(fid)

	counter=0
	check_nomoresteps=0
	step=result['step']

	while result:

		if check_nomoresteps:
			#check that the new result does not add a step, which would be an error: 
			if result['step']>=1:
				raise TypeError("parsing results for a steady-state core, which incorporates transient results!")

		#Check step, increase counter if this is a new step
		if(step!=result['step'] and result['step']>1):
			counter = counter + 1
			step    = result['step']

		#Add result
		if result['step']==0:
			#if we have a step = 0, this is a steady state solution, don't expect more steps. 
			index = 0;
			check_nomoresteps=1
	
		elif result['step']==1:
			index = 0
		else:
			index = counter;
	
		if index > len(results)-1:
			for i in range(len(results)-1,index-1):
				results.append(None)
			results.append(resultsclass.results())
		
		elif results[index] is None:
			results[index]=resultsclass.results()

			
		#Get time and step
		if result['step'] != -9999.:
			setattr(results[index],'step',result['step'])
		if result['time'] != -9999.:
			setattr(results[index],'time',result['time']) 
	
		#Add result
		if hasattr(results[index],result['fieldname']) and not m.strcmp(result['fieldname'],'SolutionType'):
			setattr(results[index],result['fieldname'],numpy.vstack((getattr(results[index],result['fieldname']),result['field'])))
		else:
			setattr(results[index],result['fieldname'],result['field'])

		#read next result
		result=ReadData(fid)

	fid.close()

	return results
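#A standalone mirror (synthetic input, not the parser itself) of the step-to-index
#bookkeeping above: records with step 0 or 1 land in slot 0, and a new slot is
#opened every time the step number changes for steps greater than 1.
def index_for_steps(steps):
	indices, counter, last_step = [], 0, steps[0] if steps else None
	for s in steps:
		if s != last_step and s > 1:
			counter += 1
			last_step = s
		indices.append(0 if s in (0, 1) else counter)
	return indices

print(index_for_steps([1, 1, 2, 2, 3]))    #[0, 0, 1, 1, 2]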
Esempio n. 18
0
def SetMarineIceSheetBC(md, icefrontfile=''):
    """
	SETICEMARINESHEETBC - Create the boundary conditions for stressbalance and thermal models for a  Marine Ice Sheet with Ice Front

	   Neumann BC are used on the ice front (an ARGUS contour around the ice front
	   can be given in input, or it will be deduced as onfloatingice & onboundary)
	   Dirichlet BC are used elsewhere for stressbalance

	   Usage:
	      md=SetMarineIceSheetBC(md,icefrontfile)
	      md=SetMarineIceSheetBC(md)

	   Example:
	      md=SetMarineIceSheetBC(md,'Front.exp')
	      md=SetMarineIceSheetBC(md)

	   See also: SETICESHELFBC, SETMARINEICESHEETBC
	"""

    #node on Dirichlet (boundary and ~icefront)
    if icefrontfile:
        #User provided Front.exp, use it
        if not os.path.exists(icefrontfile):
            raise IOError(
                "SetMarineIceSheetBC error message: ice front file '%s' not found."
                % icefrontfile)
        incontour = ContourToMesh(md.mesh.elements, md.mesh.x, md.mesh.y,
                                  icefrontfile, 'node', 2)
        vertexonicefront = np.logical_and(md.mesh.vertexonboundary,
                                          incontour.reshape(-1))
    else:
        #Guess where the ice front is
        vertexonfloatingice = np.zeros((md.mesh.numberofvertices))
        pos = np.nonzero(
            np.sum(md.mask.groundedice_levelset[md.mesh.elements - 1] < 0.,
                   axis=1) > 0.)[0]
        vertexonfloatingice[md.mesh.elements[pos].astype(int) - 1] = 1.
        vertexonicefront = np.logical_and(
            np.reshape(md.mesh.vertexonboundary, (-1, )),
            vertexonfloatingice > 0.)

    #Dirichlet nodes: boundary vertices that are not on the ice front
    pos = np.nonzero(
        np.logical_and(md.mesh.vertexonboundary,
                       np.logical_not(vertexonicefront)))[0]
    if not np.size(pos):
        print "SetMarineIceSheetBC warning: ice front all around the glacier, no dirichlet found. Dirichlet must be added manually."

    md.stressbalance.spcvx = float('nan') * np.ones(md.mesh.numberofvertices)
    md.stressbalance.spcvy = float('nan') * np.ones(md.mesh.numberofvertices)
    md.stressbalance.spcvz = float('nan') * np.ones(md.mesh.numberofvertices)
    md.stressbalance.referential = float('nan') * np.ones(
        (md.mesh.numberofvertices, 6))
    md.stressbalance.loadingforce = 0 * np.ones((md.mesh.numberofvertices, 3))

    #Position of ice front
    pos = np.nonzero(vertexonicefront)[0]
    md.mask.ice_levelset[pos] = 0

    #First find segments that are not completely on the front
    if m.strcmp(md.mesh.elementtype(), 'Penta'):
        numbernodesfront = 4
    elif m.strcmp(md.mesh.elementtype(), 'Tria'):
        numbernodesfront = 2
    else:
        raise StandardError("Mesh type not supported")
    if any(md.mask.ice_levelset <= 0):
        values = md.mask.ice_levelset[md.mesh.segments[:, 0:-1] - 1]
        segmentsfront = 1 - values
        segments = np.nonzero(
            np.sum(segmentsfront, axis=1) != numbernodesfront)[0]
        #Find all nodes for these segments and spc them
        pos = md.mesh.segments[segments, 0:-1] - 1
    else:
        pos = np.nonzero(md.mesh.vertexonboundary)[0]
    md.stressbalance.spcvx[pos] = 0
    md.stressbalance.spcvy[pos] = 0
    md.stressbalance.spcvz[pos] = 0

    #Dirichlet Values
    if isinstance(md.inversion.vx_obs, np.ndarray) and np.size(
            md.inversion.vx_obs,
            axis=0) == md.mesh.numberofvertices and isinstance(
                md.inversion.vy_obs, np.ndarray) and np.size(
                    md.inversion.vy_obs, axis=0) == md.mesh.numberofvertices:
        print "      boundary conditions for stressbalance model: spc set as observed velocities"
        md.stressbalance.spcvx[pos] = md.inversion.vx_obs[pos]
        md.stressbalance.spcvy[pos] = md.inversion.vy_obs[pos]
    else:
        print "      boundary conditions for stressbalance model: spc set as zero"

    md.hydrology.spcwatercolumn = np.zeros((md.mesh.numberofvertices, 2))
    pos = np.nonzero(md.mesh.vertexonboundary)[0]
    md.hydrology.spcwatercolumn[pos, 0] = 1

    #Create zeros basalforcings and smb
    md.smb.initialize(md)
    md.basalforcings.initialize(md)

    #Deal with other boundary conditions
    if np.all(np.isnan(md.balancethickness.thickening_rate)):
        md.balancethickness.thickening_rate = np.zeros(
            (md.mesh.numberofvertices))
        print "      no balancethickness.thickening_rate specified: values set as zero"

    md.masstransport.spcthickness = float('nan') * np.ones(
        (md.mesh.numberofvertices))
    md.balancethickness.spcthickness = float('nan') * np.ones(
        (md.mesh.numberofvertices))
    md.damage.spcdamage = float('nan') * np.ones((md.mesh.numberofvertices))

    if isinstance(md.initialization.temperature, np.ndarray) and np.size(
            md.initialization.temperature, axis=0) == md.mesh.numberofvertices:
        md.thermal.spctemperature = float('nan') * np.ones(
            (md.mesh.numberofvertices))
        if hasattr(md.mesh, 'vertexonsurface'):
            pos = np.nonzero(md.mesh.vertexonsurface)[0]
            md.thermal.spctemperature[pos] = md.initialization.temperature[
                pos]  #impose observed temperature on surface
        if not isinstance(
                md.basalforcings.geothermalflux,
                np.ndarray) or not np.size(md.basalforcings.geothermalflux,
                                           axis=0) == md.mesh.numberofvertices:
            md.basalforcings.geothermalflux = np.zeros(
                (md.mesh.numberofvertices))
            md.basalforcings.geothermalflux[np.nonzero(
                md.mask.groundedice_levelset > 0.)] = 50. * 10.**-3  #50mW/m2
    else:
        print "      no thermal boundary conditions created: no observed temperature found"

    return md
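#Standalone sketch (synthetic data; the real md.mesh.segments carries an extra
#element-id column, dropped here) of the test used above to pick the boundary
#segments that receive Dirichlet conditions: a segment whose vertices all sit
#exactly on the ice front (levelset == 0) is skipped, every other boundary
#segment is constrained.  A 'Tria' mesh has 2 front nodes per segment.
import numpy as np

numbernodesfront = 2
ice_levelset = np.array([0., 0., -1., -1.])      #per-vertex levelset, 0 = ice front
segments = np.array([[1, 2], [2, 3], [3, 4]])    #1-based boundary segments
values = ice_levelset[segments - 1]
segmentsfront = 1 - values                       #equals 1 exactly where a vertex is on the front
dirichlet = np.nonzero(np.sum(segmentsfront, axis=1) != numbernodesfront)[0]
print(dirichlet)                                 #[1 2]: segments not entirely on the front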
Esempio n. 19
0
def SetIceShelfBC(md, icefrontfile=''):
    """
	SETICESHELFBC - Create the boundary conditions for stressbalance and thermal models for a  Ice Shelf with Ice Front

	   Neumann BC are used on the ice front (an ARGUS contour around the ice front
	   must be given in input)
	   Dirichlet BC are used elsewhere for stressbalance

	   Usage:
	      md=SetIceShelfBC(md,varargin)

	   Example:
	      md=SetIceShelfBC(md);
	      md=SetIceShelfBC(md,'Front.exp');

	   See also: SETICESHEETBC, SETMARINEICESHEETBC
	"""

    #node on Dirichlet (boundary and ~icefront)
    if icefrontfile:
        if not os.path.exists(icefrontfile):
            raise IOError(
                "SetIceShelfBC error message: ice front file '%s' not found." %
                icefrontfile)
        [nodeinsideicefront,
         dum] = ContourToMesh(md.mesh.elements, md.mesh.x, md.mesh.y,
                              icefrontfile, 'node', 2)
        nodeonicefront = numpy.logical_and(md.mesh.vertexonboundary,
                                           nodeinsideicefront.reshape(-1))
    else:
        nodeonicefront = numpy.zeros((md.mesh.numberofvertices), bool)

    #Dirichlet nodes: boundary vertices that are not on the ice front
    pos = numpy.nonzero(
        numpy.logical_and(md.mesh.vertexonboundary,
                          numpy.logical_not(nodeonicefront)))[0]
    md.stressbalance.spcvx = float('nan') * numpy.ones(
        md.mesh.numberofvertices)
    md.stressbalance.spcvy = float('nan') * numpy.ones(
        md.mesh.numberofvertices)
    md.stressbalance.spcvz = float('nan') * numpy.ones(
        md.mesh.numberofvertices)
    md.stressbalance.referential = float('nan') * numpy.ones(
        (md.mesh.numberofvertices, 6))
    md.stressbalance.loadingforce = 0 * numpy.ones(
        (md.mesh.numberofvertices, 3))

    #Icefront position
    pos = numpy.nonzero(nodeonicefront)[0]
    md.mask.ice_levelset[pos] = 0

    #First find segments that are not completely on the front
    if m.strcmp(md.mesh.elementtype(), 'Penta'):
        numbernodesfront = 4
    elif m.strcmp(md.mesh.elementtype(), 'Tria'):
        numbernodesfront = 2
    else:
        raise RuntimeError('mesh type not supported yet')
    if any(md.mask.ice_levelset <= 0):
        values = md.mask.ice_levelset[md.mesh.segments[:, 0:-1] - 1]
        segmentsfront = 1 - values
        segments = numpy.nonzero(
            numpy.sum(segmentsfront, axis=1) != numbernodesfront)[0]
        #Find all nodes for these segments and spc them
        pos = md.mesh.segments[segments, 0:-1] - 1
    else:
        pos = numpy.nonzero(md.mesh.vertexonboundary)[0]
    md.stressbalance.spcvx[pos] = 0
    md.stressbalance.spcvy[pos] = 0
    md.stressbalance.spcvz[pos] = 0

    #Dirichlet Values
    if isinstance(md.inversion.vx_obs, numpy.ndarray) and numpy.size(
            md.inversion.vx_obs,
            axis=0) == md.mesh.numberofvertices and isinstance(
                md.inversion.vy_obs, numpy.ndarray) and numpy.size(
                    md.inversion.vy_obs, axis=0) == md.mesh.numberofvertices:
        #reshape to rank-2 if necessary to match spc arrays
        if numpy.ndim(md.inversion.vx_obs) == 1:
            md.inversion.vx_obs = md.inversion.vx_obs.reshape(-1, 1)
        if numpy.ndim(md.inversion.vy_obs) == 1:
            md.inversion.vy_obs = md.inversion.vy_obs.reshape(-1, 1)
        print(
            "      boundary conditions for stressbalance model: spc set as observed velocities"
        )
        md.stressbalance.spcvx[pos] = md.inversion.vx_obs[pos]
        md.stressbalance.spcvy[pos] = md.inversion.vy_obs[pos]
    else:
        print(
            "      boundary conditions for stressbalance model: spc set as zero"
        )

    #Create zeros basalforcings and smb
    md.smb.initialize(md)
    md.basalforcings.initialize(md)

    #Deal with other boundary conditions
    if numpy.all(numpy.isnan(md.balancethickness.thickening_rate)):
        md.balancethickness.thickening_rate = numpy.zeros(
            (md.mesh.numberofvertices, 1))
        print(
            "      no balancethickness.thickening_rate specified: values set as zero"
        )
    md.masstransport.spcthickness = float('nan') * numpy.ones(
        (md.mesh.numberofvertices, 1))
    md.balancethickness.spcthickness = float('nan') * numpy.ones(
        (md.mesh.numberofvertices, 1))
    md.damage.spcdamage = float('nan') * numpy.ones(
        (md.mesh.numberofvertices, 1))

    if isinstance(md.initialization.temperature, numpy.ndarray) and numpy.size(
            md.initialization.temperature, axis=0) == md.mesh.numberofvertices:
        md.thermal.spctemperature = float('nan') * numpy.ones(
            (md.mesh.numberofvertices, 1))
        if hasattr(md.mesh, 'vertexonsurface'):
            pos = numpy.nonzero(md.mesh.vertexonsurface)[0]
            md.thermal.spctemperature[pos] = md.initialization.temperature[
                pos]  #impose observed temperature on surface
        if not isinstance(md.basalforcings.geothermalflux,
                          numpy.ndarray) or not numpy.size(
                              md.basalforcings.geothermalflux,
                              axis=0) == md.mesh.numberofvertices:
            md.basalforcings.geothermalflux = numpy.zeros(
                (md.mesh.numberofvertices, 1))
    else:
        print(
            "      no thermal boundary conditions created: no observed temperature found"
        )

    return md
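#Minimal, standalone illustration (synthetic size) of the convention both BC
#routines above rely on: spc arrays start as NaN (vertex unconstrained) and
#only the positions to be constrained are overwritten with a value.
import numpy as np

numberofvertices = 5
spcvx = float('nan') * np.ones(numberofvertices)        #NaN everywhere: no constraint
pos = np.array([0, 4])                                  #0-based vertices to constrain
spcvx[pos] = 0.                                         #clamp vx to zero there
print(np.nonzero(np.logical_not(np.isnan(spcvx)))[0])   #[0 4]: constrained vertices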
Esempio n. 20
0
def ReadData(fid):    # {{{
	"""
	READDATA - ...
	 
	    Usage:
	       field=ReadData(fid)
	"""

	#read field
	try:
		length=struct.unpack('i',fid.read(struct.calcsize('i')))[0]

		fieldname=struct.unpack('%ds' % length,fid.read(length))[0][:-1]
		time=struct.unpack('d',fid.read(struct.calcsize('d')))[0]
		step=struct.unpack('i',fid.read(struct.calcsize('i')))[0]

		type=struct.unpack('i',fid.read(struct.calcsize('i')))[0]
		M=struct.unpack('i',fid.read(struct.calcsize('i')))[0]
		if   type==1:
			field=numpy.array(struct.unpack('%dd' % M,fid.read(M*struct.calcsize('d'))),dtype=float)
		elif type==2:
			field=struct.unpack('%ds' % M,fid.read(M))[0][:-1]
		elif type==3:
			N=struct.unpack('i',fid.read(struct.calcsize('i')))[0]
			#read an M x N matrix, stored as M rows of N doubles
			field=numpy.zeros(shape=(M,N),dtype=float)
			for i in range(M):
				field[i,:]=struct.unpack('%dd' % N,fid.read(N*struct.calcsize('d')))
		else:
			raise TypeError("cannot read data of type %d" % type)

		#Process units here FIXME: this should not be done here!
		yts=365.0*24.0*3600.0
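		#yts: seconds in a (365-day) year; per-second fields below are scaled by it to per-year units, and Time is divided by it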
		if fieldname in ('BalancethicknessThickeningRate','HydrologyWaterVx','HydrologyWaterVy','Vx','Vy','Vz','Vel','BasalforcingsGroundediceMeltingRate','SmbMassBalance','CalvingCalvingrate'):
			field = field*yts
		elif m.strcmp(fieldname,'Time'):
			field = field/yts
		elif m.strcmp(fieldname,'TotalSmb'):
			field = field/10.**12.*yts #(GigaTon/year)


		result=OrderedDict()
		result['fieldname']=fieldname
		result['time']=time
		result['step']=step
		result['field']=field

	except struct.error as e:
		result=None

	return result
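#Hedged sketch (not part of ISSM) of a writer for the record layout implied by
#ReadData above, type 1 (vector of doubles) only.  Each record is
#<i: len(name)+1><name + NUL byte><d: time><i: step><i: type><i: M><payload>.
#Written against Python 2 str semantics to match the reader; under Python 3 the
#name would have to be encoded to bytes first.
import struct

def write_vector_record(fid, fieldname, time, step, values):
	name = fieldname + '\x00'
	fid.write(struct.pack('i', len(name)))
	fid.write(struct.pack('%ds' % len(name), name))
	fid.write(struct.pack('d', time))
	fid.write(struct.pack('i', step))
	fid.write(struct.pack('i', 1))               #type 1: vector of doubles
	fid.write(struct.pack('i', len(values)))     #M
	fid.write(struct.pack('%dd' % len(values), *values))

#	with open('synthetic.outbin', 'wb') as fid:
#		write_vector_record(fid, 'Vx', 0.0, 1, [1.0, 2.0, 3.0])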