def master(self):
    from ooflib.SWIG.common import mpitools
    _rank = mpitools.Rank()
    if self.isShared():
        if self._shared_with[0] < _rank:
            return self._shared_with[0]
    return _rank
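# Self-contained sketch of the ownership rule above (pure Python, no
# OOF imports; master_rank and its arguments are hypothetical
# stand-ins): a shared node is mastered by the smallest rank among
# the local rank and the sharing list, assuming _shared_with is kept
# sorted in ascending order.
def master_rank(local_rank, shared_with):
    if shared_with and shared_with[0] < local_rank:
        return shared_with[0]
    return local_rank

assert master_rank(2, [0, 3]) == 0  # a lower-ranked sharer owns the node
assert master_rank(0, [2, 3]) == 0  # the local rank is lowest
assert master_rank(1, []) == 1      # unshared: owned locally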
def fmsg(*args):
    from ooflib.SWIG.common import ooferror
    if _debug_mode:
        lock.acquire()
        try:
            stack = traceback.extract_stack()
            depth = len(stack)
            filename = string.split(stack[-2][0], '/')[-1]
            func = stack[-2][2]
            line = stack[-2][1]
            try:
                thread = "%02d" % threadstate.findThreadNumber()
            except:
                thread = '??'
            if parallel_enable.enabled():
                from ooflib.SWIG.common import mpitools
                rank = "%02d" % mpitools.Rank()
            else:
                rank = '--'
            print >> sys.stderr, \
                ('-%04d-%s-%s' % (depth, thread, rank)) + '-'*(depth-1), \
                '%s(%d):%s' % (filename, line, func), \
                string.join(map(str, args), ' ')
        finally:
            lock.release()
def queryElem(self, menuitem, position):
    context = self.gfxwindow().topwho(*self.whoset)
    if not context:
        return
    if parallel_enable.enabled():
        meshIPC.ipcmeshmenu.Mesh_Info_Query(targetname="Element",
                                            position=position,
                                            mesh=context.path())
        if mpitools.Rank() > 0:
            return
    if position == self.last_position:
        meshpos = self.querier.mesh_position
    else:
        if config.dimension() == 2:
            try:
                context.restoreCachedData(self.gfxwindow().displayTime)
                try:
                    meshpos = self.meshlayer.undisplaced_from_displaced(
                        self.gfxwindow(), position)
                finally:
                    context.releaseCachedData()
            except ooferror.ErrBoundsError:
                return
        elif config.dimension() == 3:
            # skip handling displaced meshes in 3D for now.
            # TODO 3D Later: figure this out
            meshpos = position
    femesh = context.getObject()
    skeleton = context.getSkeleton()
    selem = skeleton.enclosingElement(meshpos)
    felem = femesh.getElement(selem.meshindex)
    self.finishQuery(felem, felem, "Element", position, meshpos)
def fmsg(*args):
    ## Importing ooferror here hangs the program when a binary data
    ## file is loaded if fmsg is used in OOFIdleCallback.  WTF?  There
    ## may be no real need to import ooferror here, but it was
    ## imported here once, and mysteriously caused a problem.
    ## (There's a comment in mainthreadGUI that refers to this
    ## comment.)
    # from ooflib.SWIG.common import ooferror
    if _debug_mode:
        lock.acquire()
        try:
            stack = traceback.extract_stack()
            depth = len(stack)
            filename = string.split(stack[-2][0], '/')[-1]
            func = stack[-2][2]
            line = stack[-2][1]
            try:
                thread = "%02d" % threadstate.findThreadNumber()
            except:
                thread = '??'
            if parallel_enable.enabled():
                from ooflib.SWIG.common import mpitools
                rank = "%02d" % mpitools.Rank()
            else:
                rank = '--'
            print >> sys.stderr, \
                ('-%04d-%s-%s' % (depth, thread, rank)) + '-'*(depth-1), \
                '%s(%d):%s' % (filename, line, func), \
                string.join(map(str, args), ' ')
        finally:
            lock.release()
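# Worked example of the fmsg prefix (computed by hand from the format
# above): with depth=7, thread='03', and a serial run (rank='--'),
#   '-%04d-%s-%s' % (7, '03', '--') + '-'*(7-1)  ==  '-0007-03---------'
# so a full line reads, e.g.,
#   -0007-03--------- skeleton.py(123):refine some message
# The run of dashes grows with stack depth, making nesting visible at
# a glance in the debug output.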
def dumpTrace(start=0, end=-1):
    ## Use start=-3, end=-2 if you only want to see the calling
    ## function of the function that calls dumpTrace.
    lock.acquire()
    try:
        stack = traceback.extract_stack()
        depth = len(stack)
        thread = threadstate.findThreadNumber()
        if parallel_enable.enabled():
            from ooflib.SWIG.common import mpitools
            rank = "%02d" % mpitools.Rank()
        else:
            rank = '--'
        lines = ['+++-%04d-%02d-%s--------- debug.dumpTrace --------------'
                 % (depth, thread, rank)]
        for line in stack[start:end]:
            lines.append('+++%30s:%3d\t%s\t\t%s'
                         % (line[0], line[1], line[2], line[3]))
        lines.append('+++-------------- end trace -----------------')
        print >> sys.stderr, string.join(lines, '\n')
    finally:
        lock.release()
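# Usage sketch, per the comment above (assumes this module is
# imported as ooflib.common.debug): print only the immediate caller
# of the function that invokes dumpTrace.
#   debug.dumpTrace(start=-3, end=-2)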
def __init__(self, nodes, index):
    SkeletonElementBase.__init__(self, nodes)
    skeletonselectable.SkeletonSelectable.__init__(self, index)
    # Although the CSkeletonElement keeps a list of CSkeletonNodes,
    # the extra information in the Python SkeletonNodes isn't
    # available unless we keep a list of them here, as well.  It's
    # possible that we could move all the extra info into the
    # CSkeletonNode class and swig it.  Canonical ordering will still
    # work, because it's based on indices.
    self.nodes = nodes
    for node in nodes:
        node.addElement(self)
    # When a real mesh is made from the skeleton in which this
    # element lives, self.meshindex gets assigned the index of that
    # element.  This index is the same for all the real meshes.
    self.meshindex = None
    self.ID = object_id.ObjectID()
    # process ID (only meaningful in parallel mode)
    if parallel_enable.enabled():
        from ooflib.SWIG.common import mpitools
        self._procID = mpitools.Rank()
    else:
        self._procID = None
def start_sockets_Back_End():
    if parallel_enable.enabled():
        from ooflib.common.IO import socket2me
        from ooflib.SWIG.common import mpitools
        ## receive from the front end which port to listen on
        s_address = mpitools.recv_bcast_string(0)
        s_name = mpitools.recv_bcast_string(0)
        ## create a unique SocketInput object
        socket2me.makeSocketInput(s_name, int(s_address), mpitools.Rank())
def start_parallel_machine():
    global _rank
    try:
        from ooflib.SWIG.common import mpitools
    except ImportError:
        pass
    else:
        mpitools.Initialize(sys.argv)
        _rank = mpitools.Rank()
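# Hypothetical sketch of gating front-end-only work on the cached
# rank; it assumes _rank keeps a serial default of None when mpitools
# is unavailable (the default is not shown in this excerpt).
def front_end_only(fn):
    def wrapper(*args, **kwargs):
        if _rank is None or _rank == 0:  # serial run, or the MPI front end
            return fn(*args, **kwargs)
    return wrapper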
def _deletematerial(menuitem, name):
    matmanager.delete(name)
    if parallel_enable.enabled():
        try:
            from ooflib.SWIG.common import mpitools
            if mpitools.Rank() == 0:
                materialmenuIPC.ipcmaterialmenu.Delete(name=name)
        except ImportError:
            pass
def _newmaterial(menuitem, name, material_type=None):
    # Backward compatibility with the old version, which had no
    # material_type parameter.
    material_type = material_type or MATERIALTYPE_BULK
    matmanager.add(name, material_type)
    if parallel_enable.enabled():
        try:
            from ooflib.SWIG.common import mpitools
            if mpitools.Rank() == 0:
                materialmenuIPC.ipcmaterialmenu.New(name=name)
        except ImportError:
            pass
def _removeprop(menuitem, name, property):
    matmanager.remove_prop(name, property)
    if parallel_enable.enabled():
        try:
            from ooflib.SWIG.common import mpitools
            if mpitools.Rank() == 0:
                materialmenuIPC.ipcmaterialmenu.Remove_property(
                    name=name, property=property)
        except ImportError:
            pass
    switchboard.notify("material changed", name)
    switchboard.notify("redraw")
def _wrapped_assignmat(menuitem, material, microstructure, pixels):
    _assignmat(material, microstructure, pixels)
    if parallel_enable.enabled():
        try:
            from ooflib.SWIG.common import mpitools
            if mpitools.Rank() == 0:
                materialmenuIPC.ipcmaterialmenu.Assign(
                    material=material, microstructure=microstructure,
                    pixels=pixels)
        except ImportError:
            pass
def pipeToSocket(menuitem, args, kwargs):
    ## front end encodes a menuitem and its arguments and sends it
    ## to the back end
    global socketPipe
    if mpitools.Rank() == 0:
        ## grab the menuitem and encode it
        binarypipe.startCmd(menuitem)
        ## iterate over all the keyword arguments and encode them
        for (key, value) in kwargs.items():
            binarypipe.argument(key, value)
        ## send the encoded data to the back end
        binarypipe.endCmd()
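# Convenience wrapper sketch reusing only the three framing calls
# shown above (send_command is hypothetical, not OOF API): every
# command crosses the pipe as startCmd, zero or more argument
# records, then endCmd, so the back end can split the binary stream
# into commands unambiguously.
def send_command(pipe, menuitem, **kwargs):
    pipe.startCmd(menuitem)
    for key, value in kwargs.items():
        pipe.argument(key, value)
    pipe.endCmd()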
def getThreadedParallelWorker(menuitem, argtuple, argdict):
    ## parallel workers
    try:
        from ooflib.SWIG.common import mpitools
        if mpitools.Rank() == 0:
            ## The front end runs threaded workers with text progress bars.
            return ParallelTextThreadedWorker(menuitem, argtuple, argdict)
        ## The back end also runs a threaded worker, but no text
        ## progress bar is available.  Instead, the "front end" of the
        ## back end waits on the socket parser.
        return ParallelThreadedWorker(menuitem, argtuple, argdict)
    except ImportError:
        raise
def passive(self, skeleton, stopper):
    # Parallel mode -- share-holder
    from ooflib.SWIG.common import mpitools
    _rank = mpitools.Rank()
    # Get the (local) node index.
    index = mpitools.Recv_Int(stopper)
    node = skeleton.getNodeWithIndex(index)
    # Get the neighbor nodes (but not the ones shared with stoppers,
    # since they are already accounted for at "stopper").
    nbrNodes = node.neighborNodes(skeleton)
    positions = []
    for nd in nbrNodes:
        if _rank == nd.master():  # my own node
            positions.append(skeleton.nodePosition(nd).x)
            positions.append(skeleton.nodePosition(nd).y)
    mpitools.Send_DoubleVec(positions, stopper)
def passive(self, skeleton, node, stopper):
    from ooflib.SWIG.common import mpitools
    _rank = mpitools.Rank()
    # now, find all neighboring nodes
    neighbors = node.neighborNodes(skeleton)
    # among these, collect ones of which I'm the master.
    neighbors = [n for n in neighbors if _rank == n.master()]
    if neighbors:
        coords = [[skeleton.nodePosition(n).x, skeleton.nodePosition(n).y]
                  for n in neighbors]
        coords = reduce(lambda x, y: x + y, coords)
    else:
        coords = []
    # send the nodes
    mpitools.Send_DoubleVec(coords, stopper)
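# The flat [x0, y0, x1, y1, ...] packing used by both passive methods
# above implies that the receiving (active) side unpacks in the same
# order.  A self-contained sketch of that convention (_unpack_coords
# is a hypothetical helper, not OOF API):
def _unpack_coords(flat):
    assert len(flat) % 2 == 0
    return [(flat[i], flat[i+1]) for i in range(0, len(flat), 2)]

assert _unpack_coords([0.0, 1.0, 2.5, 3.5]) == [(0.0, 1.0), (2.5, 3.5)]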
def callerID(depth=-3):
    if _debug_mode:
        lock.acquire()
        try:
            stack = traceback.extract_stack()
            thread = threadstate.findThreadNumber()
            if parallel_enable.enabled():
                from ooflib.SWIG.common import mpitools
                rank = "%02d" % mpitools.Rank()
            else:
                rank = ""
            line = stack[depth]
            return '+++%02d++%s+++%30s:%3d\t%s\t\t%s' % \
                (thread, rank, line[0], line[1], line[2], line[3])
        finally:
            lock.release()
def cleanup(shutdownfn, exitstatus):
    # Turn off logging, so that window closing, etc. won't be logged.
    mainmenu.OOF.haltLog()
    if parallel_enable.enabled():
        try:
            from ooflib.SWIG.common import mpitools
            if mpitools.Rank() == 0:
                # tell back-end to start quit process
                mainmenu.OOF.LoadData.IPC.Quit()
        except ImportError:
            pass
    # Wait on a subthread for threads to finish, then call shutdownfn
    # on the main thread.  When called from the GUI callback for the
    # Quit command, shutdownfn is common.IO.GUI.shutdown.
    subthread.execute_immortal(waitForThreads, (shutdownfn, exitstatus))
def msg(*args):
    if _debug_mode:
        lock.acquire()
        try:
            stack = traceback.extract_stack()
            depth = len(stack)
            thread = threadstate.findThreadNumber()
            if parallel_enable.enabled():
                from ooflib.SWIG.common import mpitools
                rank = "%02d" % mpitools.Rank()
            else:
                rank = '--'
            print >> sys.stderr, \
                ('-%04d-%02d-%s' % (depth, thread, rank)) + '-'*(depth-1), \
                string.join(map(str, args), ' ')
        finally:
            lock.release()
def queryElem(self, menuitem, position):
    context = self.getSkeletonContext()
    if not context:
        return
    skeleton = context.getObject()
    if parallel_enable.enabled():
        skeletonIPC.smenu.Skel_Info_Query(targetname="Element",
                                          position=position,
                                          skeleton=context.path())
        if mpitools.Rank() > 0:
            return
    elem = skeleton.enclosingElement(position)
    if not elem:
        reporter.report("Try to click ON an Element, dude.")
    else:
        self.finishQuery(context, elem, "Element", position)
def queryNode(self, menuitem, position):
    context = self.getSkeletonContext()
    if not context:
        return
    skeleton = context.getObject()
    if parallel_enable.enabled():
        skeletonIPC.smenu.Skel_Info_Query(targetname="Node",
                                          position=position,
                                          skeleton=context.path())
        if mpitools.Rank() > 0:
            return
    node = skeleton.nearestNode(position)
    if not node:
        reporter.report("Avoid clicking OUTSIDE of a skeleton.")
    else:
        self.finishQuery(context, node, "Node", position)
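# queryElem and queryNode above share one parallel pattern: broadcast
# the query through the IPC menu so every back-end process runs it,
# then let only rank 0 finish the GUI-side reporting.  A minimal
# sketch of that pattern (hypothetical helper names):
def _parallel_query(broadcast, finish, rank):
    broadcast()      # every process runs the same query
    if rank > 0:
        return       # back-end processes stop here
    finish()         # only the front end reports the result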
def parallel_info_subproblem(menuitem, subproblem):
    subpctxt = ooflib.engine.subproblemcontext.subproblems[subproblem]
    subpctxt.begin_reading()
    reportstring = ""
    nGatherNodes = 0
    nElements = 0
    TotalArea = 0
    try:
        nElements = subpctxt.nelements()
        TotalArea = subpctxt.area()
        reportstring = """*** Subproblem Info ***
%s
%d elements
%d nodes
area = %g\n""" % (subpctxt.getObject(), nElements, subpctxt.nnodes(),
                  TotalArea)
        nGatherNodes = subpctxt.getObject().GatherNumNodes()
    finally:
        subpctxt.end_reading()
    if mpitools.Rank() == 0:
        # Get output from other processes
        for proc in range(mpitools.Size()):
            if proc != 0:
                reportstring += "(From remote process %d:)\n" % proc
                reportstring += mpitools.Recv_String(proc)
                nElements += mpitools.Recv_Int(proc)
                TotalArea += mpitools.Recv_Double(proc)
        reportstring += """***** Totals (union) *****
%d elements
%d unique nodes
Total Area = %g""" % (nElements, nGatherNodes, TotalArea)
        reporter.report(reportstring)
    else:
        mpitools.Send_String(reportstring, 0)
        mpitools.Send_Int(nElements, 0)
        mpitools.Send_Double(TotalArea, 0)
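# The gather above relies on each remote rank's Send_* sequence
# (string, int, double) exactly mirroring rank 0's Recv_* sequence.
# A miniature with an in-process stand-in for the transport (outbox
# and gather_totals are hypothetical, illustration only):
outbox = {1: ["(report 1)", 10, 2.5], 2: ["(report 2)", 8, 1.5]}

def gather_totals(size):
    nelements, area = 0, 0.0
    for proc in range(1, size):
        report, nel, a = outbox[proc]  # same order as the sends
        nelements += nel
        area += a
    return nelements, area

assert gather_totals(3) == (18, 4.0)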
# with its operation, modification and maintenance.  However, to
# facilitate maintenance we ask that before distributing modified
# versions of this software, you first contact the authors at
# [email protected].

from ooflib.common import debug
from ooflib.common import primitives
from ooflib.common.IO import automatic
from ooflib.common.IO import oofmenu
from ooflib.common.IO import parallelmainmenu
from ooflib.common.IO import parameter
from ooflib.common.IO import socket2me
from ooflib.SWIG.common import mpitools
import ooflib.common.microstructure

_rank = mpitools.Rank()
_size = mpitools.Size()

# OOF.LoadData.IPC.Microstructure
msmenu = parallelmainmenu.ipcmenu.addItem(
    oofmenu.OOFMenuItem('Microstructure', secret=1, no_log=1))

def newMicrostructure_Parallel(menuitem, name, width, height,
                               width_in_pixels, height_in_pixels):
    # Only for the back-end processes
    global _rank
    if _rank == 0:
        return
    # The argument list below is assumed (the original excerpt ends
    # mid-call), following the serial New-Microstructure command.
    ms = ooflib.common.microstructure.Microstructure(
        name,
        primitives.iPoint(width_in_pixels, height_in_pixels),
        primitives.Point(width, height))
"Non-string data passed to socketWrite.") def pipeToSocket(menuitem, args, kwargs): ## front end encodes a menuitem and its arguments and sends it ## to the back end global socketPipe if mpitools.Rank() == 0: binarypipe.startCmd(menuitem) ## grabs the menuitem and encodes it for (key, value) in kwargs.items(): binarypipe.argument(key, value) ## iterate over all the keyword arguments ## and encode them binarypipe.endCmd() ## Send the encoded data to back end else: pass socketPort = None socketPipe = None binarypipe = None if mpitools.Rank() == 0: socketPort = SocketPort() ## front end creates a socket input socketPipe = SocketPipe(socketPort) ## front end also creates a socket pipe ## to send all the menuitems in binary mode from ooflib.common.IO.binarydata import BinaryDataFile binarypipe = BinaryDataFile(socketPipe)