Example #1
 def applyAMR(self, skeleton):  # for adaptive mesh refinement
     prog = progress.getProgress("Refine", progress.DEFINITE)
     try:
         newSkeleton = skeleton.improperCopy(fresh=True)
         return self.refinement(skeleton, newSkeleton, None, prog)
     finally:
         prog.finish()
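Most of the examples in this collection follow the same pattern: obtain a progress object, update it from inside the work loop, poll it for interruption, and always finish it. Below is a minimal sketch of that pattern, assuming only the calls that appear in these snippets (progress.getProgress, progress.DEFINITE, setFraction, setMessage, stopped, finish); do_work is a hypothetical placeholder.

def process(items):
    prog = progress.getProgress("Processing", progress.DEFINITE)
    try:
        n = len(items)
        for i, item in enumerate(items):
            do_work(item)                 # hypothetical per-item work
            if prog.stopped():            # user requested an interrupt
                return None
            prog.setFraction(float(i + 1) / n)
            prog.setMessage("%d/%d items" % (i + 1, n))
        return items
    finally:
        prog.finish()                     # always release the progress bar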
Example #2
 def apply(self, skeleton, context):
     prog = progress.getProgress("Refine", progress.DEFINITE)
     try:
         newSkeleton = skeleton.improperCopy(skeletonpath=context.path())
         return self.refinement(skeleton, newSkeleton, context, prog)
     finally:
         prog.finish()
Example #3
 def apply(self, skeleton, context):
     prog = progress.getProgress("Refine", progress.DEFINITE)
     try:
         newSkeleton = skeleton.improperCopy(skeletonpath=context.path())
         return self.refinement(skeleton, newSkeleton, context, prog)
     finally:
         prog.finish()
Example #4
def _flux(mesh, elements, coords, flux):
    ans = []
    prog = progress.getProgress("Evaluating flux", progress.DEFINITE)
    ## TODO OPT: elements may be a generator, and converting it to a list
    ## is ugly and wasteful, but it's the only way to get its length,
    ## which we only need for the progress bar.  We should either get
    ## rid of the progress bar, or find another way to get the number
    ## of elements.  Perhaps giving the Output access to the domain
    ## would work.  Output.evaluate isn't always called with a domain,
    ## though.  See MeshDataGUI.updateData().
    elist = list(elements)
    nel = len(elist)
    try:
        ecount = 0
        for elem, ecoords in itertools.izip(elist, coords):
            mesh.begin_all_subproblems(elem)
            fluxes = elem.outputFluxes(mesh, flux, ecoords)
            ans.append(fluxes)
            mesh.end_all_subproblems(elem)
            ecount += 1
            prog.setFraction((1. * ecount) / nel)
            prog.setMessage("%d/%d elements" % (ecount, nel))
        return utils.flatten1(ans)
    finally:
        prog.finish()
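The TODO above notes that the element generator is converted to a list only so its length can feed the progress fraction. A hedged alternative sketch follows, assuming it is acceptable for the progress display to fall back to a count-only message when the total is unknown; the name _flux_nolist is hypothetical.

def _flux_nolist(mesh, elements, coords, flux):
    ans = []
    prog = progress.getProgress("Evaluating flux", progress.DEFINITE)
    try:
        ecount = 0
        for elem, ecoords in itertools.izip(elements, coords):
            mesh.begin_all_subproblems(elem)
            ans.append(elem.outputFluxes(mesh, flux, ecoords))
            mesh.end_all_subproblems(elem)
            ecount += 1
            prog.setMessage("%d elements" % ecount)  # no total available
        return utils.flatten1(ans)
    finally:
        prog.finish()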
Example #5
def createPixelGroups(menuitem, image, name_template):
    if name_template is None:
        name_template = '%c'            # pre-2.0.4 behavior
        
    immidgecontext = imagecontext.imageContexts[image]
    ms = immidgecontext.getMicrostructure()
    mscontext = ooflib.common.microstructure.microStructures[ms.name()]
    immidge = immidgecontext.getObject()
    prog = progress.getProgress("AutoGroup", progress.DEFINITE)
    prog.setMessage("Categorizing pixels...")
    mscontext.begin_writing()
    try:
        newgrpname = autogroupMP.autogroup(ms, immidge, name_template)
    finally:
        prog.finish()
        mscontext.end_writing()

    switchboard.notify('redraw')
        
    if not prog.stopped():      # not interrupted
        # Do this only after releasing the ms write lock!  If the main
        # thread is waiting for the read lock, then switchboard.notify
        # will deadlock here.
        if newgrpname:
            # We only have to send the notification for the most
            # recently created group.
            switchboard.notify("new pixel group", ms.findGroup(newgrpname))
        switchboard.notify("changed pixel groups", ms.name())
Example #6
 def __init__(self, array):
     prog = progress.getProgress("CategoryMap", progress.DEFINITE)
     n = 0
     # array is a list of lists of Ints, which are pixel categories
     self.pxls = {}  # lists of pixels, keyed by category
     # TODO OPT: use iterator?
     for j in range(len(array)):
         sublist = array[j]
         for i in range(len(sublist)):
             if config.dimension() == 2:
                 where = primitives.iPoint(i, j)
                 category = sublist[i]
                 try:
                     self.pxls[category].append(where)
                 except KeyError:
                     self.pxls[category] = [where]
             elif config.dimension() == 3:
                 subsublist = sublist[i]
                 total = len(array) * len(sublist) * len(subsublist)
                 for k in range(len(subsublist)):
                     where = primitives.iPoint(i, j, k)
                     category = subsublist[k]
                     try:
                         self.pxls[category].append(where)
                     except KeyError:
                         self.pxls[category] = [where]
                     prog.setMessage("Got category for %d/%d voxels" %
                                     (n, total))
                     prog.setFraction(float(n) / total)
                     n = n + 1
     prog.finish()
Example #7
 def applyAMR(self, skeleton):  # for adaptive mesh refinement
     prog = progress.getProgress("Refine", progress.DEFINITE)
     try:
         newSkeleton = skeleton.improperCopy(fresh=True)
         return self.refinement(skeleton, newSkeleton, None, prog)
     finally:
         prog.finish()
Example #8
 def rationalize(self, skel, targets, criterion):
     prog = progress.getProgress("Rationalize", progress.DEFINITE)
     for rationalizer in self.rationalizers:
         rationalizer.findAndRationalize(skel, targets, criterion)
         if prog.stopped(): 
             break
     prog.finish()
Example #9
 def apply(self, oldskeleton, context):
     prog = progress.getProgress("SplitQuads", progress.DEFINITE)
     try:
         skel = oldskeleton.properCopy(skeletonpath=context.path())
         elements = self.targets(skel, context, copy=1)
         done = 0  # No. of quads split.
         savedE = 0.0  # Energy saved by the splits
         nel = len(elements)
         for i in range(nel):        
             element = elements[i]
             if element.nnodes()==4 and element.active(skel):
                 changes = self.split_how(skel, element)
                 bestchange = self.criterion(changes, skel)
                 if bestchange is not None:
                     done += 1
                     savedE += bestchange.deltaE(skel,
                                                 self.criterion.alpha)
                     bestchange.accept(skel)                
             if prog.stopped():  
                 return None
             prog.setFraction(1.0*(i+1)/nel)
             prog.setMessage("%d/%d elements" % (i+1, nel))
         reporter.report("%d quadrilateral%s split." % (done, 's'*(done!=1)))
         skel.cleanUp()
         return skel
     finally:
         prog.finish()
Example #10
def createPixelGroups(menuitem, image, name_template):
    if name_template is None:
        name_template = '%c'  # pre-2.0.4 behavior

    immidgecontext = imagecontext.imageContexts[image]
    ms = immidgecontext.getMicrostructure()
    mscontext = ooflib.common.microstructure.microStructures[ms.name()]
    immidge = immidgecontext.getObject()
    prog = progress.getProgress("AutoGroup", progress.DEFINITE)
    prog.setMessage("Categorizing pixels...")
    mscontext.begin_writing()
    try:
        newgrpname = autogroupMP.autogroup(ms, immidge, name_template)
    finally:
        prog.finish()
        mscontext.end_writing()

    switchboard.notify('redraw')

    if not prog.stopped():  # not interrupted
        # Do this only after releasing the ms write lock!  If the main
        # thread is waiting for the read lock, then switchboard.notify
        # will deadlock here.
        if newgrpname:
            # We only have to send the notification for the most
            # recently created group.
            switchboard.notify("new pixel group", ms.findGroup(newgrpname))
        switchboard.notify("changed pixel groups", ms.name())
Example #11
def evolve(meshctxt, endtime):
    global linsys_dict
    starttime = meshctxt.getObject().getCurrentTime()

    # We're solving a static problem if endtime is the same as the
    # current time, or if there are no non-static steppers and output
    # is requested at a single time.
    staticProblem = (starttime == endtime
                     or (not meshctxt.timeDependent()
                         and meshctxt.outputSchedule.isSingleTime()))
    # "continuing" is true if we're continuing an earlier time
    # evolution, in which case we can assume that all Fields have
    # their correct initial values. "continuing" is never true for
    # static problems.
    continuing = (not staticProblem and
                  isinstance(meshctxt.status, meshstatus.Solved))

    targettime = endtime

    if starttime > endtime:
        raise ooferror2.ErrSetupError("End time must not precede current time.")

    meshctxt.solver_precompute(solving=True)

    meshctxt.setStatus(meshstatus.Solving())
    meshctxt.timeDiff = endtime - starttime # used to get next endtime in GUI

    meshctxt.cacheCurrentData()
    meshctxt.outputSchedule.reset(starttime, continuing)
    prog = ProgressData(starttime, endtime,
                        progress.getProgress("Solving", progress.DEFINITE))
    try:
        # Get an ordered list of subproblems to be solved.  First,
        # create tuples containing a subproblem and its solveOrder.
        subprobctxts = [(s.solveOrder, s) for s in meshctxt.subproblems()
                       if s.time_stepper is not None and s.solveFlag]
        subprobctxts.sort()     # sort by solveOrder
        subprobctxts = [s[1] for s in subprobctxts] # strip solveOrder

        # Initialize statistics.
        for subp in subprobctxts:
            subp.resetStats()
            
        if not continuing:
            # Initialize static fields in all subproblems.  For static
            # problems, this computes the actual solution.
            try:
                linsys_dict = initializeStaticFields(subprobctxts, starttime,
                                                     prog)
                # Initial output comes *after* solving static fields.
                # For fully static problems, this is the only output.
                _do_output(meshctxt, starttime)
            except ooferror2.ErrInterrupted:
                raise
            except ooferror2.ErrError, exc:
                meshctxt.setStatus(meshstatus.Failed(exc.summary()))
                raise
            except Exception, exc:
                meshctxt.setStatus(meshstatus.Failed(`exc`))
                raise
Example #12
 def apply(self, oldskeleton, context):
     prog = progress.getProgress("SplitQuads", progress.DEFINITE)
     try:
         skel = oldskeleton.properCopy(skeletonpath=context.path())
         elements = self.targets(skel, context, copy=1)
         done = 0  # No. of quads split.
          savedE = 0.0  # Energy saved by the splits
         nel = len(elements)
         for i in range(nel):
             element = elements[i]
             if element.nnodes() == 4 and element.active(skel):
                 changes = self.split_how(skel, element)
                 bestchange = self.criterion(changes, skel)
                 if bestchange is not None:
                     done += 1
                     savedE += bestchange.deltaE(skel, self.criterion.alpha)
                     bestchange.accept(skel)
             if prog.stopped():
                 return None
             prog.setFraction(1.0 * (i + 1) / nel)
             prog.setMessage("%d/%d elements" % (i + 1, nel))
         reporter.report("%d quadrilateral%s split." % (done, 's' *
                                                        (done != 1)))
         skel.cleanUp()
         return skel
     finally:
         prog.finish()
Example #13
 def rationalize(self, skel, context, targets, criterion):
     prog = progress.getProgress("Rationalize", progress.DEFINITE)
     for rationalizer in self.rationalizers:
         rationalizer(skel, context, targets, criterion,
                      rationalizer.findAndFix)
         if prog.stopped(): 
             break
     prog.finish()
Example #14
def readDataFile(filename, menu):
    prog = progress.getProgress(os.path.basename(filename), progress.DEFINITE)
    try:
        source = menuparser.ProgFileInput(filename, prog)
        parser = menuparser.MenuParser(source, menu)
        parser.run()
    finally:
        prog.finish()
Example #15
def readDataFile(filename, menu):
    prog = progress.getProgress(os.path.basename(filename), progress.DEFINITE)
    try:
        source = menuparser.ProgFileInput(filename, prog)
        parser = menuparser.MenuParser(source, menu)
        parser.run()
    finally:
        prog.finish()
Example #16
 def __init__(self, filename, **kwargs):
     self.prog = progress.getProgress(os.path.basename(filename),
                                       progress.DEFINITE)
     scriptloader.ScriptLoader.__init__(
         self,
         filename=filename,
         locals=sys.modules['__main__'].__dict__,
         **kwargs)  
Example #17
        def read(self, filepattern):
            dirname = os.path.dirname(filepattern)
            items = os.listdir(dirname)
            escaped_pattern = string.replace(
                re.escape(os.path.basename(filepattern)), "\\*", ".*?")
            files = []
            for item in items:
                match = re.match(escaped_pattern, item)
                if match != None:
                    span = match.span()
                    if span[0] == 0 and span[1] == len(item):
                        files.append(os.path.join(dirname, item))

            prog = progress.getProgress(os.path.basename(filepattern),
                                        progress.DEFINITE)

            try:
                # Look at just the first file to get some info
                z = 0
                rows, npts = self.readfile(files[0], prog, z)
                nx = len(rows[0])
                ny = len(rows)
                nz = len(files)
                dx = rows[0][1].position[0] - rows[0][0].position[0]
                dy = rows[1][0].position[1] - rows[0][0].position[1]
                dz = 1
                pxlsize = primitives.Point(dx, dy, dz)
                size = rows[-1][-1].position + pxlsize
                od = orientmapdata.OrientMap(primitives.iPoint(nx, ny, nz),
                                             size)
                count = 0
                for row in rows:
                    count += 1
                    for datum in row:
                        self.addDatum(od, datum, pxlsize, ny, count, npts,
                                      prog, nz)

                # now look at the rest of the files
                for file in files[1:]:
                    z = z + dz
                    rows, nrowpts = self.readfile(file, prog, z)
                    npts = npts + nrowpts
                    count = 0
                    for row in rows:
                        count += 1
                        if len(row) != nx:
                            raise ooferror.ErrUserError(
                                "Orientation map data appears to be incomplete"
                            )

                        for datum in row:
                            self.addDatum(od, datum, pxlsize, ny, count, npts,
                                          prog, nz)

            finally:
                prog.finish()

            return od
Example #18
 def autoSkeleton(menuitem, name, microstructure, x_periodicity,
                  y_periodicity, maxscale, minscale, units, threshold):
     # Run the actual callback in the main namespace, so that it can
     # use menu and registeredclass names trivially.
     prog = progress.getProgress(menuitem.name, progress.DEFINITE)
     utils.OOFrun(_autoSkeletonMain, prog, name, microstructure,
                  x_periodicity, y_periodicity, False, maxscale, minscale,
                  units, threshold)
     prog.finish()
Example #19
 def read(self, filename):
     datafile = file(filename, "r")
     prog = progress.getProgress(os.path.basename(filename),
                                 progress.DEFINITE)
     try:
         od = self._read(datafile, prog)
     finally:
         prog.finish()
     return od
Example #20
 def read(self, filename):
     datafile = file(filename, "r")
     prog = progress.getProgress(os.path.basename(filename),
                                 progress.DEFINITE)
     try:
         od = self._read(datafile, prog)
     finally:
         prog.finish()
     return od
Example #21
 def autoSkeleton(menuitem, name, microstructure, left_right_periodicity,
                  top_bottom_periodicity, maxscale, minscale, units,
                  threshold):
     # Run the actual callback in the main namespace, so that it can
     # use menu and registeredclass names trivially.
     ## TODO: Why is the progress bar title showing up as "Thread-XX"?
     prog = progress.getProgress(menuitem.name, progress.DEFINITE)
     utils.OOFrun(_autoSkeletonMain, prog, name, microstructure,
                  left_right_periodicity, top_bottom_periodicity, False,
                  maxscale, minscale, units, threshold)
Example #22
 def autoSkeleton(menuitem, name, microstructure, left_right_periodicity,
                  top_bottom_periodicity, front_back_periodicity, maxscale,
                  minscale, units, threshold):
     # Run the actual callback in the main namespace, so that it can
     # use menu and registeredclass names trivially.
     prog = progress.getProgress(menuitem.name, progress.DEFINITE)
     utils.OOFrun(_autoSkeletonMain, prog, name, microstructure,
                  left_right_periodicity, top_bottom_periodicity,
                  front_back_periodicity, maxscale, minscale, units,
                  threshold)
Example #23
 def autoSkeleton(menuitem, name, microstructure,
                   left_right_periodicity, top_bottom_periodicity,
                   maxscale, minscale, units, threshold):
     # Run the actual callback in the main namespace, so that it can
     # use menu and registeredclass names trivially.
     prog = progress.getProgress(menuitem.name, progress.DEFINITE)
     utils.OOFrun(_autoSkeletonMain, prog, name, microstructure,
                  left_right_periodicity, top_bottom_periodicity,
                  False, maxscale, minscale,
                  units, threshold)
Example #24
 def rationalize(self, skel, context, targets, criterion):
     prog = progress.getProgress("Rationalize", progress.DEFINITE)
     try:
         for rationalizer in self.rationalizers:
             rationalizer(skel, context, targets, criterion,
                          rationalizer.findAndFix)
             if prog.stopped():
                 break
     finally:
         prog.finish()
Example #25
 def rationalize(self, skel, context, targets, criterion):
     prog = progress.getProgress("Rationalize", progress.DEFINITE)
     for ratreg in Rationalizer.registry:
         # Create a Rationalizer from a Registration.  Since fixAll
         # doesn't use the Rationalizer's parameters, just use the
         # default values.
         ratmethod = ratreg()
         ratmethod(skel, context, targets, criterion, ratmethod.fixAll)
         if prog.stopped():
             break
     prog.finish()
Example #26
    def read(self, filename):
        hklfile = file(filename, "r")
        ## TODO  OPT: Use lineiter = iter(hklfile) instead of iter(lines).
        ## Then it's not necessary to create an array of lines.
        lines = hklfile.readlines()
        lineiter = iter(lines)
        line = lineiter.next()
        while not line.startswith('XCells'):
            line = lineiter.next()
        xcells = string.atoi(string.split(line)[1])
        line = lineiter.next()
        ycells = string.atoi(string.split(line)[1])
        line = lineiter.next()
        xstep = string.atof(string.split(line)[1])
        line = lineiter.next()
        ystep = string.atof(string.split(line)[1])
        line = lineiter.next()

        od = orientmapdata.OrientMap(
            primitives.iPoint(xcells, ycells),
            primitives.Point(xcells * xstep, ycells * ystep))

        while not string.split(line)[0] == 'Phase':
            line = lineiter.next()
        prog = progress.getProgress(os.path.basename(filename),
                                    progress.DEFINITE)
        try:
            count = 0
            npts = xcells * ycells
            for line in lineiter:
                vals = string.split(line)
                phase = vals[0]
                x = string.atof(vals[1])
                y = string.atof(vals[2])
                angles = map(string.atof, vals[5:8])
                mad = string.atof(vals[8])  # mean angular deviation
                ij = primitives.iPoint(int(round(x / xstep)),
                                       ycells - 1 - int(round(y / ystep)))
                try:
                    self.phaselists[phase].append(ij)
                except KeyError:
                    self.phaselists[phase] = [ij]
                self.set_angle(
                    od, ij,
                    corientation.COrientBunge(*map(math.radians, angles)))
                prog.setFraction(float(count) / npts)
                prog.setMessage("%d/%d orientations" % (count, npts))
                count += 1
                if prog.stopped():
                    return None
        finally:
            prog.finish()
        return od
Example #27
    def read(self, filename):
        hklfile = file(filename, "r")
        lineiter = iter(hklfile)
        line = lineiter.next()
        while not line.startswith('XCells'):
            line = lineiter.next()
        xcells = string.atoi(string.split(line)[1])
        line = lineiter.next()
        ycells = string.atoi(string.split(line)[1])
        line = lineiter.next()
        xstep = string.atof(string.split(line)[1])
        line = lineiter.next()
        ystep = string.atof(string.split(line)[1])
        line = lineiter.next()


        od = orientmapdata.OrientMap(
            primitives.iPoint(xcells, ycells),
            primitives.Point(xcells*xstep, ycells*ystep))
        
        while not string.split(line)[0] == 'Phase':
            line = lineiter.next()
        prog = progress.getProgress(os.path.basename(filename),
                                    progress.DEFINITE)
        try:
            count = 0
            npts = xcells*ycells
            for line in lineiter:
                vals = string.split(line)
                phase = vals[0]
                x = string.atof(vals[1])
                y = string.atof(vals[2])
                angles = map(string.atof, vals[5:8])
                mad = string.atof(vals[8])  # mean angular deviation
                ij = primitives.iPoint(
                    int(round(x/xstep)),
                    ycells-1-int(round(y/ystep)))
                try:
                    self.phaselists[phase].append(ij)
                except KeyError:
                    self.phaselists[phase] = [ij]
                self.set_angle(
                    od, ij, 
                    corientation.COrientBunge(*map(math.radians, angles)))
                prog.setFraction(float(count)/npts)
                prog.setMessage("%d/%d orientations" % (count, npts))
                count += 1
                if prog.stopped():
                    return None
        finally:
            prog.finish()
        return od
Example #28
def _scalarFunctionOutput(mesh, elements, coords, f):
    ans = []
    t = mesh.getCurrentTime()
    prog = progress.getProgress("Function evaluation", progress.DEFINITE)
    ecount = 0 
    nel = mesh.nelements()
    for element, coordlist in itertools.izip(elements, coords):
        realcoords = itertools.imap(element.from_master, coordlist)
        ans.extend(outputval.ScalarOutputVal(f(coord, t)) for coord in realcoords)
        ecount += 1
        prog.setFraction((1.*ecount)/nel)
        prog.setMessage("%d/%d elements" % (ecount, nel))
    prog.finish()
    return ans
Example #29
def _loadNodes(menuitem, skeleton, points):
    # read nodes as (x,y) tuples of floats
    skelcontext = skeletoncontext.skeletonContexts[skeleton]
    skeleton = skelcontext.getObject()
    npts = len(points)
    prog = progress.getProgress("Loading nodes", progress.DEFINITE)
    try:
        for i, node in enumerate(points):
            prog.setMessage("%d/%d" % (i, npts))
            prog.setFraction(float(i) / npts)
            skeleton.addNode(node)
    finally:
        prog.finish()
    if config.dimension() == 2:
        skelcontext.updateGroupsAndSelections()
    switchboard.notify(('who changed', 'Skeleton'), skelcontext)
Example #30
def _loadFaceSelection(menuitem, skeleton, faces):
    skelcontext = skeletoncontext.skeletonContexts[skeleton]
    skel = skelcontext.getObject()
    trackerlist = skelcontext.faceselection.currentSelection()
    tracker = trackerlist.selected[skel]
    n = len(faces)
    prog = progress.getProgress("Loading face selection", progress.DEFINITE)
    try:
        for f, (i, j, k) in enumerate(faces):
            prog.setMessage("%d/%d" % (f, n))
            prog.setFraction(float(f) / n)
            tracker.add(skel.getFaceByNodeIndices(i, j, k))
        tracker.write()
    finally:
        prog.finish()
    switchboard.notify(skelcontext.faceselection.mode().changedselectionsignal,
                       selection=skelcontext.faceselection)
Example #31
def _loadNodeSelection(menuitem, skeleton, nodes):
    skelcontext = skeletoncontext.skeletonContexts[skeleton]
    skel = skelcontext.getObject()
    trackerlist = skelcontext.nodeselection.currentSelection()
    tracker = trackerlist.selected[skel]
    prog = progress.getProgress("Loading node selection", progress.DEFINITE)
    ntotal = len(nodes)
    try:
        for n, nodeSource in enumerate(nodes):
            prog.setMessage("%d/%d" % (n, ntotal))
            prog.setFraction(float(n) / ntotal)
            tracker.add(skel.getNode(nodeSource))
        tracker.write()
    finally:
        prog.finish()
    switchboard.notify(skelcontext.nodeselection.mode().changedselectionsignal,
                       selection=skelcontext.nodeselection)
Example #32
def _loadElements(menuitem, skeleton, nodes):
    # read elements as tuples of node indices
    skelcontext = skeletoncontext.skeletonContexts[skeleton]
    skeleton = skelcontext.getObject()
    nel = len(nodes)
    prog = progress.getProgress("Loading elements", progress.DEFINITE)
    try:
        for i, nodelist in enumerate(nodes):
            prog.setMessage("%d/%d" % (i, nel))
            prog.setFraction(float(i) / nel)
            skeleton.loadElement(nodelist)
    finally:
        prog.finish()
    # skelcontext.getTimeStamp(None).increment()
    if config.dimension() == 2:
        skelcontext.updateGroupsAndSelections()
    switchboard.notify(('who changed', 'Skeleton'), skelcontext)
Example #33
def _loadElementSelection(menuitem, skeleton, elements):
    skelcontext = skeletoncontext.skeletonContexts[skeleton]
    skel = skelcontext.getObject()
    trackerlist = skelcontext.elementselection.currentSelection()
    tracker = trackerlist.selected[skel]
    prog = progress.getProgress("Loading element selection", progress.DEFINITE)
    n = len(elements)
    try:
        for i, elementSource in enumerate(elements):
            prog.setMessage("%d/%d" % (i, n))
            prog.setFraction(float(i) / n)
            tracker.add(skel.getElement(elementSource))
        tracker.write()
    finally:
        prog.finish()
    switchboard.notify(
        skelcontext.elementselection.mode().changedselectionsignal,
        selection=skelcontext.elementselection)
Example #34
        def read(self, filename):
            prog = progress.getProgress(os.path.basename(filename),
                                        progress.DEFINITE)
            rows, npts = self.readfile(filename, prog)

            try:

                nx = len(rows[0])
                ny = len(rows)
                count = 0
                for row in rows:
                    count += 1
                    if len(row) != nx:
                        raise ooferror.ErrUserError(
                            "Orientation map data appears to be incomplete")

                # pixel size
                dx = rows[0][1].position[0] - rows[0][0].position[0]
                dy = rows[1][0].position[1] - rows[0][0].position[1]
                pxlsize = primitives.Point(dx, dy)

                # If we assume that the points are in the centers of the
                # pixels, then the actual physical size is one pixel bigger
                # than the range of the xy values.
                size = rows[-1][-1].position + pxlsize

                debug.fmsg("nx=", nx, "ny=", ny, "size=", size, "pxlsize=",
                           pxlsize)

                if config.dimension() == 2:
                    od = orientmapdata.OrientMap(primitives.iPoint(nx, ny),
                                                 size)
                else:
                    od = orientmapdata.OrientMap(primitives.iPoint(nx, ny, 1),
                                                 size)
                count = 0
                for row in rows:
                    for datum in row:
                        self.addDatum(od, datum, pxlsize, ny, count, npts,
                                      prog)

            finally:
                prog.finish()
            return od
Example #35
        def draw(self, gfxwindow, device):
            self.lock.acquire()
            meshctxt = mainthread.runBlock(self.who().resolve, (gfxwindow,))
            mesh = meshctxt.getObject()
            meshctxt.restoreCachedData(self.getTime(meshctxt))
            prog = progress.getProgress("Contour plot", progress.DEFINITE)
            try:
                meshctxt.precompute_all_subproblems()
                # self.draw_subcells(mesh, device)
                device.comment("PlainContourDisplay")
                device.set_lineWidth(self.width)
                device.set_lineColor(self.color)

                # clevels is a list of contour values.
                # evalues is a list of lists of function values for the
                # nodes of each element.
                clevels, evalues = self.find_levels(mesh, self.what)
                ecount = 0
                for element in mesh.elements():
                    ## TODO MERGE: hideEmptyElements used to be a
                    ## gfxwindow setting, but in 3D it was removed.
                    ## Mesh filters now accomplish the same thing.
                    if (not gfxwindow.settings.hideEmptyElements) or \
                           ( element.material() is not None ) :
                        (contours, elmin, elmax)  = contour.findContours(
                            mesh, element, self.where,
                            self.what, clevels,
                            self.nbins, 0)
                        for cntr in contours:
                            for loop in cntr.loops:
                                device.draw_polygon(loop)
                            for curve in cntr.curves:
                                device.draw_curve(curve)
                    ecount += 1
                    prog.setFraction((1.*ecount)/mesh.nelements())
                    prog.setMessage("drawing %d/%d elements" %
                                    (ecount, mesh.nelements()))
                self.contour_min = min(clevels)
                self.contour_max = max(clevels)
                self.contour_levels = clevels
                contour.clearCache()
            finally:
                self.lock.release()
                meshctxt.releaseCachedData()
                prog.finish()
Example #36
        def draw(self, gfxwindow, device):
            self.lock.acquire()
            meshctxt = mainthread.runBlock(self.who().resolve, (gfxwindow,))
            mesh = meshctxt.getObject()
            meshctxt.restoreCachedData(self.getTime(meshctxt, gfxwindow))
            prog = progress.getProgress("Contour plot", progress.DEFINITE)
            try:
                meshctxt.precompute_all_subproblems()
        #        self.draw_subcells(mesh, device)
                device.comment("PlainContourDisplay")
                device.set_lineWidth(self.width)
                device.set_lineColor(self.color)

                # clevels is a list of contour values.
                # evalues is a list of lists of function values for the
                # nodes of each element.
                clevels, evalues = self.find_levels(mesh, self.what)
                ecount = 0
                for element in mesh.element_iterator():
                    if (not gfxwindow.settings.hideEmptyElements) or \
                           ( element.material() is not None ) :
                        (contours, elmin, elmax)  = contour.findContours(
                            mesh, element, self.where,
                            self.what, clevels,
                            self.nbins, 0)
                        for cntr in contours:
                            for loop in cntr.loops:
                                device.draw_polygon(loop)
                            for curve in cntr.curves:
                                device.draw_curve(curve)
                    ecount += 1
                    prog.setFraction((1.*ecount)/mesh.nelements())
                    prog.setMessage("drawing %d/%d elements" %
                                    (ecount, mesh.nelements()))
                self.contour_min = min(clevels)
                self.contour_max = max(clevels)
                self.contour_levels = clevels
                contour.clearCache()
            finally:
                self.lock.release()
                meshctxt.releaseCachedData()
                prog.finish()
Example #37
def autoPixelGroup(menuitem, grouper, delta, gamma, minsize, contiguous,
                   name_template, clear):
    ms = grouper.mscontext.getObject()
    if "%n" not in name_template:
        name_template = name_template + "%n"
    prog = progress.getProgress('AutoGroup', progress.DEFINITE)
    prog.setMessage('Grouping pixels...')
    grouper.mscontext.begin_writing()
    newgrpname = None
    try:
        newgrpname = statgroups.statgroups(ms, grouper.cobj, delta, gamma,
                                           minsize, contiguous, name_template,
                                           clear)
    finally:
        prog.finish()
        grouper.mscontext.end_writing()
    if newgrpname:
        switchboard.notify("new pixel group", ms.findGroup(newgrpname))
    switchboard.notify("changed pixel groups", ms.name())
    switchboard.notify("redraw")
Example #38
def _flux(mesh, elements, coords, flux):
    ans = []
    prog = progress.getProgress("Evaluating flux", progress.DEFINITE)
    #nel = mesh.nelements()      # No!  Should be len(elements), but
                                # elements is a generator.
    elist = list(elements)
    nel = len(elist)
    try:
        ecount = 0
        for element, ecoords in itertools.izip(elist, coords):
            mesh.begin_all_subproblems(element)
    ##        element.begin_material_computation(mesh)
            ans.append(element.outputFluxes(mesh, flux, ecoords))
    ##        element.end_material_computation(mesh)
            mesh.end_all_subproblems(element)
            ecount += 1
            prog.setFraction((1.*ecount)/nel)
            prog.setMessage("%d/%d elements" % (ecount, nel))
        return utils.flatten1(ans)
    finally:
        prog.finish()
Example #39
def _flux(mesh, elements, coords, flux):
    ans = []
    prog = progress.getProgress("Evaluating flux", progress.DEFINITE)
    #nel = mesh.nelements()      # No!  Should be len(elements), but
    # elements is a generator.
    elist = list(elements)
    nel = len(elist)
    try:
        ecount = 0
        for element, ecoords in itertools.izip(elist, coords):
            mesh.begin_all_subproblems(element)
            ##        element.begin_material_computation(mesh)
            ans.append(element.outputFluxes(mesh, flux, ecoords))
            ##        element.end_material_computation(mesh)
            mesh.end_all_subproblems(element)
            ecount += 1
            prog.setFraction((1. * ecount) / nel)
            prog.setMessage("%d/%d elements" % (ecount, nel))
        return utils.flatten1(ans)
    finally:
        prog.finish()
Example #40
    def apply(self, oldskeleton, context):
        prog = progress.getProgress("Merge", progress.DEFINITE)
        try:
            skel = oldskeleton.properCopy(skeletonpath=context.path())
            elements = self.targets(skel, context, copy=1)
            random.shuffle(elements)
            # A dict keyed by element, to avoid re-examining merged
            # elements that no longer exist.
            processed = {}  # Merged triangles
            done = 0  # No. of triangles merged.
            savedE = 0.0  # Saved energy from the merge
            nel = len(elements)
            for i in range(nel):
                element = elements[i]
                if (element not in processed and 
                    element.nnodes()==3 and element.active(skel)):
                    changes = self.mergeTriangles(element, skel, processed)
                    bestchange = self.criterion(changes, skel)
                    if bestchange is not None:
                        done += 2
                        savedE += bestchange.deltaE(skel,
                                                    self.criterion.alpha)
                        bestchange.accept(skel)
                        # Now that these two are merged, we need to indicate
                        # that these are not to be looked at again.
                        for e in bestchange.removed:
                            processed[e] = 1
                if prog.stopped():
                    return None
                else:
                    prog.setFraction(1.0*(i+1)/nel)
                    prog.setMessage("%d/%d" % (i+1, nel))

            reporter.report("Merged %d triangles, saving energy %f" %\
                            (done, savedE))
            skel.cleanUp()
            return skel
        finally:
            prog.finish()
Example #41
    def apply(self, oldskeleton, context):
        prog = progress.getProgress("Merge", progress.DEFINITE)
        try:
            skel = oldskeleton.properCopy(skeletonpath=context.path())
            elements = self.targets(skel, context, copy=1)
            random.shuffle(elements)
            # A dict keyed by element, to avoid re-examining merged
            # elements that no longer exist.
            processed = {}  # Merged triangles
            done = 0  # No. of triangles merged.
            savedE = 0.0  # Saved energy from the merge
            nel = len(elements)
            for i in range(nel):
                element = elements[i]
                if element not in processed and \
                   element.nnodes()==3 and element.active(skel):
                    changes = self.mergeTriangles(element, skel, processed)
                    bestchange = self.criterion(changes, skel)
                    if bestchange is not None:
                        done += 2
                        savedE += bestchange.deltaE(skel,
                                                    self.criterion.alpha)
                        bestchange.accept(skel)
                        # Now that these two are merged, we need to indicate
                        # that these are not to be looked at again.
                        for e in bestchange.removed:
                            processed[e] = 1
                if prog.stopped():
                    return None
                else:
                    prog.setFraction(1.0*(i+1)/nel)
                    prog.setMessage("%d/%d" % (i+1, nel))

            reporter.report("Merged %d triangles, saving energy %f" %\
                            (done, savedE))
            skel.cleanUp()
            return skel
        finally:
            prog.finish()
Example #42
    def draw(self, gfxwindow, device):
        self.lock.acquire()
        meshctxt = mainthread.runBlock(self.who().resolve, (gfxwindow, ))
        mesh = meshctxt.getObject()
        meshctxt.restoreCachedData(self.getTime(meshctxt, gfxwindow))
        prog = progress.getProgress("Contour plot", progress.DEFINITE)
        try:
            self.contour_max = None
            self.contour_min = None
            self.contour_levels = []
            meshctxt.precompute_all_subproblems()
            device.comment("FilledContourDisplay")
            # clevels is a list of contour values.
            # evalues is a list of lists of function values
            # for the nodes of each element.
            clevels, evalues = self.find_levels(mesh, self.what)
            minval = min(clevels)
            maxval = max(clevels)
            valrange = maxval - minval
            if valrange == 0.0:
                valrange = 1
            factor = 1. / valrange
            offset = -minval * factor
            if config.dimension() == 2:
                device.set_colormap(self.colormap)
                ecount = 0
                # TODO: we might want to use itertools here
                for element, values in zip(mesh.element_iterator(), evalues):
                    if ( not gfxwindow.settings.hideEmptyElements ) or \
                           ( element.material() is not None ) :
                        (contours, elmin,
                         elmax) = contour.findContours(mesh, element,
                                                       self.where, self.what,
                                                       clevels, self.nbins, 1)

                        # Before drawing anything, fill the element with the
                        # largest contour value below its lowest detected value.
                        vmin = min(values)
                        prevcntour = None
                        for cntour in contours:
                            if cntour.value > elmin:
                                if prevcntour:
                                    device.set_fillColor(offset +
                                                         prevcntour.value *
                                                         factor)
                                else:
                                    device.set_fillColor(0.0)
                                break
                            prevcntour = cntour
                        else:
                            # If all of the contours were below the
                            # element, fill the element with the color
                            # from the top of the colormap.
                            device.set_fillColor(1.0)
                        # Find element perimeter
                        edges = element.perimeter()
                        mcorners = [[0.0]] * element.ncorners()
                        corners = self.where.evaluate(mesh, edges, mcorners)
                        device.fill_polygon(
                            primitives.pontify(primitives.Polygon(corners)))

                        # Now fill contours
                        for cntour in contours:
                            # This is harder than it looks.
                            if len(cntour.loops) == 1:
                                device.set_fillColor(offset +
                                                     cntour.value * factor)
                                device.fill_polygon(cntour.loops[0])
                            elif len(cntour.loops) > 1:
                                device.set_fillColor(offset +
                                                     cntour.value * factor)
                                # Compound Polygon fill
                                device.fill_polygon(cntour.loops)
                    ecount += 1
                    prog.setFraction((1.0 * ecount) / mesh.nelements())
                    prog.setMessage("drawing %d/%d elements" %
                                    (ecount, mesh.nelements()))
                #  self.draw_subcells(mesh, device)
                contour.clearCache()

            elif config.dimension() == 3:
                # TODO 3D: this should be more seamless when meshes
                # use vtk objects.  Also, will need to update for
                # quadratic elements.
                lut = self.colormap.getVtkLookupTable(self.levels, minval,
                                                      maxval)
                numnodes = mesh.nnodes()
                nodes = mesh.node_iterator()
                points = vtk.vtkPoints()
                points.Allocate(numnodes, numnodes)
                data = vtk.vtkDoubleArray()
                data.SetNumberOfComponents(0)
                data.SetNumberOfValues(numnodes)

                while not nodes.end():
                    node = nodes.node()
                    points.InsertNextPoint(node[0], node[1], node[2])
                    nodes.next()

                grid = vtk.vtkUnstructuredGrid()
                nelements = mesh.nelements()
                grid.Allocate(nelements, nelements)
                elements = mesh.element_iterator()

                # this will reset some values. TODO 3D: think about
                # plotting discontinuous stuff with vtk - could add
                # points to points object here
                for element, values in zip(elements, evalues):
                    elnodes = element.ncorners()
                    for i in xrange(elnodes):
                        data.SetValue(element.getPointIds().GetId(i),
                                      values[i])
                    grid.InsertNextCell(element.GetCellType(),
                                        element.getPointIds())

                grid.SetPoints(points)
                grid.GetPointData().SetScalars(data)
                device.draw_unstructuredgrid_with_lookuptable(grid,
                                                              lut,
                                                              mode="point")

                self.lookuptable = lut

            self.contour_min = minval
            self.contour_max = maxval
            self.contour_levels = clevels

        finally:
            self.lock.release()
            meshctxt.releaseCachedData()
            prog.finish()
Example #43
 def apply(self, oldskeleton, context):
     prog = progress.getProgress("Relax", progress.DEFINITE)
     prog.setMessage("Preparing to relax...")
     return oldskeleton.deputyCopy()
Example #44
    def postProcess(self, context):
        ## This function first creates a mesh with custom-made properties,
        ## then assigns "temporary" properties to pixels
        ## and specifies BCs and equations.
        ## Next, it iterates toward the solution using the specified
        ## solver, accepts all node moves, and redraws the skeleton.
        ## It repeats these steps until the iteration criterion has been met.
        ## Finally, the (temporary) mesh and the rest of the temporary
        ## objects are cleaned up.

        ## create progress bar
        prog = progress.getProgress("Relax", progress.DEFINITE)

        ## get skeleton and calculate energy
        skeleton = context.getObject()
        before = skeleton.energyTotal(self.alpha)
        self.count = 0

        while self.goodToGo(skeleton) and not prog.stopped():
            ## femesh is created and properties are assigned
            mesh = self.create_mesh(context) # mesh context object

            ## define displacement field
            self.define_fields(mesh)
            ## activate the mechanical balance equation
            self.activate_equations(mesh)
            mesh.changed("Relaxing")
            ## constrain the nodes on the boundaries to only slide
            ## along the edge
            self.set_boundary_conditions(mesh)

            # solve linear system.
            self.coreProcess(mesh, mesh.get_default_subproblem())
            if prog.stopped():
                break

            # Update positions of nodes in the Skeleton
            context.begin_writing()
            try:
                self.update_node_positions(skeleton, mesh)
            finally:
                context.end_writing()

            mesh.lockAndDelete()

            switchboard.notify("skeleton nodes moved", context)
            switchboard.notify("redraw")

            self.updateIteration() ## update iteration manager machinery
            prog.setFraction(1.0*self.count/self.iterations)
            prog.setMessage("%d/%d iterations" % (self.count, self.iterations))

        prog.finish()

        ## calculate total energy improvement, if any.
        after = skeleton.energyTotal(self.alpha)
        if before:
            rate = 100.0*(before-after)/before
        else:
            rate = 0.0
        diffE = after - before
        reporter.report("Relaxation complete: deltaE = %10.4e (%6.3f%%)"
                        % (diffE, rate))

        if config.dimension() == 2:
            del self.topBoundaryCondition
            del self.leftBoundaryCondition
            del self.bottomBoundaryCondition
            del self.rightBoundaryCondition

        materialmanager.materialmanager.delete_prop(self.stiffness.name())
        materialmanager.materialmanager.delete_prop(self.skelRelRate.name())
        propertyregistration.AllProperties.delete(self.stiffness.name())
        propertyregistration.AllProperties.delete(self.skelRelRate.name())
        materialmanager.materialmanager.delete_secret(self.materialName)
Example #45
    def apply(self, oldskeleton, context):
        prog = progress.getProgress("SnapNodes", progress.DEFINITE)
        prog.setMessage("examining elements...")
        skel = oldskeleton.deputyCopy()
        skel.activate()

        # Examine elements and create NodeSnapper objects
        movedict = {}                   # dict of all moves, keyed by element
        movelists = {}         # dict of lists of all moves, keyed by priority
        elements = self.targets(context)

        # Big try-finally block to ensure that
        # self.targets.cleanSelection() gets called if something goes
        # wrong.
        try:
            stored_tps = {}  # keyed by node pair
            nel = len(elements)
            for i, element in enumerate(elements):
                if element.homogeneity(skel.MS) == 1.0:
                    continue  # no need to even look at it!
                if element.active(oldskeleton):
                    #nnodes = element.nnodes()
                    # Common segments will be looked at only once.
                    # With a small Skeleton, this takes more time due to
                    # additional book-keeping stuff.
                    transitionpts = []
                    for n, nodes in zip(range(element.getNumberOfEdges()), 
                                              element.segment_node_iterator()):
                        #for n in range(nnodes):
                        #key = skeletonnode.canonical_order(
                        #    element.nodes[n], element.nodes[(n+1)%nnodes])
                        key = skeletonnode.canonical_order(nodes[0], nodes[1])
                        try:
                            transitionpts.append(stored_tps[key])
                        except KeyError:
                            tp = element.transitionPoint(skel, n)
                            stored_tps[key] = tp
                            transitionpts.append(tp)
                    nodemotion = getNodeSnapper(element, transitionpts)
                    if nodemotion is not None:
                        movedict[element] = nodemotion
                        try:
                            movelists[nodemotion.priority].append(nodemotion)
                        except KeyError:
                            movelists[nodemotion.priority] = [nodemotion]
                if prog.stopped() :
                    return None
                prog.setFraction(1.0*(i+1)/nel)
                prog.setMessage("examined %d/%d elements" % (i+1, nel))

            # Perform node motions in random order within their
            # priority classes
            priorities = movelists.keys()
            priorities.sort()
            # A set to keep track of moved nodes & use them to assist
            # movers that are associated with these nodes.
            movednodes = set()
            for p in priorities:
                movelist = movelists[p]
                random.shuffle(movelist)
                nmv = len(movelist)
                for i in range(nmv):
                    move = movelist[i]
                    move.perform(skel, self.criterion, movedict, movednodes)
                    if prog.stopped():
                        return None
                    prog.setFraction(1.0*(i+1)/nmv)
                    prog.setMessage("Type %d: %d/%d" % (p, i+1, nmv))
            nmoved = len(movednodes)
        finally:  
            self.targets.cleanSelection()
            prog.finish()

        # Only need to clean up the skeleton if we're going to return it.
        skel.cleanUp()
        reporter.report("Snapped %d node%s." % (nmoved, "s"*(nmoved != 1)))
        return skel
Example #46
 def makeProgress(self):
     return progress.getProgress(self.__class__.__name__,
                                  self.iteration.get_progressbar_type())
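Unlike the other snippets, this factory lets the iteration scheme choose the progress-bar type instead of hard-coding progress.DEFINITE. A hedged sketch of how a caller might use such a factory; solve, one_step, max_iterations, and tolerance are hypothetical names, and only setMessage is used so the sketch does not depend on the bar type.

 def solve(self, matrix, rhs):
     prog = self.makeProgress()
     try:
         for k in range(self.max_iterations):
             residual = self.one_step(matrix, rhs)   # hypothetical iteration step
             prog.setMessage("iteration %d, residual %g" % (k + 1, residual))
             if prog.stopped() or residual < self.tolerance:
                 break
     finally:
         prog.finish()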
Example #47
    def draw(self, gfxwindow, device): # 2D only
        self.lock.acquire()
        meshctxt = mainthread.runBlock(self.who().resolve, (gfxwindow,))
        mesh = meshctxt.getObject()
        meshctxt.restoreCachedData(self.getTime(meshctxt))
        prog = progress.getProgress("Contour plot", progress.DEFINITE)
        try:
            self.contour_max = None
            self.contour_min = None
            self.contour_levels = []
            meshctxt.precompute_all_subproblems()
            # device.comment("FilledContourDisplay")
            # clevels is a list of contour values.
            # evalues is a list of lists of function values
            # for the nodes of each element.
            clevels, evalues = self.find_levels(mesh, self.what)
            minval = min(clevels)
            maxval = max(clevels)
            nlevels = len(clevels)
            valrange = maxval - minval
            if valrange == 0.0:
                valrange = 1
            factor = 1./valrange
            offset = -minval*factor

            device.set_colormap(self.colormap)
            ecount = 0
            for element, values in zip(mesh.elements(), evalues):
                ## TODO MERGE: hideEmptyElements used to be a
                ## gfxwindow setting, but in 3D it was removed.
                ## Mesh filters now accomplish the same thing.
                if ( not gfxwindow.settings.hideEmptyElements ) or \
                       ( element.material() is not None ) :
                    (contours, elmin, elmax)  = contour.findContours(
                        mesh, element, self.where,
                        self.what, clevels,
                        self.nbins, 1)

                    # Before drawing anything, fill the element with the
                    # largest contour value below its lowest detected value.
                    vmin = min(values)
                    prevcntour = None
                    for cntour in contours:
                        if cntour.value > elmin:
                            if prevcntour:
                                device.set_fillColor(
                                    offset + prevcntour.value*factor)
                            else:
                                device.set_fillColor(0.0)
                            break
                        prevcntour = cntour
                    else:
                        # If all of the contours were below the
                        # element, fill the element with the color
                        # from the top of the colormap.
                        device.set_fillColor(1.0)
                    # Find element perimeter
                    edges = element.perimeter()
                    mcorners = [[0.0]]*element.ncorners()
                    corners = self.where.evaluate(mesh, edges, mcorners)
                    device.fill_polygon(primitives.pontify(
                            primitives.Polygon(corners)))

                    # Now fill contours
                    for cntour in contours:
                        # This is harder than it looks.
                        if len(cntour.loops) == 1:
                            device.set_fillColor(
                                offset + cntour.value*factor)
                            device.fill_polygon(cntour.loops[0])
                        elif len(cntour.loops) > 1:
                            device.set_fillColor(
                                offset + cntour.value*factor)
                            # Compound Polygon fill
                            device.fill_polygon(cntour.loops)
                ecount += 1
                prog.setFraction((1.0*ecount)/mesh.nelements())
                prog.setMessage("drawing %d/%d elements" %
                                (ecount, mesh.nelements()))
            #  self.draw_subcells(mesh, device)
            contour.clearCache()
                
            self.contour_min = minval
            self.contour_max = maxval
            self.contour_levels = clevels

        finally:
            self.lock.release()
            meshctxt.releaseCachedData()
            prog.finish()
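The fill colors in this example come from mapping each contour value into the [0, 1] range that device.set_fillColor() expects, via offset + value*factor. A minimal sketch of that normalization (the helper name is hypothetical, not part of the library):

def normalized_color(value, minval, maxval):
    # Map value into [0, 1]; equivalent to (value - minval)/valrange.
    valrange = maxval - minval
    if valrange == 0.0:
        valrange = 1          # guard against a flat field
    factor = 1./valrange
    offset = -minval*factor
    return offset + value*factor

# e.g. normalized_color(5.0, 0.0, 10.0) returns 0.5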
Example #48
0
 def apply(self, oldskeleton, context):
     prog = progress.getProgress("Edge swap", progress.DEFINITE)
     try:
         return self._apply(oldskeleton, context, prog)
     finally:
         prog.finish()
Example #49
0
    def draw(self, gfxwindow, device):
        self.lock.acquire()
        meshctxt = mainthread.runBlock(self.who().resolve, (gfxwindow,))
        mesh = meshctxt.getObject()
        meshctxt.restoreCachedData(self.getTime(meshctxt, gfxwindow))
        prog = progress.getProgress("Contour plot", progress.DEFINITE)
        try:
            self.contour_max = None
            self.contour_min = None
            self.contour_levels = []
            meshctxt.precompute_all_subproblems()
            device.comment("FilledContourDisplay")
            # clevels is a list of contour values.
            # evalues is a list of lists of function values
            # for the nodes of each element.
            clevels, evalues = self.find_levels(mesh, self.what)
            minval = min(clevels)
            maxval = max(clevels)
            valrange = maxval - minval
            if valrange == 0.0:
                valrange = 1
            factor = 1./valrange
            offset = -minval*factor
            if config.dimension() == 2:
                device.set_colormap(self.colormap)
                ecount = 0
                # TODO: we might want to use itertools here
                for element, values in zip(mesh.element_iterator(), evalues):
                    if ( not gfxwindow.settings.hideEmptyElements ) or \
                           ( element.material() is not None ) :
                        (contours, elmin, elmax) = contour.findContours(
                            mesh, element, self.where,
                            self.what, clevels,
                            self.nbins, 1)

                        # Before drawing anything, fill the element with the
                        # largest contour value below its lowest detected value.
                        vmin = min(values)
                        prevcntour = None
                        for cntour in contours:
                            if cntour.value > elmin:
                                if prevcntour:
                                    device.set_fillColor(
                                        offset + prevcntour.value*factor)
                                else:
                                    device.set_fillColor(0.0)
                                break
                            prevcntour = cntour
                        else:
                            # If all of the contours were below the
                            # element, fill the element with the color
                            # from the top of the colormap.
                            device.set_fillColor(1.0)
                        # Find element perimeter
                        edges = element.perimeter()
                        mcorners = [[0.0]]*element.ncorners()
                        corners = self.where.evaluate(mesh, edges, mcorners)
                        device.fill_polygon(primitives.pontify(
                                primitives.Polygon(corners)))

                        # Now fill contours
                        for cntour in contours:
                            # This is harder than it looks.
                            if len(cntour.loops) == 1:
                                device.set_fillColor(
                                    offset + cntour.value*factor)
                                device.fill_polygon(cntour.loops[0])
                            elif len(cntour.loops) > 1:
                                device.set_fillColor(
                                    offset + cntour.value*factor)
                                # Compound Polygon fill
                                device.fill_polygon(cntour.loops)
                    ecount += 1
                    prog.setFraction((1.0*ecount)/mesh.nelements())
                    prog.setMessage("drawing %d/%d elements" %
                                    (ecount, mesh.nelements()))
                #  self.draw_subcells(mesh, device)
                contour.clearCache()

            elif config.dimension() == 3:
                # TODO 3D: this should be more seamless when meshes
                # use vtk objects.  Also, will need to update for
                # quadratic elements.
                lut = self.colormap.getVtkLookupTable(self.levels,minval,maxval)
                numnodes = mesh.nnodes()
                nodes = mesh.node_iterator()
                points = vtk.vtkPoints()
                points.Allocate(numnodes,numnodes)
                data = vtk.vtkDoubleArray()
                data.SetNumberOfComponents(1) # one scalar value per node
                data.SetNumberOfValues(numnodes)

                while not nodes.end():
                    node = nodes.node()
                    points.InsertNextPoint(node[0],node[1],node[2])
                    nodes.next()

                grid = vtk.vtkUnstructuredGrid()
                nelements = mesh.nelements()
                grid.Allocate(nelements, nelements)
                elements = mesh.element_iterator()

                # this will reset some values. TODO 3D: think about
                # plotting discontinuous stuff with vtk - could add
                # points to points object here
                for element, values in zip(elements, evalues):
                    elnodes = element.ncorners()
                    for i in xrange(elnodes):
                        data.SetValue(element.getPointIds().GetId(i), values[i])
                    grid.InsertNextCell(element.GetCellType(), element.getPointIds())                 

                grid.SetPoints(points)
                grid.GetPointData().SetScalars(data)
                device.draw_unstructuredgrid_with_lookuptable(grid, lut, mode="point")

                self.lookuptable = lut
                
            self.contour_min = minval
            self.contour_max = maxval
            self.contour_levels = clevels

        finally:
            self.lock.release()
            meshctxt.releaseCachedData()
            prog.finish()
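The 3D branch of this example assembles a vtkUnstructuredGrid with one scalar per node and hands it to the device along with a lookup table. A minimal, self-contained sketch of that pattern using standard VTK calls; the mesh, colormap, and device objects from the example are assumed and omitted here:

import vtk

# Four points and a single tetrahedral cell.
points = vtk.vtkPoints()
for xyz in [(0., 0., 0.), (1., 0., 0.), (0., 1., 0.), (0., 0., 1.)]:
    points.InsertNextPoint(*xyz)

# One scalar value per point.
data = vtk.vtkDoubleArray()
data.SetNumberOfComponents(1)
for val in (0.0, 0.3, 0.7, 1.0):
    data.InsertNextValue(val)

ids = vtk.vtkIdList()
for i in range(4):
    ids.InsertNextId(i)

grid = vtk.vtkUnstructuredGrid()
grid.Allocate(1, 1)
grid.InsertNextCell(vtk.VTK_TETRA, ids)
grid.SetPoints(points)
grid.GetPointData().SetScalars(data)

# A generic lookup table stands in for colormap.getVtkLookupTable().
lut = vtk.vtkLookupTable()
lut.SetTableRange(0.0, 1.0)
lut.Build()
# device.draw_unstructuredgrid_with_lookuptable(grid, lut, mode="point")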
Example #50
0
    def solve(self, matrix_method, precompute, compute_residual,
              compute_jacobian, compute_linear_coef_mtx, data, values):

        # matrix_method is a function that takes a matrix A, a
        # right-hand side b, and a vector of unknowns x, and sets x so
        # that A*x = b.

        # 'data' is a user-defined object that can be used to pass
        # information from the calling function to the callback
        # functions if necessary.  The callback functions are
        # precompute, compute_residual, and compute_jacobian.

        # precompute will be called before calling compute_residual
        # and compute_jacobian, and should perform any calculations
        # shared by those two functions.  Its arguments are 'data'
        # (the user defined object), 'values' (a DoubleVec containing
        # a trial solution) and 'needJacobian' (a boolean indicating
        # whether or not the Jacobian needs to be recomputed).

        # compute_residual is called only after precompute.  Its
        # arguments are 'data' (the same object that was used for
        # precompute), 'values' (the trial solution), and 'residual'
        # (the resulting vector of residuals).

        # compute_jacobian is also called only after precompute.  It
        # only takes the 'data' argument.  (A minimal, hypothetical
        # sketch of these callbacks appears after this example.)

        # TODO OPT: The vectors and matrices computed by
        # compute_residual, compute_jacobian, and
        # compute_linear_coef_mtx can be preallocated here and passed
        # to the functions, instead of being repeatedly reallocated on
        # each function call.

        # debug.fmsg("initial values=", values.norm())
        n = values.size()

        update   = doublevec.DoubleVec(n)

        # compute the residual = -K*startValues + rhs
        self.requireResidual(True)
        self.requireJacobian(True)
        precompute(data, values, self)
        residual = compute_residual(data, values, self)

        res_norm0 = residual.norm() # norm of the initial residual
        res_norm  = res_norm0       # we will keep comparing current residual
                                    # with res_norm0 to judge convergence
        # debug.fmsg("initial residual:", res_norm0)

        prog = progress.getProgress("Newton Solver", progress.LOGDEFINITE)
        target_res = self.relative_tolerance*res_norm0 + self.absolute_tolerance
        if res_norm0 > target_res:
            prog.setRange(res_norm0, target_res)
        try:
            # compute Newton updates while residual is large and
            # self.maximum_iterations is not exceeded
            s = 1.
            i = 0
            while (res_norm > target_res and i < self.maximum_iterations
                   and not prog.stopped()):
                # debug.fmsg("iter =", i, ",  res =", res_norm, " s =", s)
                update.zero()
                # solve for the Newton step:  Jacobian * update = -residual
                J = compute_jacobian(data, self)
                # debug.fmsg("J=\n", J.norm())
                residual *= -1.0
                matrix_method.solve( J, residual, update )
                # debug.fmsg("update=", update.norm())

                # choose step size for the Newton update.  This resets
                # self.requireXXX.
                s, residual = self.choose_step_size(
                    data, values, update, res_norm,
                    precompute, compute_residual)
                # debug.fmsg("s=", s)
                # correct the soln with the Newton update
                values += s * update

                res_norm = residual.norm()
                if res_norm <= target_res:
                    break

                # update the linear system
                self.requireJacobian(True)
                self.requireResidual(True)
                # debug.fmsg("norm updated values=", values.norm())
                precompute(data, values, self)
                # compute the residual
                residual = compute_residual(data, values, self)
                res_norm = residual.norm()
                #debug.fmsg("Current residual: [%s] (%g)" %(residual, res_norm))
                # debug.fmsg("new residual =", res_norm)
                prog.setMessage("%g/%g" % (res_norm, target_res))
                prog.setFraction(res_norm)

                i += 1
                # end of Newton iterations

            if prog.stopped():
                prog.setMessage("Newton solver interrupted")
                #progress.finish()
                raise ooferror2.ErrInterrupted()
        finally:
            prog.finish()
        # raise error if Newton's method did not converge in maximum_iterations
        if i >= self.maximum_iterations and res_norm > target_res:
            raise ooferror2.ErrConvergenceFailure(
                'Nonlinear solver - Newton iterations', self.maximum_iterations)
        # debug.fmsg("final values=", values)
        # debug.fmsg("-------------------")
        return i, res_norm
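A minimal, hypothetical illustration of the callback protocol described in the comments of this example, using numpy arrays as stand-ins for DoubleVec and the sparse matrix types. It solves f(x) = x**2 - 2 = 0 with the same calling conventions; none of the names below exist in the library itself, and the bare-bones loop omits the line search, tolerances, and progress reporting:

import numpy as np

class ToyData(object):
    pass                                   # scratch space shared by the callbacks

def precompute(data, values, solver):
    data.x = values.copy()                 # cache the trial solution

def compute_residual(data, values, solver):
    return data.x**2 - 2.0                 # f(x)

def compute_jacobian(data, solver):
    return np.diag(2.0*data.x)             # df/dx

class ToyMatrixMethod(object):
    def solve(self, A, b, x):
        x[:] = np.linalg.solve(A, b)       # sets x so that A*x = b

values = np.array([1.0])                   # initial guess
data, mm = ToyData(), ToyMatrixMethod()
update = np.zeros_like(values)
for i in range(20):
    precompute(data, values, None)
    residual = compute_residual(data, values, None)
    if np.linalg.norm(residual) < 1.e-10:
        break
    J = compute_jacobian(data, None)
    mm.solve(J, -residual, update)         # Jacobian * update = -residual
    values += update                       # full Newton step, s = 1
# values is now approximately sqrt(2)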
Example #51
0
    def solve(self, matrix_method, precompute, compute_residual,
              compute_jacobian, compute_linear_coef_mtx, data, values):
        # initialize: set soln = startValues, update = 0
        # debug.fmsg("-----------------------------")
        # debug.fmsg("initial values=", values.norm())
        n = values.size()
        update   = doublevec.DoubleVec(n)
        # residual = doublevec.DoubleVec(n)

        # compute the residual = -K*startValues + rhs
        self.requireResidual(True)
        self.requireJacobian(False)
        precompute(data, values, self)
        residual = compute_residual(data, values, self)

        res_norm0 = residual.norm() # this is the norm of the initial residual
        # debug.fmsg("res_norm0=%g:" % res_norm0)
        res_norm  = res_norm0       # we will keep comparing current residual
                                    # with res_norm0 to judge convergence

        target_res = self.relative_tolerance*res_norm0 + self.absolute_tolerance
        prog = progress.getProgress('Picard Solver', progress.LOGDEFINITE)
        if res_norm > target_res:
            prog.setRange(res_norm0, target_res)

        try:
            # compute Picard updates while residual is large and
            # self.maximum_iterations is not exceeded
            s = 1.0
            i = 0
            while (res_norm > target_res and i < self.maximum_iterations
                   and not prog.stopped()):
                # debug.fmsg("iteration %d" % i)
                update.zero()  # start with a zero update vector

                # solve for the Picard step:  K * update = -residual
                K = compute_linear_coef_mtx(data, self)
                residual *= -1.0

                # debug.fmsg("residual=", residual.norm())
                matrix_method.solve( K, residual, update )
                # debug.fmsg("update=", update.norm())

                # choose step size for the Picard update
                s, residual = self.choose_step_size(
                    data, values, update, res_norm,
                    precompute, compute_residual)
                # debug.fmsg("line search s=", s)

                # correct the soln with the Picard update
                values += s * update
                res_norm = residual.norm()
                if res_norm <= target_res:
                    break

                # update the linear system
                self.requireResidual(True)
                self.requireJacobian(False)
                precompute(data, values, self)

                # compute the residual
                residual = compute_residual(data, values, self)
                res_norm = residual.norm()
                # debug.fmsg("Current residual:", res_norm)
                prog.setMessage("%g/%g" % (res_norm, target_res))
                prog.setFraction(res_norm)

                i = i+1
                # end of Picard iterations

            if prog.stopped():
                prog.setMessage("Picard solver interrupted")
                #prog.finish()
                raise ooferror2.ErrInterrupted()
        finally:
            prog.finish()

        # debug.fmsg("Done: res_norm=%g" % res_norm)
        # raise error if Picard's iterations did not converge in
        # maximum_iterations
        if i >= self.maximum_iterations and res_norm > target_res:
            raise ooferror2.ErrConvergenceFailure(
                'Nonlinear solver - Picard iterations',
                self.maximum_iterations)
        return i, res_norm
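Structurally, this Picard loop differs from the Newton loop of Example #50 only in the matrix assembled for each update: the linear coefficient matrix from compute_linear_coef_mtx rather than the Jacobian. A sketch of the two update rules, reusing the numpy stand-ins from the sketch after Example #50 (helper names are hypothetical):

import numpy as np

def newton_update(J, residual, values, s=1.0):
    # Jacobian * update = -residual;  values += s * update
    return values + s*np.linalg.solve(J, -residual)

def picard_update(K, residual, values, s=1.0):
    # K(values) * update = -residual;  values += s * update
    return values + s*np.linalg.solve(K, -residual)

Picard iterations typically converge only linearly but avoid assembling the Jacobian; the step size s returned by choose_step_size damps the update when a full step would increase the residual.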