Ejemplo n.º 1
0
    def test_super_methods_merged(self):
        '''Checks that all signatures on a class' methods are found, not just the first for a name

        Bug #628315'''
        # A synchronizedList wrapper exposes the full set of List method
        # overloads; the regression was that only the first signature for a
        # given name was reflected into Jython.
        synchList = Collections.synchronizedList(ArrayList())
        synchList.add("a string")
        # remove(int) must resolve to the index-based overload and return the
        # removed element. assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual("a string", synchList.remove(0))
Ejemplo n.º 2
0
    def test_super_methods_merged(self):
        '''Checks that all signatures on a class' methods are found, not just the first for a name

        Bug #628315'''
        # Wrap a Java ArrayList; the synchronized wrapper should expose every
        # overload of the List methods, not just the first one found.
        wrapped = Collections.synchronizedList(ArrayList())
        wrapped.add("a string")
        removed = wrapped.remove(0)
        self.assertEquals("a string", removed)
Ejemplo n.º 3
0
    def run(self):
        """Plan and execute the pipeline.

        Builds one JobSpec per sink, walks each sink's pipe chain back toward
        its source (via _walkPipe), then runs the resulting jobs with at most
        3 in flight at a time, starting a job only once all of its parent
        jobs have completed.

        Returns a dict mapping result keys to merged value lists collected
        from every job.
        Raises Exception when no sources/sinks are defined or when a sink has
        more than one source.
        """
        # sanity check:
        if len(self.sources) == 0: raise Exception("No sources defined")
        if len(self.sinks) == 0: raise Exception("No sinks defined")

        # create a plan: one output JobSpec per sink, plus upstream specs
        # discovered by _walkPipe.
        specs = []
        pipemap = {}
        for sink in self.sinks:
            spec = JobSpec(self._jobid(), self.workpath)
            spec.outputpath = sink.sinkpath
            spec.outputformat = sink.outputformat
            spec.outputJson = sink.json
            spec.compressoutput = sink.compressoutput
            spec.compressiontype = sink.compressiontype
            specs.append(spec)
            # BUG FIX: wrap sink in str() -- concatenating a non-string object
            # onto a str raised TypeError instead of the intended message.
            if len(sink.sources) != 1: raise Exception("Sinks can only have one source: " + str(sink))
            self._walkPipe(spec, sink.sources[0], specs, pipemap)

        # sort out paths for jobs:
        self._configureJobs(specs)

        # run jobs:
        _log.info("Working directory is " + self.workpath)
        _log.info(str(len(specs)) + " job(s) found from " + str(len(self.pipes)) + " pipe action(s)")
        happy.dfs.delete(self.workpath)
        # Java synchronized collections: worker threads record completion ids
        # and result dicts here concurrently.
        jobsDone = Collections.synchronizedSet(HashSet())
        jobResults = Collections.synchronizedList(ArrayList())
        jobsStarted = sets.Set()
        while jobsDone.size() < len(specs):
            # only keep 3 jobs in flight:
            for spec in specs:
                jobId = spec.id  # renamed from 'id' to avoid shadowing the builtin
                if jobId not in jobsStarted:
                    parentIds = [parent.id for parent in spec.parents]
                    # a job is eligible only when every parent has finished
                    if jobsDone.containsAll(parentIds):
                        thread = threading.Thread(name="Cloud Job " + str(jobId), target=self._runJob, args=(spec.getJob(), jobId, jobsDone, jobResults))
                        thread.setDaemon(True)
                        thread.start()
                        jobsStarted.add(jobId)
                if len(jobsStarted) - jobsDone.size() >= 3: break
            time.sleep(1)
        # compile results: merge each job's result dict, key by key.
        results = {}
        for result in jobResults:
            for key, value in result.iteritems():
                results.setdefault(key, []).extend(value)
        # check for errors:
        if self.hasErrors():
            totalErrors = sum(results["happy.cloud.dataerrors"])
            _log.error("*** " + str(totalErrors) + " DataException errors were caught during this run, look in " + \
                self.workpath + "/errors to see details ***")
        return results
Ejemplo n.º 4
0
    def createWalkableZones():
        """Populate MeshMaker's walkable-zone structures from the map's ledge tiles.

        Fetches and sorts the ledge tiles, splits them into contiguous walkable
        runs, and records one walk shape per run in walkableZones and
        walkableTilesMap. Raises ValueError if the map has no ledge tiles.
        """
        MeshMaker.ledgeTiles = MeshMaker.getLedgeTiles()

        if len(MeshMaker.ledgeTiles) == 0:
            raise ValueError('No ledge tiles found on this map!')

        MeshMaker.ledgeTiles = MeshMaker.tileListSorter(MeshMaker.ledgeTiles)

        # Hand a synchronized wrapper to the splitter before partitioning the
        # sorted tiles into walkable runs.
        syncTiles = Collections.synchronizedList(MeshMaker.ledgeTiles)
        MeshMaker.walkableTileList = MeshMaker.splitWalkableZones(syncTiles)

        MeshMaker.walkableTilesMap = {}
        for tileRun in MeshMaker.walkableTileList:
            shape = MeshMaker.makeWalkShape(tileRun[0], tileRun[-1])
            MeshMaker.walkableZones.append(shape)
            MeshMaker.walkableTilesMap[shape] = tileRun
Ejemplo n.º 5
0
from __future__ import print_function
import sys, traceback
from synchronize import make_synchronized
from java.util.concurrent import Callable, Future, Executors, ThreadFactory, TimeUnit
from java.util.concurrent.atomic import AtomicInteger
from java.lang.reflect.Array import newInstance as newArray
from java.lang import Runtime, Thread, Double, Float, Byte, Short, Integer, Long, Boolean, Character, System, Runnable
from net.imglib2.realtransform import AffineTransform3D
from net.imglib2.view import Views
from java.util import LinkedHashMap, Collections, LinkedList, HashMap
from java.lang.ref import SoftReference
from java.util.concurrent.locks import ReentrantLock

# Single-threaded scheduled executor; presumably used to drain msgQueue off
# the producing threads -- confirm against the scheduling call site.
printService = Executors.newSingleThreadScheduledExecutor()
# Thread-safe FIFO of pending messages; wrapped so multiple threads can
# append/pop without external locking.
msgQueue = Collections.synchronizedList(LinkedList())  # synchronized


def printMsgQueue():
    """Drain msgQueue, printing each message; best-effort, never raises.

    Another thread may pop between the isEmpty() check and our pop(), so a
    pop failure is expected occasionally and is reported rather than raised.
    """
    while not msgQueue.isEmpty():
        try:
            print(msgQueue.pop())
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit are
        # no longer swallowed; the best-effort reporting is preserved.
        except Exception:
            System.out.println(str(sys.exc_info()))


class Printer(Runnable):
    def __init__(self, stdout):
        self.stdout = sys.stdout

    def run(self):
        while not msgQueue.isEmpty():
Ejemplo n.º 6
0
			outfile.write('"'+self.formatter.print(timestamp) + '","'+str(total)+'","'+str(throughput)+'"')
			outfile.flush()
			i = i + 1
	def complete(self):
		# Request shutdown: set the flag that the loop above presumably polls
		# as self._exit -- confirm against the loop's condition.
		self._exit = True

# Main
# Number of worker threads; defaults to 2, overridable by the first CLI arg.
threadCount = 2
if len(sys.argv) > 1:
	threadCount = int(sys.argv[1])
# Work size handed to each FibThread -- presumably Fibonacci iterations per
# worker; confirm against FibThread's definition.
iterations = 3000000
main = Thread.currentThread()
print "Sleeping for 5 seconds"
main.sleep(5000)

# Thread-safe list the workers append results into.
resultList = Collections.synchronizedList(ArrayList())
start = System.currentTimeMillis()
threadList = ArrayList()
i = 0
# NOTE(review): ai appears unused in this chunk -- presumably referenced
# further down the file; verify before removing.
ai = AtomicInteger(0)
while i < threadCount:
	threadName = "Pooled Thread " + str(i)
	t = FibThread(name=threadName, i=iterations, r=resultList)
	t.start()
	threadList.add(t)
	i = i + 1
# Reporter thread watching the shared result list and the worker pool.
resultThread = ResultThread(r=resultList, pool=threadList)
resultThread.start()
print str(threadCount) + " Threads started ..."
# Block until every worker finishes.
for t in threadList:
	t.join()