Example No. 1
    def __init__(self, vdm, offlineCache):
        Stoppable.Stoppable.__init__(self)
        self.dependencies_ = TwoWaySetMap.TwoWaySetMap()
        self.vdm_ = vdm
        self.offlineCache_ = offlineCache
        self.finishedValuesAndTimeElapsed_ = {}
        self.intermediates_ = {}
        self.lock_ = threading.RLock()
        self.completable_ = Queue.Queue()
        self.timesComputed = 0
        self.computingContexts_ = {}
        self.computingContexts_t0_ = {}
        self.isSplit_ = set()
        self.watchers_ = {}
        self.contexts_ = []

        self.inProcessDownloader = (
            OutOfProcessDownloader.OutOfProcessDownloaderPool(
                Setup.config().cumulusServiceThreadCount,
                actuallyRunOutOfProcess = False
                )
            )

        self.threads_ = []
        self.isActive = True
        #set up the primary cache object and start its worker threads
        for threadIx in range(Setup.config().cumulusServiceThreadCount):
            workerThread = ManagedThread.ManagedThread(target = self.threadWorker)
            workerThread.start()
            self.threads_.append(workerThread)
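
threadWorker itself is not shown in this snippet. The shape of the code above is a classic worker-pool setup: a shared completable_ queue, an isActive flag, and one ManagedThread per configured service thread draining that queue. A hypothetical minimal sketch of such a loop, not the actual method from this class (handleOneItem is an invented placeholder):

    def threadWorker(self):
        #hypothetical sketch: block on the shared queue and process items
        #until the service is deactivated
        while self.isActive:
            try:
                item = self.completable_.get(timeout=0.1)
            except Queue.Empty:
                continue
            self.handleOneItem(item)  #invented placeholder for the real work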
Example No. 2
    def __init__(self):
        callbackSchedulerFactory = CallbackScheduler.createSimpleCallbackSchedulerFactory()
        self.callbackScheduler = callbackSchedulerFactory.createScheduler(
            "Simulator", 1)

        self.uforaPath = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '../'))

        self.sharedStatePath = os.path.join(self.uforaPath,
                                            'distributed/SharedState')
        self.sharedStateMainline = os.path.join(self.sharedStatePath,
                                                'sharedStateMainline.py')

        self.gatewayServiceMainline = os.path.join(
            self.uforaPath, 'scripts/init/ufora-gateway.py')

        self.webPath = os.path.join(self.uforaPath, 'web/relay')
        self.relayScript = os.path.join(self.webPath, 'server.coffee')

        self.relayPort = Setup.config().relayPort
        self.relayHttpsPort = Setup.config().relayHttpsPort
        self.sharedStatePort = Setup.config().sharedStatePort
        self.restApiPort = Setup.config().restApiPort
        self.subscribableWebObjectsPort = Setup.config().subscribableWebObjectsPort

        #create an OutOfProcessDownloader so we can execute commands like 'forever'
        #from there, instead of forking from the main process (which can run out of memory)
        self.processPool = OutOfProcessDownloader.OutOfProcessDownloaderPool(1)

        self.desirePublisher = None
        self._connectionManager = None
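
The comment above explains why the pool exists here: running external commands like 'forever' from the already-large main process would require forking it, so the fork is delegated to the small downloader process instead. A hypothetical illustration of that pattern using only the executeAndCallbackWithString call shown in the other examples (RunsCommand is an invented helper, not part of the codebase; the OutOfProcessDownloader import is assumed to be available as in the snippets above):

import subprocess
import Queue

class RunsCommand(object):
    #hypothetical picklable callable: executed inside the downloader process,
    #so only that small process forks to run the external command
    def __init__(self, args):
        self.args = args

    def __call__(self):
        return subprocess.check_output(self.args)

pool = OutOfProcessDownloader.OutOfProcessDownloaderPool(1)
queue = Queue.Queue()
pool.getDownloader().executeAndCallbackWithString(RunsCommand(["echo", "hi"]), queue.put)
commandOutput = queue.get()  #"hi\n"
pool.teardown()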
Example No. 3
    def test_exception(self):
        pool = OutOfProcessDownloader.OutOfProcessDownloaderPool(1)

        queue = Queue.Queue()

        with self.assertRaises(AssertionError):
            pool.getDownloader().executeAndCallbackWithString(
                assertsFalse, queue.put)

        pool.teardown()
Example No. 4
    def test_in_process_looping(self):
        pool = OutOfProcessDownloader.OutOfProcessDownloaderPool(
            1, actuallyRunOutOfProcess=False)

        queue = Queue.Queue()

        for ix in xrange(10):
            pool.getDownloader().executeAndCallbackWithString(
                returnsAString, queue.put)
            self.assertEqual(queue.get(), "asdf")

        pool.teardown()
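
The callables handed to executeAndCallbackWithString here (assertsFalse and returnsAString, plus DoublesString in the next example) are module-level helpers defined in the test file and not shown on this page. A minimal sketch of what they plausibly look like, inferred only from the assertions in these tests; the real definitions may differ:

def returnsAString():
    #the tests expect this exact payload back through the callback
    return "asdf"

def assertsFalse():
    #the failure is expected to surface as an AssertionError on the caller's side
    assert False

class DoublesString(object):
    #a picklable callable with state: returns its text repeated twice
    def __init__(self, text):
        self.text = text

    def __call__(self):
        return self.text * 2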
Example No. 5
    def test_execute(self):
        pool = OutOfProcessDownloader.OutOfProcessDownloaderPool(1)

        queue = Queue.Queue()

        pool.getDownloader().executeAndCallbackWithString(
            returnsAString, queue.put)
        self.assertEqual(queue.get(), "asdf")

        pool.getDownloader().executeAndCallbackWithString(
            DoublesString("haro"), queue.put)
        self.assertEqual(queue.get(), "haroharo")

        pool.teardown()
Example No. 6
    def verifyCallableWithInput(self, actuallyRunOutOfProcess=True):
        pool = OutOfProcessDownloader.OutOfProcessDownloaderPool(
            1, actuallyRunOutOfProcess)
        try:
            queue = Queue.Queue()

            toEcho = "x" * 100000

            def writeInput(fd):
                os.write(fd, common.prependSize(toEcho))

            pool.getDownloader().executeAndCallbackWithString(
                echoInput, queue.put, writeInput)
            self.assertEqual(queue.get(), toEcho)
        finally:
            pool.teardown()
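
Two helpers here are not shown: common.prependSize and echoInput. The round trip implies that prependSize length-prefixes the payload written into the worker's input pipe and that echoInput reads the framed payload back and returns it unchanged. A minimal sketch of the framing side only, assuming a 4-byte big-endian length prefix; the actual format used by common may differ:

import struct

def prependSize(data):
    #assumed framing: 4-byte big-endian length followed by the raw payload,
    #so the reader on the worker side knows exactly how many bytes to consume
    return struct.pack(">I", len(data)) + data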
Example No. 7
    def test_throughput(self):
        pool = OutOfProcessDownloader.OutOfProcessDownloaderPool(1)

        queue = Queue.Queue()

        t0 = time.time()
        ix = 0
        while time.time() - t0 < 2.0:
            pool.getDownloader().executeAndCallbackWithString(
                DoublesString(str(ix)), queue.put)
            self.assertEqual(queue.get(), str(ix) * 2)
            ix = ix + 1

        logging.info("Executed %s out-of-process callbacks/second", ix / 2.0)

        #on the machine in my office we get 20,000/sec, so asserting > 100 is a very conservative threshold
        self.assertTrue(ix > 100)

        pool.teardown()
Example No. 8
    def test_subprocess_dies(self):
        pool = OutOfProcessDownloader.OutOfProcessDownloaderPool(
            1, actuallyRunOutOfProcess=True)

        queue = Queue.Queue()

        for ix in xrange(20):
            try:
                pool.getDownloader().executeAndCallbackWithString(
                    killSelf, lambda s: s)
            except:
                #the worker process was killed mid-call; the pool is expected to recover
                pass

            pool.getDownloader().executeAndCallbackWithString(
                returnsAString, queue.put)

        pool.teardown()

        self.assertEqual(queue.qsize(), 20)
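
killSelf is also defined in the test module rather than shown here. The point of test_subprocess_dies is that the pool survives its worker process dying mid-call and spins up a fresh one for the next request, which is why every returnsAString call after a kill still succeeds. A plausible sketch of the helper, assuming it simply terminates the process it runs in:

import os
import signal

def killSelf():
    #assumed helper: kill the current (worker) process abruptly so the pool
    #has to detect the death and restart its downloader subprocess
    os.kill(os.getpid(), signal.SIGKILL)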
Example No. 9
    def __init__(self,
                 s3Interface,
                 objectStore,
                 vdm,
                 datasetRequestChannel,
                 threadCount=None,
                 maxObjectStoreAttempts=None,
                 objectStoreFailureIntervalSeconds=None):
        object.__init__(self)

        self.s3Interface = s3Interface
        self.objectStore = objectStore
        self.maxObjectStoreAttempts = Setup.config().objectStoreMaxAttempts \
            if maxObjectStoreAttempts is None \
            else max(1, maxObjectStoreAttempts)
        self.objectStoreFailureIntervalSeconds = Setup.config().objectStoreFailureIntervalSeconds \
            if objectStoreFailureIntervalSeconds is None \
            else objectStoreFailureIntervalSeconds
        self.objectStoreFailureCount = 0
        self.lastSuccessfulObjectStoreAttempt = 0.0
        self.vdm_ = vdm
        self.datasetRequestChannel_ = datasetRequestChannel
        self.threads_ = []
        self.teardown_ = False
        self.lock_ = threading.Lock()
        self.totalTasks = 0
        self.threadcount = threadCount or Setup.config().externalDatasetLoaderServiceThreads

        logging.debug(
            "OutOfProcessDownloader is %s",
            "out of process" if s3Interface.isCompatibleWithOutOfProcessDownloadPool else \
                "in memory"
            )

        self.outOfProcessDownloaderPool = \
            OutOfProcessDownloader.OutOfProcessDownloaderPool(
                self.threadcount,
                #for the in-memory tests, we can't run out of process because the fork may happen
                #before we populate the memory abstraction
                actuallyRunOutOfProcess=s3Interface.isCompatibleWithOutOfProcessDownloadPool
                )
Example No. 10
    def test_basic(self):
        pool = OutOfProcessDownloader.OutOfProcessDownloaderPool(1)
        pool.teardown()
Example No. 11
    def test_basic_out(self):
        pool = OutOfProcessDownloader.OutOfProcessDownloaderPool(
            1, actuallyRunOutOfProcess=True)
        pool.teardown()
Example No. 12
    def test_basic_in(self):
        pool = OutOfProcessDownloader.OutOfProcessDownloaderPool(
            1, actuallyRunOutOfProcess=False)
        pool.teardown()