Example #1
    def test_usage(self):
        import sys

        ticks = sys.getcheckinterval()

        with self.assertRaisesRegexp(RuntimeError, "timeout"):
            start = time.time()
            with timeout(0.01, 0.01):
                time.sleep(1)
        self.assertLess(time.time() - start, 1)
        self.assertEqual(ticks, sys.getcheckinterval())

        with self.assertRaises(KeyboardInterrupt):
            with timeout(0.01, 0.01):
                raise KeyboardInterrupt
        self.assertEqual(ticks, sys.getcheckinterval())

        with self.assertRaisesRegexp(RuntimeError, "timeout"):
            start = time.time()
            with timeout(0.01, 0.01, 10):
                while True:
                    pass
        self.assertLess(time.time() - start, 0.03)
        self.assertEqual(ticks, sys.getcheckinterval())

        # 0 for forever
        with timeout(0, 0.001):
            time.sleep(0.01)
        self.assertEqual(ticks, sys.getcheckinterval())
Example #2
 def test_setcheckinterval(self):
     import sys
     raises(TypeError, sys.setcheckinterval)
     orig = sys.getcheckinterval()
     for n in 0, 100, 120, orig: # orig last to restore starting state
         sys.setcheckinterval(n)
         assert sys.getcheckinterval() == n
Example #3
 def test_setcheckinterval(self):
     if test.test_support.due_to_ironpython_bug("http://tkbgitvstfat01:8080/WorkItemTracking/WorkItem.aspx?artifactMoniker=148342"):
         return
     self.assertRaises(TypeError, sys.setcheckinterval)
     orig = sys.getcheckinterval()
     for n in 0, 100, 120, orig: # orig last to restore starting state
         sys.setcheckinterval(n)
         self.assertEquals(sys.getcheckinterval(), n)
Example #4
 def test_setcheckinterval(self):
     with warnings.catch_warnings():
         warnings.simplefilter("ignore")
         self.assertRaises(TypeError, sys.setcheckinterval)
         orig = sys.getcheckinterval()
         for n in 0, 100, 120, orig: # orig last to restore starting state
             sys.setcheckinterval(n)
             self.assertEqual(sys.getcheckinterval(), n)
Example #5
def TimeTest002():
    import time
    import sys
    print sys.getcheckinterval()
    for i in range (100):
         # now = time.time()
         now = time.clock()
         # b = time.__doc__()
         print i, " - ", now
    print 'STOP'
Example #6
 def testConfigureInterpreter(self):
     oldcheckinterval = sys.getcheckinterval()
     newcheckinterval = oldcheckinterval + 1
     conf = self.load_config_text("""
                 instancehome <<INSTANCE_HOME>>
                 python-check-interval %d
                 """ % newcheckinterval)
     try:
         starter = self.get_starter(conf)
         starter.setupInterpreter()
         self.assertEqual(sys.getcheckinterval(), newcheckinterval)
     finally:
         sys.setcheckinterval(oldcheckinterval)
Example #7
 def atomic(new_checkinterval=sys.getcheckinterval(),
            maxint=sys.maxint,
            getcheckinterval=sys.getcheckinterval,
            setcheckinterval=sys.setcheckinterval):
     setcheckinterval(maxint)
     try:
         reset_check_interval = True
         if ctypes.addressof(P_GIL.contents) == ctypes.addressof(GIL):
             lock = thread.allocate_lock()
             lock.acquire()
             gil = P_GIL.contents
             P_GIL.contents = get_pointer_to_lock(lock)
             try:
                 setcheckinterval(new_checkinterval)
                 reset_check_interval = False
                 yield True
             finally:
                 P_GIL.contents = gil
         else:
             setcheckinterval(new_checkinterval)
             reset_check_interval = False
             yield True
     finally:
         if reset_check_interval:
             setcheckinterval(new_checkinterval)
Example #8
    def inject_jump(self, where, dest):
        """
        Monkeypatch bytecode at ``where`` to force it to jump to ``dest``.

        Returns a function which puts things back to how they were.
        """
        # We're about to do dangerous things to a function's code content.
        # We can't make a lock to prevent the interpreter from using those
        # bytes, so the best we can do is to set the check interval to be high
        # and just pray that this keeps other threads at bay.
        old_check_interval = sys.getcheckinterval()
        sys.setcheckinterval(2**20)

        pb = ctypes.pointer(self.ob_sval)
        orig_bytes = [pb[where+i][0] for i in xrange(3)]

        v = struct.pack("<BH", opcode.opmap["JUMP_ABSOLUTE"], dest)

        # Overwrite code to cause it to jump to the target
        for i in xrange(3):
            pb[where+i][0] = ord(v[i])

        def tidy_up():
            """
            Put the bytecode back how it was. Good as new.
            """
            sys.setcheckinterval(old_check_interval)
            for i in xrange(3):
                pb[where+i][0] = orig_bytes[i]

        return tidy_up
Example #9
def xdump(path, show_scheme=True, show_data=True):
    # print "query_res " + str(xquery_res)
    xobj, scheme, ret_type = list_path(path)
    if xobj is None:
        return None
    if ret_type == "DIR":
        ret_fields = [['dir']]
        for (son_dir_name, son_dir) in xobj.items():
            ret_fields.append([add_cross_if_dir(son_dir_name, son_dir)])
        return ret_fields
    ret_fields = list()
    if show_scheme:
        ret_fields.append(list(scheme.keys()))
    if ret_type == "LOGS":
        ret_fields.extend(xobj)
        return ret_fields
    def_interval = sys.getcheckinterval()
    # TODO: maybe copy before and no need to lock?
    sys.setcheckinterval(1000000000)
    try:
        ret_fields.extend(decompose_fields(xobj,
                                           show_scheme=False,
                                           show_data=show_data))
    except Exception as e:
        raise e
    finally:
        sys.setcheckinterval(def_interval)
    return ret_fields
Example #10
def get_py_internals():
    py_internals = []
    if hasattr(sys, 'builtin_module_names'):
        py_internals.append(
            ('Built-in Modules', ', '.join(sys.builtin_module_names)))
    py_internals.append(('Byte Order', sys.byteorder + ' endian'))

    if hasattr(sys, 'getcheckinterval'):
        py_internals.append(('Check Interval', sys.getcheckinterval()))

    if hasattr(sys, 'getfilesystemencoding'):
        py_internals.append(
            ('File System Encoding', sys.getfilesystemencoding()))

    max_integer_size = str(sys.maxsize) + ' (%s)' % \
        hex(sys.maxsize).upper()
    py_internals.append(('Maximum Integer Size', max_integer_size))

    if hasattr(sys, 'getrecursionlimit'):
        py_internals.append(('Maximum Recursion Depth',
                            sys.getrecursionlimit()))

    if hasattr(sys, 'tracebacklimit'):
        traceback_limit = sys.tracebacklimit
    else:
        traceback_limit = 1000
    py_internals.append(('Maximum Traceback Limit', traceback_limit))

    py_internals.append(('Maximum Code Point', sys.maxunicode))
    return py_internals
Example #11
def diag_encoding(ct):

    ct.h2("System Encodings")
    
    s="    locale.getdefaultlocale(): "\
       + repr(locale.getdefaultlocale())

    s+="\n    sys.getdefaultencoding() : "+ sys.getdefaultencoding()
    s+="\n    sys.getfilesystemencoding() : " + sys.getfilesystemencoding()
    s+="\n    sys.stdout.encoding : "
    try:
        s+=str(sys.stdout.encoding)
    except AttributeError:
        s+=("(undefined)")
    s+="\n    sys.stdin.encoding : "
    try:
        s+=str(sys.stdin.encoding)
    except AttributeError:
        s+="(undefined)"
    s+="\n    sys.getcheckinterval() : %r " % sys.getcheckinterval()
    s+="\n    sys.getwindowsversion() : %r " % (sys.getwindowsversion(),)
    
    
    s+="\n"
    ct.pre(s)
Example #12
    def __call__(self, *args, **kwargs):
        """
        Execute the callback.
        """
        if self.finished:
            # We're finished before we even started.  The only sane reason for
            # this is that we were aborted, so check for this, and if
            # it's not the case, log an error.
            if self.failed and self._exception[0] == InProgressAborted:
                # Aborted, fine.
                return

            # This shouldn't happen.  If it does, it's certainly an error
            # condition.  But as we are inside the thread now and already
            # finished, we can't really raise an exception.  So logging the
            # error will have to suffice.
            log.error('Attempting to start thread which has already finished')

        if self._callback is None:
            # Attempting to invoke multiple times?  Shouldn't happen.
            return None

        try:
            result = self._callback()
            # Kludge alert: InProgressAborted gets raised asynchronously inside
            # the thread.  Assuming it doesn't inadvertently get cleared out
            # by PyErr_Clear(), it may take up to check-interval bytecodes for
            # it to trigger.  So we do a dummy loop to chew up that many byte
            # codes (roughly) to cause any pending async InProgressAborted to
            # raise here, which we'll catch next.  The overhead added by this
            # loop is negligible.  [About 10us on my system]
            for i in xrange(sys.getcheckinterval()):
                pass
        except InProgressAborted:
            # InProgressAborted was raised inside the thread (from the InProgress
            # abort handler).  This means we're already finished, so there's no
            # need to do anything further.
            pass
        except:
            # FIXME: should we really be catching KeyboardInterrupt and SystemExit?
            MainThreadCallback(self.throw)(*sys.exc_info())
        else:
            if type(result) == types.GeneratorType or isinstance(result, InProgress):
                # Looks like the callback is yielding something, or callback is a
                # coroutine-decorated function.  Not supported (yet?).  In the
                # case of coroutines, the first entry will execute in the
                # thread, but subsequent entries (via the generator's next())
                # will be from the mainthread, which is almost certainly _not_
                # what is intended by threading a coroutine.
                log.warning('NYI: coroutines cannot (yet) be executed in threads.')

            # If we're finished, it means we were aborted, but probably caught the
            # InProgressAborted inside the threaded callback.  If so, we discard the
            # return value from the callback, as we're considered finished.  Otherwise
            # finish up in the mainthread.
            if not self.finished:
                MainThreadCallback(self.finish)(result)

        self._callback = None
Example #13
 def _collect(self):
     gc.collect()
     check_interval = sys.getcheckinterval()
     sys.setcheckinterval(sys.maxint)
     try:
         return {id(object) for object in gc.get_objects() if not isinstance(object, EXCLUDE_TYPES)}
     finally:
         sys.setcheckinterval(check_interval)
Example #14
def short_checkinterval(request):
    """
    Sets a small interval using sys.setcheckinterval to cause many context
    switches.
    """
    old_interval = sys.getcheckinterval()
    sys.setcheckinterval(0)
    request.addfinalizer(lambda: sys.setcheckinterval(old_interval))
Example #15
    def run(self):
        pthread_setname_np(self.ident, "Manhole ----")

        client = self.client
        client.settimeout(None)
        pid, uid, gid = get_peercred(client)
        euid = os.geteuid()
        client_name = "PID:%s UID:%s GID:%s" % (pid, uid, gid)
        if uid not in (0, euid):
            raise SuspiciousClient(
                "Can't accept client with %s. "
                "It doesn't match the current EUID:%s or ROOT." % (
                    client_name, euid
            ))

        cry("Accepted connection %s from %s" % (client, client_name))
        pthread_setname_np(self.ident, "Manhole %s" % pid)
        client.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 0)
        client.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 0)
        backup = []
        try:
            client_fd = client.fileno()
            for mode, names in (
                ('w', (
                    'stderr',
                    'stdout',
                    '__stderr__',
                    '__stdout__'
                )),
                ('r', (
                    'stdin',
                    '__stdin__'
                ))
            ):
                for name in names:
                    backup.append((name, getattr(sys, name)))
                    setattr(sys, name, os.fdopen(client_fd, mode, 0))

            run_repl()
            cry("DONE.")
        finally:
            cry("Cleaning up.")
            old_interval = sys.getcheckinterval()
            sys.setcheckinterval(2147483647)
            junk = [] # keep the old file objects alive for a bit
            for name, fh in backup:
                junk.append(getattr(sys, name))
                setattr(sys, name, fh)
            del backup
            for fh in junk:
                try:
                    fh.close()
                except IOError:
                    pass
                del fh
            del junk
            self.client = None
            sys.setcheckinterval(old_interval)
Example #16
    def test_trashcan_threads(self):
        # Issue #13992: trashcan mechanism should be thread-safe
        NESTING = 60
        N_THREADS = 2

        def sleeper_gen():
            """A generator that releases the GIL when closed or dealloc'ed."""
            try:
                yield
            finally:
                time.sleep(0.000001)

        class C(list):
            # Appending to a list is atomic, which avoids the use of a lock.
            inits = []
            dels = []
            def __init__(self, alist):
                self[:] = alist
                C.inits.append(None)
            def __del__(self):
                # This __del__ is called by subtype_dealloc().
                C.dels.append(None)
                # `g` will release the GIL when garbage-collected.  This
                # helps assert subtype_dealloc's behaviour when threads
                # switch in the middle of it.
                g = sleeper_gen()
                next(g)
                # Now that __del__ is finished, subtype_dealloc will proceed
                # to call list_dealloc, which also uses the trashcan mechanism.

        def make_nested():
            """Create a sufficiently nested container object so that the
            trashcan mechanism is invoked when deallocating it."""
            x = C([])
            for i in range(NESTING):
                x = [C([x])]
            del x

        def run_thread():
            """Exercise make_nested() in a loop."""
            while not exit:
                make_nested()

        old_checkinterval = sys.getcheckinterval()
        sys.setcheckinterval(3)
        try:
            exit = []
            threads = []
            for i in range(N_THREADS):
                t = threading.Thread(target=run_thread)
                threads.append(t)
            with start_threads(threads, lambda: exit.append(1)):
                time.sleep(1.0)
        finally:
            sys.setcheckinterval(old_checkinterval)
        gc.collect()
        self.assertEqual(len(C.inits), len(C.dels))
Example #17
def printCheckInterval(self):
    """ Get/set python check interval """
    request = self.REQUEST
    interval = request.get('interval', None)

    if interval:
        return sys.setcheckinterval(int(interval))
    else:
        return sys.getcheckinterval()
Example #18
 def setUp(self):
     # Set a very small check interval, this will make it more likely
     # that the interpreter crashes when threading is done incorrectly.
     if sys.version_info[:2] >= (3, 2):
         self._int = sys.getswitchinterval()
         sys.setswitchinterval(0.0000001)
     else:
         self._int = sys.getcheckinterval()
         sys.setcheckinterval(1)
Example #19
 def __unicode__(self):
     """Return a serialized version of this tree/branch."""
     
     ci = getcheckinterval()
     setcheckinterval(0)
     
     value = ''.join(self.render('utf8')).decode('utf8')
     
     setcheckinterval(ci)
     return value
Example #20
 def __unicode__(self):
     """Return a serialized version of this tree/branch."""
     
     # TODO: Determine how badly this affects things.
     ci = getcheckinterval()
     setcheckinterval(0)
     
     value = ''.join(self.render('utf8')).decode('utf8')
     
     setcheckinterval(ci)
     return value
Example #21
 def Persist(self, filename):
   """Persist all state to filename."""
   try:
     original_checkinterval = sys.getcheckinterval()
     sys.setcheckinterval(2**31-1)  # Lock out other threads so nothing can
                                    # modify |self| during pickling.
     pickled_self = cPickle.dumps(self, cPickle.HIGHEST_PROTOCOL)
   finally:
     sys.setcheckinterval(original_checkinterval)
   with open(filename, 'wb') as f:
     f.write(pickled_self)
Example #22
def safe_fork():
    old = sys.getcheckinterval()
    try:
        sys.setcheckinterval(sys.maxint)
        fork_result = os.fork()
        if not fork_result:
            # Child process: reinitialize threading structures
            reload(threading)
    finally:
        sys.setcheckinterval(old)
    return fork_result
Example #23
def _empty_async():
    # run this after changing the signal handler away from the ipython 0.10
    # one, in order to remove any possible queued PyThreadState_SetAsyncExc
    # exception
    # This function can be interrupted by any exception, but probably
    # by KeyboardInterrupt
    # We need to use the same inner loop as for test_async below
    # for the default check interval of 100, this function
    #  takes 5.5 us on reuletlab4
    for i in range(sys.getcheckinterval()+50):
        pass
Example #24
    def test_thread_safety_during_modification(self):
        hosts = range(100)
        policy = RoundRobinPolicy()
        policy.populate(None, hosts)

        errors = []

        def check_query_plan():
            try:
                for i in xrange(100):
                    list(policy.make_query_plan())
            except Exception as exc:
                errors.append(exc)

        def host_up():
            for i in xrange(1000):
                policy.on_up(randint(0, 99))

        def host_down():
            for i in xrange(1000):
                policy.on_down(randint(0, 99))

        threads = []
        for i in range(5):
            threads.append(Thread(target=check_query_plan))
            threads.append(Thread(target=host_up))
            threads.append(Thread(target=host_down))

        # make the GIL switch after every instruction, maximizing
        # the chance of race conditions
        check = six.PY2 or '__pypy__' in sys.builtin_module_names
        if check:
            original_interval = sys.getcheckinterval()
        else:
            original_interval = sys.getswitchinterval()

        try:
            if check:
                sys.setcheckinterval(0)
            else:
                sys.setswitchinterval(0.0001)
            # map() is lazy on Python 3, so start/join the threads explicitly
            for t in threads:
                t.start()
            for t in threads:
                t.join()
        finally:
            if check:
                sys.setcheckinterval(original_interval)
            else:
                sys.setswitchinterval(original_interval)

        if errors:
            self.fail("Saw errors: %s" % (errors,))
Example #25
 def test_pending_calls_race(self):
     # Issue #14406: multi-threaded race condition when waiting on all
     # futures.
     event = threading.Event()
     def future_func():
         event.wait()
     oldcheckinterval = sys.getcheckinterval()
     sys.setcheckinterval(1)
     try:
         fs = {self.executor.submit(future_func) for i in range(100)}
         event.set()
         futures.wait(fs, return_when=futures.ALL_COMPLETED)
     finally:
         sys.setcheckinterval(oldcheckinterval)
Example #26
 def setUp(self):
     """
     Reduce the CPython check interval so that thread switches happen much
     more often, hopefully exercising more possible race conditions.  Also,
     delay actual test startup until the reactor has been started.
     """
     if _PY3:
         if getattr(sys, 'getswitchinterval', None) is not None:
             self.addCleanup(sys.setswitchinterval, sys.getswitchinterval())
             sys.setswitchinterval(0.0000001)
     else:
         if getattr(sys, 'getcheckinterval', None) is not None:
             self.addCleanup(sys.setcheckinterval, sys.getcheckinterval())
             sys.setcheckinterval(7)
Example #27
 def _runThreads(self, *funcs):
     check_interval = sys.getcheckinterval()
     sys.setcheckinterval(1)
     try:
         errors = []
         threads = [self._makeThread(f, errors) for f in funcs]
         for t in threads:
             t.start()
         for t in threads:
             t.join()
         for (c, e, t) in errors:
             raise e, None, t
     finally:
         sys.setcheckinterval(check_interval)
Example #28
def reconfig():
    ''' Set global best configs, eg: gc and recursion limit '''
    cfg = {}
    cfg['sys.platform'] = sys.platform
    cfg['sys.maxsize'] = sys.maxsize
    cfg['sys.path'] = sys.path
    cfg['sys.excepthook'] = sys.excepthook
    cfg['old sys.checkinterval'] = sys.getcheckinterval()
    sys.setcheckinterval(BEST_CHECK_INTERVAL)

    cfg['new sys.checkinterval'] = sys.getcheckinterval()
    cfg['old sys.recursionlimit'] = sys.getrecursionlimit()
    sys.setrecursionlimit(BEST_RECURSION_LIMIT)

    cfg['new sys.recursionlimit'] = sys.getrecursionlimit()
    cfg['old gc.threshold'] = str(gc.get_threshold())
    gc.set_threshold(512, 8, 6)

    cfg['new gc.threshold'] = str(gc.get_threshold())
    sys._clear_type_cache()

    cfg['sys._clear_type_cache'] = True
    return cfg
Example #29
def section_py_internals():
	data = []
	if hasattr( sys, 'builtin_module_names' ):
		data += 'Built-in Modules', ', '.join( sys.builtin_module_names )
	data += 'Byte Order', sys.byteorder + ' endian'
	if hasattr( sys, 'getcheckinterval' ): data += 'Check Interval', sys.getcheckinterval()
	if hasattr(sys, 'getfilesystemencoding'):
		data += 'File System Encoding', sys.getfilesystemencoding()
	data += 'Maximum Integer Size', str( sys.maxsize ) + ' (%s)' % str( hex( sys.maxsize ) ).upper().replace( "X", "x" )
	if hasattr( sys, 'getrecursionlimit' ): data += 'Maximum Recursion Depth', sys.getrecursionlimit()
	if hasattr( sys, 'tracebacklimit' ): data += 'Maximum Traceback Limit', sys.tracebacklimit
	else:
		data += 'Maximum Traceback Limit', '1000'
	data += 'Maximum Unicode Code Point', sys.maxunicode
	return table_row( data )
Example #30
 def test_lock_function(self):
     old_value = sys.getcheckinterval()
     self.addCleanup(sys.setcheckinterval, old_value)
     lock_status = _pyheapdump.lock_function()
     self.addCleanup(lock_status.unlock)
     self.assertTrue(lock_status.is_locked)
     self.assertFalse(lock_status.has_io_lock)
     with lock_status.context_with_io():
         self.assertFalse(lock_status.is_locked)
         self.assertTrue(lock_status.has_io_lock)
     self.assertFalse(lock_status.is_locked)
     self.assertTrue(lock_status.has_io_lock)
     lock_status.unlock()
     self.assertFalse(lock_status.is_locked)
     self.assertFalse(lock_status.has_io_lock)
Example #31
def section_py_internals():
    data = []
    if hasattr(sys, 'builtin_module_names'):
        data += 'Built-in Modules', ', '.join(sys.builtin_module_names)
    data += 'Byte Order', sys.byteorder + ' endian'
    if hasattr(sys, 'getcheckinterval'):
        data += 'Check Interval', sys.getcheckinterval()
    if hasattr(sys, 'getfilesystemencoding'):
        data += 'File System Encoding', sys.getfilesystemencoding()
    data += 'Maximum Integer Size', str(sys.maxsize) + ' (%s)' % str(
        hex(sys.maxsize)).upper().replace("X", "x")
    if hasattr(sys, 'getrecursionlimit'):
        data += 'Maximum Recursion Depth', sys.getrecursionlimit()
    if hasattr(sys, 'tracebacklimit'):
        data += 'Maximum Traceback Limit', sys.tracebacklimit
    else:
        data += 'Maximum Traceback Limit', '1000'
    data += 'Maximum Unicode Code Point', sys.maxunicode
    return makecells(data)
Example #32
def frequent_thread_switches():
    """Make concurrency bugs more likely to manifest."""
    interval = None
    if not sys.platform.startswith('java'):
        if hasattr(sys, 'getswitchinterval'):
            interval = sys.getswitchinterval()
            sys.setswitchinterval(1e-6)
        else:
            interval = sys.getcheckinterval()
            sys.setcheckinterval(1)

    try:
        yield
    finally:
        if not sys.platform.startswith('java'):
            if hasattr(sys, 'setswitchinterval'):
                sys.setswitchinterval(interval)
            else:
                sys.setcheckinterval(interval)
Example #33
def lazy_client_trial(reset, target, test, get_client, use_greenlets):
    """Test concurrent operations on a lazily-connecting client.

    `reset` takes a collection and resets it for the next trial.

    `target` takes a lazily-connecting collection and an index from
    0 to NTHREADS, and performs some operation, e.g. an insert.

    `test` takes the lazily-connecting collection and asserts a
    post-condition to prove `target` succeeded.
    """
    if use_greenlets and not has_gevent:
        raise SkipTest('Gevent not installed')

    collection = MongoClient(host, port).pymongo_test.test

    # Make concurrency bugs more likely to manifest.
    interval = None
    if not sys.platform.startswith('java'):
        if sys.version_info >= (3, 2):
            interval = sys.getswitchinterval()
            sys.setswitchinterval(1e-6)
        else:
            interval = sys.getcheckinterval()
            sys.setcheckinterval(1)

    try:
        for i in range(NTRIALS):
            reset(collection)
            lazy_client = get_client(_connect=False,
                                     use_greenlets=use_greenlets)

            lazy_collection = lazy_client.pymongo_test.test
            run_threads(lazy_collection, target, use_greenlets)
            test(lazy_collection)

    finally:
        if not sys.platform.startswith('java'):
            if sys.version_info >= (3, 2):
                sys.setswitchinterval(interval)
            else:
                sys.setcheckinterval(interval)
Example #34
def _inject_jump(self, where, dest):
    """
    Monkeypatch bytecode at ``where`` to force it to jump to ``dest``.

    Returns a function which puts things back to how they were.
    """
    # We're about to do dangerous things to a function's code content.
    # We can't make a lock to prevent the interpreter from using those
    # bytes, so the best we can do is to set the check interval to be high
    # and just pray that this keeps other threads at bay.
    if sys.version_info[0] < 3:
        old_check_interval = sys.getcheckinterval()
        sys.setcheckinterval(2**20)
    else:
        old_check_interval = sys.getswitchinterval()
        sys.setswitchinterval(1000)

    pb = ctypes.pointer(self.ob_sval)
    orig_bytes = [pb[where + i][0] for i in range(3)]

    v = struct.pack("<BH", opcode.opmap["JUMP_ABSOLUTE"], dest)

    # Overwrite code to cause it to jump to the target
    if sys.version_info[0] < 3:
        for i in range(3):
            pb[where + i][0] = ord(v[i])
    else:
        for i in range(3):
            pb[where + i][0] = v[i]

    def tidy_up():
        """
        Put the bytecode back to how it was. Good as new.
        """
        if sys.version_info[0] < 3:
            sys.setcheckinterval(old_check_interval)
        else:
            sys.setswitchinterval(old_check_interval)
        for i in range(3):
            pb[where + i][0] = orig_bytes[i]

    return tidy_up
Example #35
    def trial(self, reset, target, test):
        """Test concurrent operations on a lazily-connecting client.

        `reset` takes a collection and resets it for the next trial.

        `target` takes a lazily-connecting collection and an index from
        0 to nthreads, and performs some operation, e.g. an insert.

        `test` takes a collection and asserts a post-condition to prove
        `target` succeeded.
        """
        if self.use_greenlets and not has_gevent:
            raise SkipTest('Gevent not installed')

        collection = self._get_client().pymongo_test.test

        # Make concurrency bugs more likely to manifest.
        if not sys.platform.startswith('java'):
            if PY3:
                self.interval = sys.getswitchinterval()
                sys.setswitchinterval(1e-6)
            else:
                self.interval = sys.getcheckinterval()
                sys.setcheckinterval(1)

        try:
            for i in range(self.ntrials):
                reset(collection)
                lazy_client = self._get_client(
                    _connect=False, use_greenlets=self.use_greenlets)

                lazy_collection = lazy_client.pymongo_test.test
                self.run_threads(lazy_collection, target)
                test(collection)

        finally:
            if not sys.platform.startswith('java'):
                if PY3:
                    sys.setswitchinterval(self.interval)
                else:
                    sys.setcheckinterval(self.interval)
Example #36
def section_py_internals():
    data = []
    if hasattr(sys, 'builtin_module_names'):
        data.append(('Built-in Modules', ', '.join(sys.builtin_module_names)))
    if hasattr(keyword, 'kwlist'):
        data.append(('Keywords List', ', '.join(keyword.kwlist)))
    data.append(('Python path', sys.path))
    data.append(('Byte Order', sys.byteorder + ' endian'))
    if hasattr(sys, 'getcheckinterval'):
        data.append(('Check Interval', sys.getcheckinterval()))
    if hasattr(sys, 'getfilesystemencoding'):
        data.append(('File System Encoding', sys.getfilesystemencoding()))
    data.append(('Maximum Integer Size', str(sys.maxsize) + ' (%s)' % str(hex(sys.maxsize)).upper().replace("X", "x")))
    if hasattr(sys, 'getrecursionlimit'):
        data.append(('Maximum Recursion Depth', sys.getrecursionlimit()))
    if hasattr(sys, 'tracebacklimit'):
        data.append(('Maximum Traceback Limit', sys.tracebacklimit))
    else:
        data.append(('Maximum Traceback Limit', '1000'))
    data.append(('Maximum Unicode Code Point', sys.maxunicode))
    return 'Python Internals', data
Example #37
def collect_py_internals():
    """Return Python internal information."""
    data = OrderedDict()
    if hasattr(sys, 'builtin_module_names'):
        data['Built-in Modules'] = ', '.join(sys.builtin_module_names)
    data['Byte Order'] = sys.byteorder + ' endian'
    if hasattr(sys, 'getcheckinterval'):
        data['Check Interval'] = sys.getcheckinterval()
    if hasattr(sys, 'getfilesystemencoding'):
        data['File System Encoding'] = sys.getfilesystemencoding()
    if _PYTHON_MAJOR < 3:
        hex_maxint = str(hex(sys.maxint)).upper().replace("X", "x")
        data['Maximum Integer Size'] = '{} ({})'.format(sys.maxint, hex_maxint)
    if hasattr(sys, 'getrecursionlimit'):
        data['Maximum Recursion Depth'] = sys.getrecursionlimit()
    if hasattr(sys, 'tracebacklimit'):
        data['Maximum Traceback Limit'] = sys.tracebacklimit
    else:
        data['Maximum Traceback Limit'] = '1000'
    data['Maximum Unicode Code Point'] = sys.maxunicode
    return data
Example #38
 def test_enumerate_after_join(self):
     # Try hard to trigger #1703448: a thread is still returned in
     # threading.enumerate() after it has been join()ed.
     enum = threading.enumerate
     if not test.test_support.due_to_ironpython_bug("http://ironpython.codeplex.com/WorkItem/View.aspx?WorkItemId=1042"):
         old_interval = sys.getcheckinterval()
     try:
         for i in xrange(1, 100):
             # Try a couple times at each thread-switching interval
             # to get more interleavings.
             if not test.test_support.due_to_ironpython_bug("http://ironpython.codeplex.com/WorkItem/View.aspx?WorkItemId=1042"):
                 sys.setcheckinterval(i // 5)
             t = threading.Thread(target=lambda: None)
             t.start()
             t.join()
             l = enum()
             self.assertNotIn(t, l,
                 "#1703448 triggered after %d trials: %s" % (i, l))
     finally:
         if not test.test_support.due_to_ironpython_bug("http://ironpython.codeplex.com/WorkItem/View.aspx?WorkItemId=1042"):
             sys.setcheckinterval(old_interval)
Example #39
    def test_is_alive_after_fork(self):
        # Try hard to trigger #18418: is_alive() could sometimes be True on
        # threads that vanished after a fork.
        old_interval = sys.getcheckinterval()

        # Make the bug more likely to manifest.
        sys.setcheckinterval(10)

        try:
            for i in range(20):
                t = threading.Thread(target=lambda: None)
                t.start()
                pid = os.fork()
                if pid == 0:
                    os._exit(1 if t.is_alive() else 0)
                else:
                    t.join()
                    pid, status = os.waitpid(pid, 0)
                    self.assertEqual(0, status)
        finally:
            sys.setcheckinterval(old_interval)
Example #40
    def test_thread_safety_during_modification(self):
        hosts = range(100)
        policy = RoundRobinPolicy()
        policy.populate(None, hosts)

        errors = []

        def check_query_plan():
            try:
                for i in xrange(100):
                    list(policy.make_query_plan())
            except Exception as exc:
                errors.append(exc)

        def host_up():
            for i in xrange(1000):
                policy.on_up(randint(0, 99))

        def host_down():
            for i in xrange(1000):
                policy.on_down(randint(0, 99))

        threads = []
        for i in range(5):
            threads.append(Thread(target=check_query_plan))
            threads.append(Thread(target=host_up))
            threads.append(Thread(target=host_down))

        # make the GIL switch after every instruction, maximizing
        # the chance of race conditions
        original_interval = sys.getcheckinterval()
        try:
            sys.setcheckinterval(0)
            map(lambda t: t.start(), threads)
            map(lambda t: t.join(), threads)
        finally:
            sys.setcheckinterval(original_interval)

        if errors:
            self.fail("Saw errors: %s" % (errors, ))
Example #41
 def test_enumerate_after_join(self):
     # Try hard to trigger #1703448: a thread is still returned in
     # threading.enumerate() after it has been join()ed.
     enum = threading.enumerate
     import warnings
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', DeprecationWarning)
         # get/set checkinterval are deprecated in Python 3
         old_interval = sys.getcheckinterval()
         try:
             for i in xrange(1, 100):
                 # Try a couple times at each thread-switching interval
                 # to get more interleavings.
                 sys.setcheckinterval(i // 5)
                 t = threading.Thread(target=lambda: None)
                 t.start()
                 t.join()
                 l = enum()
                 self.assertFalse(t in l,
                                  "#1703448 triggered after %d trials: %s" % (i, l))
         finally:
             sys.setcheckinterval(old_interval)
Example #42
def get_pyvm_info():
    ret = {}
    ret['executable'] = sys.executable
    ret['is_64bit'] = IS_64BIT
    ret['version'] = sys.version
    ret['compiler'] = platform.python_compiler()
    ret['build_date'] = platform.python_build()[1]
    ret['version_info'] = list(sys.version_info)
    ret['have_ucs4'] = getattr(sys, 'maxunicode', 0) > 65536
    ret['have_readline'] = HAVE_READLINE

    try:
        ret['active_thread_count'] = len(sys._current_frames())
    except:
        ret['active_thread_count'] = None
    ret['recursion_limit'] = sys.getrecursionlimit()  # TODO: max_stack_depth?
    try:
        ret['gc'] = get_gc_info()
    except:
        pass
    ret['check_interval'] = sys.getcheckinterval()
    return ret
Example #43
 def test_async(n_inner=200, n_repeat=1000):
     """ n_inner should be larger than the check interval by around 20.
         It returns a list of for-loop counts.
         The first one could be anything below the check interval;
         the other ones should be similar.
         Anything bigger is bad.
     """
     check_interval = sys.getcheckinterval()
     print 'current check interval', check_interval
     result = []
     for i in range(n_repeat):
         j = -99
         pythonapi.PyThreadState_SetAsyncExc(c_long(thread.get_ident()),
                                             py_object(KeyboardInterrupt))
         try:
             for j in range(n_inner):
                 pass
         except KeyboardInterrupt:
             result.append(j)
     for r in result:
         if r > check_interval:
              print '  WARNING found: %i > check interval' % r
     return result
Example #44
import sys
# The interval is set to 100 instructions:
print(sys.getcheckinterval())
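The snippet above prints the default of 100 bytecode instructions. For context, sys.getcheckinterval()/sys.setcheckinterval() were deprecated in Python 3.2 in favour of the time-based sys.getswitchinterval()/sys.setswitchinterval() and removed in Python 3.9, so portable code usually feature-tests for the newer pair first. A minimal sketch of that idea; the helper names below are invented for illustration, not taken from any example here:

import sys

def get_interval():
    # Invented helper: returns (kind, value); the value is an instruction
    # count with the old API and seconds with the new one.
    if hasattr(sys, 'getswitchinterval'):
        return 'switch', sys.getswitchinterval()
    return 'check', sys.getcheckinterval()

def set_interval(value):
    # Invented helper: pass seconds for setswitchinterval, an instruction
    # count for setcheckinterval.
    if hasattr(sys, 'setswitchinterval'):
        sys.setswitchinterval(value)
    else:
        sys.setcheckinterval(value)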
Example #45
 def test_setcheckinterval(self):
     self.assertRaises(TypeError, sys.setcheckinterval)
     orig = sys.getcheckinterval()
     for n in 0, 100, 120, orig: # orig last to restore starting state
         sys.setcheckinterval(n)
         self.assertEqual(sys.getcheckinterval(), n)
Example #46
    #     threading.Thread.__init__(self, args, kwargs)


def main():

    # t = threading.Thread(target=worker, name='worker')  # thread object
    t = MyThread(target=worker, name='worker')  # thread object

    showthreadinfo()
    print t.is_alive()
    print "id   ====> ", t.ident
    # t.start()

    while (True):
        time.sleep(1)
        if t.is_alive():
            print "True--->"
        else:
            print "False--->"


if __name__ == '__main__':
    main()

    import sys
    # The Python bytecode interpreter executes instructions one at a time, in order.
    # Internally, Python maintains a counter that serves as its internal clock:
    # if that value is N, Python invokes the thread-scheduling machinery after
    # executing N instructions. The value can be read with the code below.
    print sys.getcheckinterval()
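Following on from the note above, a minimal sketch (assuming Python 2, or any interpreter that still has setcheckinterval) of the save/lower/restore idiom that many of the examples in this collection hand-roll with try/finally; setting the interval to 0 makes the periodic check, and therefore a possible thread switch, happen after every virtual instruction:

import sys
from contextlib import contextmanager

@contextmanager
def forced_thread_switches():
    # Temporarily check after every bytecode instruction, then restore the
    # previous interval even if the body raises.
    old = sys.getcheckinterval()
    sys.setcheckinterval(0)
    try:
        yield
    finally:
        sys.setcheckinterval(old)

Usage would look like: with forced_thread_switches(): run_racy_code(), where run_racy_code stands in for whatever code you want to stress.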
Example #47
import sys

v1 = sys.getcheckinterval()
print(v1)
Example #48
 def test_setcheckinterval(self):
     raises(TypeError, sys.setcheckinterval)
     orig = sys.getcheckinterval()
     for n in 0, 100, 120, orig:  # orig last to restore starting state
         sys.setcheckinterval(n)
         assert sys.getcheckinterval() == n
Example #49
table += "Compiler", platform.python_compiler()
if hasattr(sys, "api_version"): table += "Python API", sys.api_version

# Print out the main Python information
print_tc_table("Python System and Build Information", table)

#
# Gather the interpreter's nitty gritty information
#
table = []

if hasattr(sys, "builtin_module_names"):
    table += "Built-in Modules", ", ".join(sys.builtin_module_names)
table += "Byte Order", sys.byteorder + " endian"
if hasattr(sys, "getcheckinterval"):
    table += "Check Interval", sys.getcheckinterval()
if hasattr(sys, "getfilesystemencoding"):
    table += "File System Encoding", sys.getfilesystemencoding()
table += "Maximum Integer Size", str(
    sys.maxint) + " (%s)" % str(hex(sys.maxint)).upper().replace("X", "x")
if hasattr(sys, "getrecursionlimit"):
    table += "Maximum Recursion Depth", sys.getrecursionlimit()

if hasattr(sys, "tracebacklimit"):
    table += "Maximum Traceback Limit", sys.tracebacklimit
else:
    table += "Maximum Traceback Limit", "1000"

table += "Maximum Unicode Code Point", sys.maxunicode

# Print out the nitty gritty
Example #50
 def setUp(self):
     self._original_check_interval = sys.getcheckinterval()
     sys.setcheckinterval(1)
Example #51
sys.exc_clear()     # This function clears all information relating to the current or last exception that occurred in the current thread
# ............................................
sys.exc_type 
sys.exc_value 
sys.exc_traceback 
# Deprecated since version 1.5: Use exc_info() instead.
# ............................................
sys.executable  # A string giving the absolute path of the executable binary for the Python interpreter
# ............................................
sys.exit([arg])     # Exit from Python. This is implemented by raising the SystemExit exception,
# ............................................
sys.flags   # The struct sequence flags exposes the status of command line flags. The attributes are read only.
# ............................................
sys.float_repr_style    # A string indicating how the repr() function behaves for floats
# ............................................
sys.getcheckinterval()  # Return the interpreter’s “check interval”; 
# ............................................
sys.getdefaultencoding()    # Return the name of the current default string encoding used by the Unicode implementation.
# ............................................
sys.getfilesystemencoding()     # Return the name of the encoding used to convert Unicode filenames into system file names, or None if the system default encoding is used
# ............................................
sys.getrefcount(object)     # Return the reference count of the object
# ............................................
sys.getsizeof(object[, default])    # Return the size of an object in bytes
# ............................................
sys.getprofile()    # Get the profiler function as set by setprofile().
# ............................................
sys.gettrace()  # Get the trace function as set by settrace().
# ............................................
sys.getwindowsversion()     # Return a named tuple describing the Windows version currently running.
# ............................................
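As a small, hedged illustration of a few of the calls listed above (Python 2 assumed; actual values vary by interpreter, build, and platform):

import sys

print(sys.executable)               # absolute path of the running interpreter
print(sys.getdefaultencoding())     # 'ascii' on a stock Python 2 build
print(sys.getfilesystemencoding())
print(sys.getrecursionlimit())      # 1000 by default
print(sys.getrefcount(object))      # reference count of the built-in object type
print(sys.getsizeof(1))             # size of a small int, in bytes
print(sys.getcheckinterval())       # 100 by default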
Example #52
 def setUp(self):
     self._check_interval = sys.getcheckinterval()
     sys.setcheckinterval(10)
     self.wrapped_fs = TempFS()
     self.fs = CacheFS(self.wrapped_fs, cache_timeout=0.01)
Example #53
def reimport(*modules):
    """Reimport python modules. Multiple modules can be passed either by
        name or by reference. Only pure python modules can be reimported.

        For advanced control, global variables can be placed in modules
        that allows finer control of the reimport process.

        If a package module has a true value for "__package_reimport__"
        then that entire package will be reimported when any of its children
        packages or modules are reimported.

        If a package module defines __reimported__ it must be a callable
        function that accepts one argument and returns a bool. The argument
        is the reference to the old version of that module before any
        cleanup has happened. The function should normally return True to
        allow the standard reimport cleanup. If the function returns false
        then cleanup will be disabled for only that module. Any exceptions
        raised during the callback will be handled by traceback.print_exc,
        similar to what happens with tracebacks in the __del__ method.
        """
    __internal_swaprefs_ignore__ = "reimport"
    reloadSet = set()

    if not modules:
        return

    # Get names of all modules being reloaded
    for module in modules:
        name, target = _find_exact_target(module)
        if not target:
            raise ValueError("Module %r not found" % module)
        if not _is_code_module(target):
            raise ValueError("Cannot reimport extension, %r" % name)

        reloadSet.update(_find_reloading_modules(name))

    # Sort module names
    reloadNames = _package_depth_sort(reloadSet, False)

    # Check for SyntaxErrors ahead of time. This won't catch all
    # possible SyntaxErrors or any other ImportErrors. But these
    # should be the most common problems, and now is the cleanest
    # time to abort.
    # I know this gets compiled again anyways. It could be
    # avoided with py_compile, but I will not be the creator
    # of messy .pyc files!
    for name in reloadNames:
        filename = getattr(sys.modules[name], "__file__", None)
        if not filename:
            continue
        pyname = os.path.splitext(filename)[0] + ".py"
        try:
            data = open(pyname, "rU").read() + "\n"
        except (IOError, OSError):
            continue

        compile(data, pyname, "exec", 0, False)  # Let this raise exceptions

    # Begin changing things. We "grab the GIL", so other threads
    # don't get a chance to see our half-baked universe
    imp.acquire_lock()
    prevInterval = sys.getcheckinterval()
    sys.setcheckinterval(min(sys.maxint, 0x7fffffff))
    try:

        # Python will munge the parent package on import. Remember original value
        parentValues = []
        parentPackageDeleted = lambda: None
        for name in reloadNames:
            parentPackageName = name.rsplit(".", 1)
            if len(parentPackageName) == 2:
                parentPackage = sys.modules.get(parentPackageName[0], None)
                parentValue = getattr(parentPackage, parentPackageName[1], parentPackageDeleted)
                if parentValue != sys.modules[name]:
                    parentValues.append((parentPackage, parentPackageName[1], parentValue))
                parentPackage = parentValue = None

        # Move modules out of sys
        oldModules = {}
        for name in reloadNames:
            oldModules[name] = sys.modules.pop(name)
        ignores = (id(oldModules),)
        prevNames = set(sys.modules)

        # Reimport modules, trying to rollback on exceptions
        try:
            try:
                for name in reloadNames:
                    if name not in sys.modules:
                        __import__(name)

            except StandardError:
                # Try to dissolve any newly import modules and revive the old ones
                newNames = set(sys.modules) - prevNames
                newNames = _package_depth_sort(newNames, True)
                for name in newNames:
                    backoutModule = sys.modules.pop(name, None)
                    if backoutModule is not None:
                        _unimport(backoutModule, ignores)
                    del backoutModule

                sys.modules.update(oldModules)
                raise

        finally:
            # Fix Python's automatic shoving of children into parent packages
            for parentPackage, name, value in parentValues:
                if value == parentPackageDeleted:
                    try:
                        delattr(parentPackage, name)
                    except AttributeError:
                        pass
                else:
                    setattr(parentPackage, name, value)
            parentValues = parentPackage = parentPackageDeleted = value = None

        newNames = set(sys.modules) - prevNames
        newNames = _package_depth_sort(newNames, True)

        # Update timestamps for loaded time
        now = time.time() - 1.0
        for name in newNames:
            _module_timestamps[name] = (now, True)

        # Push exported namespaces into parent packages
        pushSymbols = {}
        for name in newNames:
            oldModule = oldModules.get(name)
            if not oldModule:
                continue
            parents = _find_parent_importers(name, oldModule, newNames)
            pushSymbols[name] = parents
        for name, parents in pushSymbols.iteritems():
            for parent in parents:
                oldModule = oldModules[name]
                newModule = sys.modules[name]
                _push_imported_symbols(newModule, oldModule, parent)

        # Rejigger the universe
        for name in newNames:
            old = oldModules.get(name)
            if not old:
                continue
            new = sys.modules[name]
            rejigger = True
            reimported = getattr(new, "__reimported__", None)
            if reimported:
                try:
                    rejigger = reimported(old)
                except StandardError:
                    # What else can we do? the callbacks must go on
                    # Note, this is same as __del__ behaviour. /shrug
                    traceback.print_exc()

            if rejigger:
                _rejigger_module(old, new, ignores)
            else:
                _unimport_module(new, ignores)

    finally:
        # Restore the GIL
        imp.release_lock()
        sys.setcheckinterval(prevInterval)
        time.sleep(0)
Example #54
 def setUp(self):
     self._check_interval = sys.getcheckinterval()
     sys.setcheckinterval(10)
     self.fs = ConnectionManagerFS(TempFS())
Example #55
 def setUp(self):
     self._check_interval = sys.getcheckinterval()
     sys.setcheckinterval(10)
     c_fs = ConnectionManagerFS(DisconnectingFS, poll_interval=0.1)
     self.fs = DisconnectRecoveryFS(c_fs)
Example #56
# Use psutil and sys to dump as much system information as possible at once
# pip install psutil
import psutil
import sys
"""
System information
For details about resource usage like memory, disk, and processes see psutil lib
"""

print("Built in modules: ", sys.builtin_module_names)
print("Loaded modules: ", sys.modules)
print("Don't Write Bytecode: ", sys.dont_write_bytecode)
print("Python executable: ", sys.executable)
print("Python flags: ", sys.flags)
print("Check Interval: ", sys.getcheckinterval())
#print("dlopen() flags: ", sys.getdlopenflags())  # Does not work in Windows
print("Recursion limit: ", sys.getrecursionlimit())

print("Floating points: ", sys.float_info)
print("Float repr style: ", sys.float_repr_style)
#print("Long integer: ", sys.long_info)  # Not working in Windows
print("Max integer: ", sys.maxsize)
print("Max size: ", sys.maxsize)
print("Max unicode: ", sys.maxunicode)

print("Byte order: ", sys.byteorder)
print("Platform: ", sys.platform)
print("Windows version: ", sys.getwindowsversion())
print("File system encoding: ", sys.getfilesystemencoding())
print("Default encoding: ", sys.getdefaultencoding())
print("C API version: ", sys.api_version)
Example #57
import os
import shutil
import subprocess
import sys

s = 'hello world'
print(s.capitalize())
print(s.upper())
print(s.center(40, "#"))
print(s.count('l'))
print('_'.join(s))

print('%04d' % 44)
print('%04d %7.3f %d' % (3, 45.6, 44))
print('%(v1)04d %(v2)7.3f %(v1)d' % {'v1': 3, 'v2': 45.6})
print('{0:04d} {1:f} {v1}'.format(31, 36.76, v1=121))
print('{0:04d} {1:f} {v1}'.format(31, 36.76, **{'v1': 141}))

print('sys.executable', sys.executable)
print('sys.getcheckinterval', sys.getcheckinterval())
print('sys.getdefaultencoding', sys.getdefaultencoding())
print('sys.getfilesystemencoding', sys.getfilesystemencoding())
print('sys.getrecursionlimit', sys.getrecursionlimit())
print('sys.maxsize', sys.maxsize)
print('sys.maxunicode', sys.maxunicode)

print('os.getcwd', os.getcwd())
print('os.getpid', os.getpid())
print('alternative seperator', os.path.altsep)

help(os.walk)
help(shutil.move)

subprocess.call(['ls', '-al'])
print('printed to file: ls_output.txt', subprocess.check_output(['ls', '-al']))
Example #58
    def run(self,
            task,
            start_velocity,
            total_blobs,
            blob_seed,
            replay=False,
            wheel_positions=None,
            throttle_positions=None,
            level=1,
            is_gate_on=True):
        """
        The main loop of the program

        :param task: [description]
        :type task: [type]
        :param start_velocity: velocity when game start
        :type start_velocity: float
        :param total_blobs: total number of blobs
        :type total_blobs: int
        :param blob_seed: random seed number
        :type blob_seed: int
        :param replay: [description], defaults to False
        :type replay: bool, optional
        :param wheel_positions: positions of the wheel, defaults to None
        :type wheel_positions: float, optional
        :param throttle_positions: [description], defaults to None
        :type throttle_positions: [type], optional
        :return: the run information
        """

        self.define_display()
        display_rate = refresh_rate

        car, left_lane, right_lane = self.create_objects(
            total_blobs, blob_seed, task, level)

        background_effect_left = 0  # how many rounds the collision effect (changed background color) will be in use
        last_collision = 0  # timestamp of the last collision
        collision_count = 0  # how many collisions so far
        collision_speed_drops = 0  # how many punished (speed decreasing) collisions
        debug_label = label(pos=car.pos, height=20, border=6, box=0,
                            opacity=0)  # debug info text on the car
        debug_label.visible = False

        debug = False
        autopiloting = False
        cheated = False
        batch = False
        pause = False
        play1 = False

        wheel.initx()
        task_description = task_string(task, start_velocity)
        self.wait_start(task_description)
        velocity = start_velocity

        path = []
        step = 0
        max_velocity = velocity
        last_y = self.blobs[-1].y + 2 * safe_back_y
        #input handling
        while car.y < last_y:
            if not batch:
                rate(display_rate)
            clock_begin = time.clock()

            if self.scene.kb.keys:
                cheated = True
                key = self.scene.kb.getkey()
                if key == 'q':
                    self.scene.visible = False
                    return None
                elif key == 'p':
                    pause = not pause
                elif key == ' ':
                    play1 = True
                    self.scene.center.z = 2 * self.scene.center.z
                    self.scene.center.z = self.scene.center.z / 2
                elif key == "+":
                    display_rate *= 2
                elif key == '-':
                    display_rate /= 2
                    if display_rate == 0:
                        display_rate = 1
                elif key == 'd':
                    debug = not debug
                    debug_label.visible = debug
                elif key == 'a':
                    autopiloting = not autopiloting
                elif key == 'b':
                    batch = not batch

            if play1:
                pause = True
                play1 = False
            elif pause:
                continue

            # blob housekeeping
            passed = self.reposition_blobs(car.pos.y, step)

            controlled_velocity = self.gate_passed(car.pos.y, is_gate_on)

            if controlled_velocity != 0:
                velocity = controlled_velocity

            if replay:
                wheelpos = wheel_positions[step]
                throttlepos = throttle_positions[step]
            else:
                (w, t, b, c) = wheel.getprecise()
                wheelpos = w / 1000.0
                throttlepos = t / 1000.0
                brakepos = b / 1000.0
                clutchpos = c / 1000.0

                if autopiloting:
                    wheelpos = self.autopilot(car.pos.x, car.pos.y, velocity)

            # Velocity changes here except collision effects
            if task == auto_speed:
                chosen_velocity = velocity  # to be used in collision analyses
                velocity += passed * nocollision_velocity_up
            elif task == manual_speed:
                throttle_ratio = 1.0 * (throttlepos - pedal_neutral) / (
                    pedal_down - pedal_neutral)
                velocity = sqrt((1.0 - throttle_base) * velocity**2 +
                                2**throttle_multiply * throttle_base *
                                throttle_ratio**throttle_power)
                chosen_velocity = velocity
            else:
                chosen_velocity = velocity

            if max_velocity < velocity:
                max_velocity = velocity

            # Steering
            xp = car.pos.x + wheelpos * velocity / wheel_sensitivity

            if (xp > right_lane_x - lane_margin):
                xp = right_lane_x - lane_margin
            if (xp < left_lane_x + lane_margin):
                xp = left_lane_x + lane_margin

            if (background_effect_left):
                background_effect_left -= 1
                if not background_effect_left:
                    self.scene.background = (0, 0, 0)

            # All the movement happens here
            old_interval = sys.getcheckinterval()
            sys.setcheckinterval(100000)
            car.pos.x = xp
            car.pos.y = car.pos.y + velocity
            debug_label.pos = car.pos

            self.scene.center.x = xp
            self.scene.center.y = self.scene.center.y + velocity
            left_lane.pos.y = left_lane.pos.y + velocity
            right_lane.pos.y = right_lane.pos.y + velocity
            sys.setcheckinterval(old_interval)

            # Collision detection and handing

            collision, collided_color = self.check_collision(
                car.pos.x, car.pos.y, step)
            if (collision):
                collision_count = collision_count + 1
                if task == auto_speed or task == fixed_speed:
                    self.scene.background = (0.5, 0.5, 0.5)
                    background_effect_left += 5
                    if (step - last_collision > collision_grace_period):
                        if task == auto_speed:
                            velocity -= collision_velocity_down
                            if (velocity < guaranteed_velocity):
                                velocity = guaranteed_velocity
                        collision_speed_drops = collision_speed_drops + 1
                        last_collision = step
                elif task == manual_speed:
                    penalty_time = collision_penalty_time[collided_color]
                    if self.penalty_box(car.pos, penalty_time):
                        cheated = True
                    velocity = 0

            debug_label.text = 'Speed %.3f\nMaxSp %.3f\nSpeed drops %i\nCollisions %i\nBlobs %i\nGate %i ' % (
                velocity, max_velocity, collision_speed_drops, collision_count,
                self.first_visible_blob, len(self.gates))

            p = PathEntry()
            p.step = step
            p.x = car.pos.x
            p.y = car.pos.y
            p.collision = collision
            p.wheelpos = wheelpos
            p.throttlepos = throttlepos
            p.velocity = velocity
            p.chosen_velocity = chosen_velocity
            p.clock_begin = clock_begin
            p.time = datetime.datetime.fromtimestamp(time.time())
            path.append(p)
            step += 1

        # After while loop

        clock_diff = path[-1].clock_begin - path[0].clock_begin
        step_diff = path[-1].step - path[0].step
        print "Time:", clock_diff
        print "Average step duration:", clock_diff / step_diff
        print "Steps per second", step_diff / clock_diff
        print "Total steps", step_diff
        print "End speed", velocity
        print "Max speed", max_velocity
        print "Collisions", collision_count
        print "Collision speed drops", collision_speed_drops

        start_label = label(pos=(car.pos.x, car.pos.y + 40, car.pos.z),
                            height=24,
                            border=10,
                            opacity=1)
        start_label.text = 'Run finished'
        start_label.visible = True
        time.sleep(2)
        self.scene.visible = 0

        #cheated = False
        return path, self.blobs, cheated, collision_count, collision_speed_drops, velocity
Example #59

if __name__ == "__main__":
    parser = optparse.OptionParser(
        usage="%prog [options] benchmark_name",
        description="Test the performance of Python's threads.")
    parser.add_option("--num_threads",
                      action="store",
                      type="int",
                      default=2,
                      dest="num_threads",
                      help="Number of threads to test.")
    parser.add_option("--check_interval",
                      action="store",
                      type="int",
                      default=sys.getcheckinterval(),
                      dest="check_interval",
                      help="Value to pass to sys.setcheckinterval().")
    util.add_standard_options_to(parser)
    options, args = parser.parse_args()

    if len(args) != 1:
        parser.error("incorrect number of arguments")

    bm_name = args[0].lower()
    func = globals().get("test_" + bm_name)
    if not func:
        parser.error("unknown benchmark: %s" % bm_name)

    sys.setcheckinterval(options.check_interval)
    util.run_benchmark(options, options.num_runs, func, options.num_threads)
Example #60
def autoschedule(bytecodes=None):
    if bytecodes is None:
        bytecodes = sys.getcheckinterval()
    while stackless.runcount > 1:
        schedule_cb(stackless.run(bytecodes))