Example #1
def test_zip():
    '''Test `exists` works on zip-imported modules.'''

    assert not exists('zip_imported_module_bla_bla')

    zip_string = pkg_resources.resource_string(resources_package,
                                               'archive_with_module.zip')

    with temp_file_tools.TemporaryFolder(prefix='temp_test_garlicsim_') \
                                                          as temp_folder:

        temp_zip_path = os.path.join(temp_folder, 'archive_with_module.zip')

        with open(temp_zip_path, 'wb') as temp_zip_file:

            temp_zip_file.write(zip_string)

        assert not exists('zip_imported_module_bla_bla')

        with sys_tools.TempSysPathAdder(temp_zip_path):
            assert exists('zip_imported_module_bla_bla')
            import zip_imported_module_bla_bla
            assert zip_imported_module_bla_bla.__doc__ == \
                   ('Module for testing `import_tools.exists` on zip-archived '
                    'modules.')
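Note: `temp_file_tools.TemporaryFolder` and `sys_tools.TempSysPathAdder` are garlicsim utilities, but the mechanism being tested is plain Python: any zip archive placed on `sys.path` is handled by the zip importer, so a module stored inside it becomes importable. A minimal, self-contained sketch of the same idea using only the standard library (the module name is reused from the test above; everything else is illustrative, not garlicsim code):

import importlib.util
import os
import sys
import tempfile
import zipfile

# Build a throwaway zip archive containing a single module.
temp_folder = tempfile.mkdtemp(prefix='temp_zip_import_')
archive_path = os.path.join(temp_folder, 'archive_with_module.zip')
with zipfile.ZipFile(archive_path, 'w') as archive:
    archive.writestr('zip_imported_module_bla_bla.py',
                     "'''Module for testing zip imports.'''\n")

# Not importable yet, because the archive isn't on `sys.path`.
assert importlib.util.find_spec('zip_imported_module_bla_bla') is None

# Once the archive is on `sys.path`, the zip importer finds the module.
sys.path.insert(0, archive_path)
try:
    assert importlib.util.find_spec('zip_imported_module_bla_bla') is not None
    import zip_imported_module_bla_bla
finally:
    sys.path.remove(archive_path)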
Example #2
def test_zip():
    '''Test `exists` works on zip-imported modules.'''
    
    assert not exists('zip_imported_module_bla_bla')
    
    zip_string = pkg_resources.resource_string(resources_package,
                                               'archive_with_module.zip')
    
    with temp_file_tools.TemporaryFolder(prefix='temp_test_garlicsim_') \
                                                          as temp_folder:

        temp_zip_path = os.path.join(temp_folder, 'archive_with_module.zip')
        
        with open(temp_zip_path, 'wb') as temp_zip_file:
            
            temp_zip_file.write(zip_string)            
                
        assert not exists('zip_imported_module_bla_bla')
        
        with sys_tools.TempSysPathAdder(temp_zip_path):
            assert exists('zip_imported_module_bla_bla')
            import zip_imported_module_bla_bla
            assert zip_imported_module_bla_bla.__doc__ == \
                   ('Module for testing `import_tools.exists` on zip-archived '
                    'modules.')
            
Example #3
def test_multiprocessing_lock():
    '''Test `describe` works for `multiprocessing.Lock()`.'''
    if not import_tools.exists('multiprocessing'):
        raise nose.SkipTest('`multiprocessing` not installed')
    import multiprocessing
    lock = multiprocessing.Lock()
    describe(lock)
Example #4
def test_non_atomically_pickleables_multiprocessing():
    '''
    Test `is_atomically_pickleable` on non-atomically pickleable objects.
    
    Focusing on `multiprocessing` objects.
    '''
    
    if not import_tools.exists('multiprocessing'):
        raise nose.SkipTest('`multiprocessing` is not installed.')
    
    import multiprocessing

    non_pickleables = [
        multiprocessing.Lock(),
        multiprocessing.BoundedSemaphore(),
        multiprocessing.Condition(),
        multiprocessing.JoinableQueue(),
        multiprocessing.Pool(),
        multiprocessing.Queue(),
        multiprocessing.RLock(),
        multiprocessing.Semaphore(),
    ]
        
    for thing in non_pickleables:
        assert not pickle_tools.is_atomically_pickleable(thing)
        assert not is_pickle_successful(thing)
    
    assert not pickle_tools.is_atomically_pickleable(NonPickleableObject())
Example #5
def test_partially_pickleables_multiprocessing():
    '''
    Test `is_atomically_pickleable` on partially-pickleable objects.

    "Partially-pickleable" means an object which is atomically pickleable but
    not pickleable.
    '''

    if not import_tools.exists('multiprocessing'):
        raise nose.SkipTest('`multiprocessing` is not installed.')

    import multiprocessing

    x = PickleableObject()
    x.lock = threading.Lock()

    partially_pickleables = [
        x, [multiprocessing.BoundedSemaphore()], {
            1: multiprocessing.Lock(),
            2: 3
        },
        set([multiprocessing.Queue(), x])
    ]

    for thing in partially_pickleables:
        assert pickle_tools.is_atomically_pickleable(thing)
        assert not is_pickle_successful(thing)
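The `is_pickle_successful` helper used by these assertions isn't shown on this page. Presumably it just attempts a real pickling pass and reports whether it raised; a minimal sketch of such a helper (only the name comes from the tests, the body is an assumption):

import pickle

def is_pickle_successful(thing):
    '''Return whether `thing` survives a plain `pickle.dumps` call.'''
    try:
        pickle.dumps(thing)
    except Exception:
        return False
    else:
        return True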
Example #6
def test_multiprocessing_lock():
    '''Test `describe` works for `multiprocessing.Lock()`.'''
    if not import_tools.exists('multiprocessing'):
        raise nose.SkipTest('`multiprocessing` not installed')
    import multiprocessing
    lock = multiprocessing.Lock()
    describe(lock)
Example #7
def test_non_atomically_pickleables_multiprocessing():
    '''
    Test `is_atomically_pickleable` on non-atomically pickleable objects.
    
    Focusing on `multiprocessing` objects.
    '''

    if not import_tools.exists('multiprocessing'):
        raise nose.SkipTest('`multiprocessing` is not installed.')

    import multiprocessing

    non_pickleables = [
        multiprocessing.Lock(),
        multiprocessing.BoundedSemaphore(),
        multiprocessing.Condition(),
        multiprocessing.JoinableQueue(),
        multiprocessing.Pool(),
        multiprocessing.Queue(),
        multiprocessing.RLock(),
        multiprocessing.Semaphore(),
    ]

    for thing in non_pickleables:
        assert not pickle_tools.is_atomically_pickleable(thing)
        assert not is_pickle_successful(thing)

    assert not pickle_tools.is_atomically_pickleable(NonPickleableObject())
Example #8
    def __init__(self, crunching_manager, initial_state, crunching_profile):

        BaseCruncher.__init__(self, crunching_manager, initial_state,
                              crunching_profile)

        if not import_tools.exists('multiprocessing'):
            raise Exception(multiprocessing_missing_text)

        from .process import Process

        self.process = Process(self.project.simpack_grokker.get_step_iterator,
                               initial_state, crunching_profile)
        '''The actual process which does the crunching.'''

        self.work_queue = self.process.work_queue
        '''
        Queue for putting completed work to be picked up by the main thread.
        
        In this queue the cruncher will put the states that it produces, in
        chronological order. If the cruncher reaches a simulation end, it will
        put an `EndMarker` in this queue.
        '''

        self.order_queue = self.process.order_queue
        '''Queue for receiving instructions from the main thread.'''
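The docstrings above describe a simple protocol: the cruncher process pushes finished states into `work_queue` in chronological order and finally an `EndMarker`, while `order_queue` carries instructions in the other direction. A self-contained toy version of the consuming side, using standard-library queues and a stand-in sentinel (this is an illustration of the protocol, not garlicsim code):

import queue
import threading

class EndMarker:
    '''Stand-in sentinel meaning the producer has no more states to report.'''

def produce_states(work_queue):
    # Stand-in for the cruncher: push a few "states", then the end marker.
    for clock in range(3):
        work_queue.put({'clock': clock})
    work_queue.put(EndMarker())

work_queue = queue.Queue()
threading.Thread(target=produce_states, args=(work_queue,)).start()

# Main-thread side: drain states in chronological order until `EndMarker`.
while True:
    item = work_queue.get()
    if isinstance(item, EndMarker):
        break
    print('Got state with clock', item['clock'])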
Example #9
def test():
    '''Test the basic workings of `exists`.'''
    assert not exists('adfgadbnv5nrn')
    assert not exists('45gse_e5b6_DFDF')
    assert not exists('VWEV65hnrt___a4')
    assert exists('email')
    assert exists('re')
    assert exists('sys')
    nose.tools.assert_raises(Exception, lambda: exists('email.encoders'))
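For reference, a check equivalent to `import_tools.exists` can be sketched with the standard library's `importlib` (garlicsim's actual implementation may differ; note that, as the last assertion above shows, the real `exists` rejects dotted names such as 'email.encoders'):

import importlib.util

def exists(module_name):
    '''Return whether a top-level module named `module_name` is importable.'''
    if '.' in module_name:
        raise Exception('`exists` only accepts top-level module names, '
                        'not dotted names like %r.' % module_name)
    return importlib.util.find_spec(module_name) is not None

assert exists('sys')
assert not exists('adfgadbnv5nrn')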
Example #10
def test():
    '''Test the basic workings of `exists`.'''
    assert not exists('adfgadbnv5nrn')
    assert not exists('45gse_e5b6_DFDF')
    assert not exists('VWEV65hnrt___a4')
    assert exists('email')
    assert exists('re')
    assert exists('sys')
    nose.tools.assert_raises(Exception, lambda: exists('email.encoders'))
Example #11
def test():
    '''Test cute-(un)pickling on various objects.'''
    if not import_tools.exists('multiprocessing'):
        raise nose.SkipTest('`multiprocessing` is not installed.')
    
    import multiprocessing
    
    totally_pickleable_things = [
        [1, 2, (3, 4)],
        {1: 2, 3: set((1, 2, 3))},
        None, True, False,
        (1, 2, 'meow'),
        'qweqweqasd',
        PickleableObject()
    ]
    
    thing = Object()
    thing.a, thing.b, thing.c, thing.d, thing.e, thing.f, thing.g, thing.h = \
         totally_pickleable_things
    
    thing.x = threading.Lock()
    thing.y = multiprocessing.Lock()
    thing.z = NonPickleableObject()
    
    stream = BytesIO() 
    pickler = CutePickler(stream)
    pickler.dump(thing) 

    stream.seek(0) 
    unpickler = CuteUnpickler(stream) 
    unpickled_thing = unpickler.load() 
    
    assert thing.a == unpickled_thing.a
    assert thing.b == unpickled_thing.b
    assert thing.c == unpickled_thing.c
    assert thing.d == unpickled_thing.d
    assert thing.e == unpickled_thing.e
    assert thing.f == unpickled_thing.f
    assert thing.g == unpickled_thing.g
    # Regarding `.h`, we just check the type because there's no `__eq__`:
    assert type(thing.h) == type(unpickled_thing.h)
    
    assert thing.x != unpickled_thing.x
    assert thing.y != unpickled_thing.y
    assert thing.z != unpickled_thing.z
    
    assert isinstance(unpickled_thing.x, pickle_tools.FilteredObject)
    assert isinstance(unpickled_thing.y, pickle_tools.FilteredObject)
    assert isinstance(unpickled_thing.z, pickle_tools.FilteredObject)
    
    
    
    
    
Example #12
def _check_process_passing(cross_process_persistent_class):
    '''
    Test that CPPs maintain their identities when passed between processes.
    '''
    if not import_tools.exists('multiprocessing'):
        raise nose.SkipTest('`multiprocessing` is not installed.')
    
    cpp_1 = cross_process_persistent_class()
    cpp_2 = cross_process_persistent_class()
    cpp_3 = cross_process_persistent_class()
    
    process = Process()
    process.start()
    
    process.work_queue.put((1, cpp_1))
    assert process.message_queue.get(timeout=10) == 'Stored object.'
    assert process.processed_items_queue.get(timeout=10) is cpp_1
    
    process.work_queue.put((1, cpp_1))
    assert process.message_queue.get(timeout=10) == 'Asserted identity.'
    assert process.processed_items_queue.get(timeout=10) is cpp_1
    
    process.work_queue.put((2, cpp_2))
    assert process.message_queue.get(timeout=10) == 'Stored object.'
    assert process.processed_items_queue.get(timeout=10) is cpp_2
    
    process.work_queue.put((1, cpp_1))
    assert process.message_queue.get(timeout=10) == 'Asserted identity.'
    assert process.processed_items_queue.get(timeout=10) is cpp_1
    
    process.work_queue.put((2, cpp_2))
    assert process.message_queue.get(timeout=10) == 'Asserted identity.'
    assert process.processed_items_queue.get(timeout=10) is cpp_2
    
    process.work_queue.put((3, cpp_3))
    assert process.message_queue.get(timeout=10) == 'Stored object.'
    assert process.processed_items_queue.get(timeout=10) is cpp_3
    
    process.work_queue.put((3, cpp_3))
    assert process.message_queue.get(timeout=10) == 'Asserted identity.'
    assert process.processed_items_queue.get(timeout=10) is cpp_3
    
    process.work_queue.put((1, cpp_1))
    assert process.message_queue.get(timeout=10) == 'Asserted identity.'
    assert process.processed_items_queue.get(timeout=10) is cpp_1
    
    process.terminate()
Example #13
def _check_process_passing(cross_process_persistent_class):
    '''
    Test that CPPs maintain their identities when passed between processes.
    '''
    if not import_tools.exists('multiprocessing'):
        raise nose.SkipTest('`multiprocessing` is not installed.')

    cpp_1 = cross_process_persistent_class()
    cpp_2 = cross_process_persistent_class()
    cpp_3 = cross_process_persistent_class()

    process = Process()
    process.start()

    process.work_queue.put((1, cpp_1))
    assert process.message_queue.get(timeout=10) == 'Stored object.'
    assert process.processed_items_queue.get(timeout=10) is cpp_1

    process.work_queue.put((1, cpp_1))
    assert process.message_queue.get(timeout=10) == 'Asserted identity.'
    assert process.processed_items_queue.get(timeout=10) is cpp_1

    process.work_queue.put((2, cpp_2))
    assert process.message_queue.get(timeout=10) == 'Stored object.'
    assert process.processed_items_queue.get(timeout=10) is cpp_2

    process.work_queue.put((1, cpp_1))
    assert process.message_queue.get(timeout=10) == 'Asserted identity.'
    assert process.processed_items_queue.get(timeout=10) is cpp_1

    process.work_queue.put((2, cpp_2))
    assert process.message_queue.get(timeout=10) == 'Asserted identity.'
    assert process.processed_items_queue.get(timeout=10) is cpp_2

    process.work_queue.put((3, cpp_3))
    assert process.message_queue.get(timeout=10) == 'Stored object.'
    assert process.processed_items_queue.get(timeout=10) is cpp_3

    process.work_queue.put((3, cpp_3))
    assert process.message_queue.get(timeout=10) == 'Asserted identity.'
    assert process.processed_items_queue.get(timeout=10) is cpp_3

    process.work_queue.put((1, cpp_1))
    assert process.message_queue.get(timeout=10) == 'Asserted identity.'
    assert process.processed_items_queue.get(timeout=10) is cpp_1

    process.terminate()
Example #14
def test():
    '''Test cute-(un)pickling on various objects.'''
    if not import_tools.exists('multiprocessing'):
        raise nose.SkipTest('`multiprocessing` is not installed.')

    import multiprocessing

    totally_pickleable_things = [[1, 2, (3, 4)], {
        1: 2,
        3: set((1, 2, 3))
    }, None, True, False, (1, 2, 'meow'), u'qweqweqasd',
                                 PickleableObject()]

    thing = Object()
    thing.a, thing.b, thing.c, thing.d, thing.e, thing.f, thing.g, thing.h = \
         totally_pickleable_things

    thing.x = threading.Lock()
    thing.y = multiprocessing.Lock()
    thing.z = NonPickleableObject()

    stream = StringIO()
    pickler = CutePickler(stream)
    pickler.dump(thing)

    stream.seek(0)
    unpickler = CuteUnpickler(stream)
    unpickled_thing = unpickler.load()

    assert thing.a == unpickled_thing.a
    assert thing.b == unpickled_thing.b
    assert thing.c == unpickled_thing.c
    assert thing.d == unpickled_thing.d
    assert thing.e == unpickled_thing.e
    assert thing.f == unpickled_thing.f
    assert thing.g == unpickled_thing.g
    # Regarding `.h`, we just check the type because there's no `__eq__`:
    assert type(thing.h) == type(unpickled_thing.h)

    assert thing.x != unpickled_thing.x
    assert thing.y != unpickled_thing.y
    assert thing.z != unpickled_thing.z

    assert isinstance(unpickled_thing.x, pickle_tools.FilteredObject)
    assert isinstance(unpickled_thing.y, pickle_tools.FilteredObject)
    assert isinstance(unpickled_thing.z, pickle_tools.FilteredObject)
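`CutePickler` and `CuteUnpickler` evidently skip attribute values that can't be pickled atomically and substitute `pickle_tools.FilteredObject` placeholders on loading. Their real implementation isn't shown on this page; a rough, simplified sketch of the mechanism built on the standard pickle persistent-ID hooks (class names and the pickleability check below are assumptions for illustration):

import io
import pickle
import threading

class FilteredObject:
    '''Placeholder for a value that was dropped during pickling.'''

class FilteringPickler(pickle.Pickler):
    def persistent_id(self, obj):
        # Treat lock objects as unpickleable and replace them with a marker.
        if isinstance(obj, type(threading.Lock())):
            return 'filtered'
        return None  # Pickle everything else normally.

class FilteringUnpickler(pickle.Unpickler):
    def persistent_load(self, pid):
        return FilteredObject()

class Thing:
    pass

thing = Thing()
thing.a = [1, 2, 3]
thing.x = threading.Lock()

stream = io.BytesIO()
FilteringPickler(stream).dump(thing)
stream.seek(0)
unpickled_thing = FilteringUnpickler(stream).load()

assert unpickled_thing.a == [1, 2, 3]
assert isinstance(unpickled_thing.x, FilteredObject)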
Example #15
def _platform_supports_multiprocessing_qsize():
    '''
    Return whether this platform supports `multiprocessing.Queue().qsize()`.
    
    I'm looking at you, Mac OS.
    '''
    if 'multiprocessing' not in sys.modules:
        if not import_tools.exists('multiprocessing'):
            return False
    import multiprocessing
    multiprocessing_queue = multiprocessing.Queue()
    try:
        multiprocessing_queue.qsize()
    except NotImplementedError:
        return False
    else:
        return True
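Such a helper is typically used to skip queue-size assertions on platforms where `qsize` raises `NotImplementedError`; a usage sketch in the same nose style as the tests above (the test itself is hypothetical and assumes `nose` plus the helper above are available):

def test_queue_size_reporting():
    '''Hypothetical test relying on `multiprocessing.Queue.qsize`.'''
    if not _platform_supports_multiprocessing_qsize():
        raise nose.SkipTest('`qsize` is not implemented on this platform.')
    import multiprocessing
    work_queue = multiprocessing.Queue()
    work_queue.put('meow')
    assert work_queue.qsize() == 1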
Example #16
def troubleshoot_pstats():
    '''
    Let the user know if there might be an error importing `pstats`.
    
    Raises an exception if it thinks it caught the problem. So if this function
    didn't raise an exception, it means it didn't manage to diagnose the
    problem.
    '''
    if not import_tools.exists('pstats') and os.name == 'posix':

        raise ImportError(
            "The required `pstats` Python module is not installed on your "
            "computer. Since you are using Linux, it's possible that this is "
            "because some Linux distributions don't include `pstats` by "
            "default. You should be able to fix this by installing the "
            "`python-profiler` package in your OS's package manager. "
            "(Possibly you will have to get this package from the multiverse.)"
        )
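`troubleshoot_pstats` raises only when it recognizes the problem, so the caller is expected to re-raise the original error otherwise; a plausible usage sketch (the surrounding code is an assumption, not taken from garlicsim):

try:
    import pstats
except ImportError:
    # Show a friendlier explanation if the cause can be diagnosed...
    troubleshoot_pstats()
    # ...and otherwise re-raise the original ImportError unchanged.
    raise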
Example #17
    def can_be_used_with_simpack_grokker(simpack_grokker):
        '''
        Return whether `ProcessCruncher` can be used with `simpack_grokker`.
        
        For `ProcessCruncher` to be usable, the `multiprocessing` module must be
        installed. Assuming it's installed, `ProcessCruncher` can be used if and
        only if the simpack is not history-dependent.
        '''

        if not import_tools.exists('multiprocessing'):
            return ReasonedBool(False, multiprocessing_missing_text)

        elif simpack_grokker.history_dependent:
            return ReasonedBool(
                False, "`ProcessCruncher` can't be used in history-dependent "
                "simulations because processes don't share memory.")

        else:
            return True
Example #18
def troubleshoot_pstats():
    '''
    Let the user know if there might be an error importing `pstats`.
    
    Raises an exception if it thinks it caught the problem. So if this function
    didn't raise an exception, it means it didn't manage to diagnose the
    problem.
    '''    
    if not import_tools.exists('pstats') and os.name == 'posix':
        
        raise ImportError(
            "The required `pstats` Python module is not installed on your "
            "computer. Since you are using Linux, it's possible that this is "
            "because some Linux distributions don't include `pstats` by "
            "default. You should be able to fix this by installing the "
            "`python-profiler` package in your OS's package manager. "
            "(Possibly you will have to get this package from the multiverse.)"
        )
    
        
Example #19
    def can_be_used_with_simpack_grokker(simpack_grokker):
        """
        Return whether `ProcessCruncher` can be used with `simpack_grokker`.
        
        For `ProcessCruncher` to be usable, the `multiprocessing` module must be
        installed. Assuming it's installed, `ProcessCruncher` can be used if and
        only if the simpack is not history-dependent.
        """

        if not import_tools.exists("multiprocessing"):
            return ReasonedBool(False, multiprocessing_missing_text)

        elif simpack_grokker.history_dependent:
            return ReasonedBool(
                False,
                "`ProcessCruncher` can't be used in history-dependent "
                "simulations because processes don't share memory.",
            )

        else:
            return True
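`ReasonedBool` acts like a falsy value that also carries an explanation, so callers can both branch on the result and show `multiprocessing_missing_text` or the history-dependence message to the user. The real class lives in garlicsim's general-misc utilities; a minimal stand-in illustrating the idea (attribute names here are assumptions):

class ReasonedBool:
    '''A bool-like value that remembers why it is `True` or `False`.'''

    def __init__(self, value, reason=None):
        self.value = bool(value)
        self.reason = reason

    def __bool__(self):
        return self.value

result = ReasonedBool(False, "`ProcessCruncher` can't be used in "
                             "history-dependent simulations.")
if not result:
    print(result.reason)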
Example #20
    def __init__(self, crunching_manager, initial_state, crunching_profile):

        BaseCruncher.__init__(self, crunching_manager, initial_state, crunching_profile)

        if not import_tools.exists("multiprocessing"):
            raise Exception(multiprocessing_missing_text)

        from .process import Process

        self.process = Process(self.project.simpack_grokker.get_step_iterator, initial_state, crunching_profile)
        """The actual process which does the crunching."""

        self.work_queue = self.process.work_queue
        """
        Queue for putting completed work to be picked up by the main thread.
        
        In this queue the cruncher will put the states that it produces, in
        chronological order. If the cruncher reaches a simulation end, it will
        put an `EndMarker` in this queue.
        """

        self.order_queue = self.process.order_queue
        """Queue for receiving instructions from the main thread."""
Example #21
def test_partially_pickleables_multiprocessing():
    '''
    Test `is_atomically_pickleable` on partially-pickleable objects.

    "Partially-pickleable" means an object which is atomically pickleable but
    not pickleable.
    '''

    if not import_tools.exists('multiprocessing'):
        raise nose.SkipTest('`multiprocessing` is not installed.')
    
    import multiprocessing
    
    x = PickleableObject()
    x.lock = threading.Lock()
    
    partially_pickleables = [
        x,
        [multiprocessing.BoundedSemaphore()],
        {1: multiprocessing.Lock(), 2: 3},
        set([multiprocessing.Queue(), x])
    ]
    
    for thing in partially_pickleables:
        assert pickle_tools.is_atomically_pickleable(thing)
        assert not is_pickle_successful(thing)
Example #22
from garlicsim.general_misc import import_tools

import garlicsim

from .state import State

HISTORY_DEPENDENT = False
N_STEP_FUNCTIONS = 1
DEFAULT_STEP_FUNCTION = State.step
DEFAULT_STEP_FUNCTION_TYPE = \
    garlicsim.misc.simpack_grokker.step_types.SimpleStep
CONSTANT_CLOCK_INTERVAL = 1
ENDABLE = False
PROBLEM = None
VALID = True
CRUNCHERS_LIST = \
    [garlicsim.asynchronous_crunching.crunchers.ThreadCruncher] + \
    (
        [garlicsim.asynchronous_crunching.crunchers.ProcessCruncher] if 
        import_tools.exists('multiprocessing')
        else []
    )
Example #23
# This program is distributed under the LGPL2.1 license.

"""
This package defines the `ProcessCruncher` class.

See its documentation for more information.
"""

from .process_cruncher import ProcessCruncher


### Warning if `multiprocessing` isn't installed: #############################
#                                                                             #

from garlicsim.general_misc import import_tools

if not import_tools.exists("multiprocessing"):
    import warnings

    warnings.warn(
        "You don't have the `multiprocessing` package installed. "
        "GarlicSim will run, but it won't be able to use "
        "`ProcessCruncher` in order to take advantage of multiple "
        "processor cores for crunching."
    )

del import_tools

#                                                                             #
### Finished warning about `multiprocessing`. #################################
Example #24
def test():
    '''Test tutorial-2.'''
    # Asserting we don't have a `_coin_flip` on path already in some other
    # place:
    assert not import_tools.exists('_coin_flip')
    
    with temp_file_tools.TemporaryFolder(prefix='temp_test_garlicsim_') \
                                                          as temp_folder:
        with TempWorkingDirectorySetter(temp_folder):
            with sys_tools.OutputCapturer() as output_capturer:
                garlicsim.scripts.start_simpack.start(
                    argv=['start_simpack.py', '_coin_flip']
                )
            assert output_capturer.output == \
                ('`_coin_flip` simpack created successfully! Explore the '
                 '`_coin_flip` folder and start filling in the contents of '
                 'your new simpack.\n')
            simpack_path = os.path.join(temp_folder, '_coin_flip')
            assert os.path.isdir(simpack_path)
                                                                      
            state_module_path = os.path.join(simpack_path, 'state.py')
            
            assert_module_was_copied_with_correct_newlines(
                state_module_path,
                garlicsim.scripts.simpack_template.simpack_name.state
            )
            
            with open(state_module_path, 'w') as state_file:
                state_file.write(state_module_contents_for_coinflip)
                
                         
            with sys_tools.TempSysPathAdder(temp_folder):
                import _coin_flip
                
                assert _coin_flip.__doc__ == '\n_coin_flip description.\n'
                assert _coin_flip.name == '_coin_flip'                
                
                state = _coin_flip.State.create_root()
                assert repr(vars(state)) == \
                       "{'balance': 5000, 'last_bet_result': 0}"

                new_state = garlicsim.simulate(state, 5)
                assert repr(vars(new_state)) == \
                "{'balance': %s, 'clock': %s, 'last_bet_result': %s}" % \
                (new_state.balance, new_state.clock, new_state.last_bet_result)
                
                from garlicsim.general_misc.infinity import infinity
                
                got_winner = False
                got_loser = False
                while not (got_winner and got_loser):
                    new_state = garlicsim.simulate(state, infinity)
                    assert repr(vars(new_state)) == \
                           ("{'balance': %s, 'clock': %s, 'last_bet_result': "
                            "%s}" % (new_state.balance, new_state.clock,
                            new_state.last_bet_result))
                    assert new_state.balance <= 6000
                    if new_state.balance == 6000:
                        assert new_state.last_bet_result > 0
                        got_winner = True
                        continue
                    else:
                        assert new_state.last_bet_result < 0
                        assert new_state.balance <= \
                            (-2) * new_state.last_bet_result
                        got_loser = True
                        continue
                    
                states = garlicsim.list_simulate(state, infinity)
                len(states)
                assert re.match(
                    r'^\[5000(, \d+)+\]$',
                    repr([s.balance for s in states])
                )
                
                def get_end_balance():
                    return garlicsim.simulate(state, infinity).balance
                results = [get_end_balance() for i in range(100)]
                assert 3000 < (sum(results) / len(results)) < 6000
                assert 0.4 < (results.count(6000)/len(results)) < 0.95
Example #25
def test():
    '''Test tutorial-2.'''
    # Asserting we don't have a `_coin_flip` on path already in some other
    # place:
    assert not import_tools.exists('_coin_flip')

    with temp_file_tools.TemporaryFolder(prefix='temp_test_garlicsim_') \
                                                          as temp_folder:
        with TempWorkingDirectorySetter(temp_folder):
            with sys_tools.OutputCapturer() as output_capturer:
                garlicsim.scripts.start_simpack.start(
                    argv=['start_simpack.py', '_coin_flip'])
            assert output_capturer.output == \
                ('`_coin_flip` simpack created successfully! Explore the '
                 '`_coin_flip` folder and start filling in the contents of '
                 'your new simpack.\n')
            simpack_path = os.path.join(temp_folder, '_coin_flip')
            assert os.path.isdir(simpack_path)

            state_module_path = os.path.join(simpack_path, 'state.py')

            assert_module_was_copied_with_correct_newlines(
                state_module_path,
                garlicsim.scripts.simpack_template.simpack_name.state)

            with open(state_module_path, 'w') as state_file:
                state_file.write(state_module_contents_for_coinflip)

            with sys_tools.TempSysPathAdder(temp_folder):
                import _coin_flip

                assert _coin_flip.__doc__ == '\n_coin_flip description.\n'
                assert _coin_flip.name == '_coin_flip'

                state = _coin_flip.State.create_root()
                assert repr(vars(state)) == \
                       "{'balance': 5000, 'last_bet_result': 0}"

                new_state = garlicsim.simulate(state, 5)
                assert repr(vars(new_state)) == \
                "{'balance': %s, 'clock': %s, 'last_bet_result': %s}" % \
                (new_state.balance, new_state.clock, new_state.last_bet_result)

                from garlicsim.general_misc.infinity import infinity

                got_winner = False
                got_loser = False
                while not (got_winner and got_loser):
                    new_state = garlicsim.simulate(state, infinity)
                    assert repr(vars(new_state)) == \
                           ("{'balance': %s, 'clock': %s, 'last_bet_result': "
                            "%s}" % (new_state.balance, new_state.clock,
                            new_state.last_bet_result))
                    assert new_state.balance <= 6000
                    if new_state.balance == 6000:
                        assert new_state.last_bet_result > 0
                        got_winner = True
                        continue
                    else:
                        assert new_state.last_bet_result < 0
                        assert new_state.balance <= \
                            (-2) * new_state.last_bet_result
                        got_loser = True
                        continue

                states = garlicsim.list_simulate(state, infinity)
                len(states)
                assert re.match(r'^\[5000(, \d+)+\]$',
                                repr([s.balance for s in states]))

                def get_end_balance():
                    return garlicsim.simulate(state, infinity).balance

                results = [get_end_balance() for i in range(100)]
                assert 3000 < (sum(results) / len(results)) < 6000
                assert 0.4 < (results.count(6000) / len(results)) < 0.95
Example #26
from garlicsim.general_misc import import_tools

import garlicsim

from .state import State

ENDABLE = False
PROBLEM = None
VALID = True
CONSTANT_CLOCK_INTERVAL = None
HISTORY_DEPENDENT = False
N_STEP_FUNCTIONS = 2
DEFAULT_STEP_FUNCTION = State.step_generator
DEFAULT_STEP_FUNCTION_TYPE = garlicsim.misc.simpack_grokker.step_types.SimpleStep
CRUNCHERS_LIST = [garlicsim.asynchronous_crunching.crunchers.ThreadCruncher] + (
    [garlicsim.asynchronous_crunching.crunchers.ProcessCruncher] if import_tools.exists("multiprocessing") else []
)