def network_setup(realtime=False):
    import simpy
    if not realtime:
        env = simpy.Environment()
    else:
        env = simpy.RealtimeEnvironment(strict=False, factor=2)
    available_links = simpy.FilterStore(env)
    pending_messages = simpy.FilterStore(env)
    send_messages = simpy.Store(env)
    log = simpy.Store(env)

    return Network(env, available_links, pending_messages, send_messages, log)
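Network and the link/message stores above come from the surrounding project. As a minimal, self-contained sketch of the same pattern (all names below are hypothetical), a FilterStore can serve as an addressed message queue where each consumer only takes messages meant for it:

import simpy


def sender(env, pending_messages):
    # Hypothetical producer: address each message to a recipient.
    for dest in ("robot_1", "robot_2"):
        yield env.timeout(1)
        yield pending_messages.put({"to": dest, "body": "hello"})


def receiver(env, pending_messages, name):
    # Only take messages addressed to this receiver.
    msg = yield pending_messages.get(lambda m: m["to"] == name)
    print(env.now, name, "received", msg["body"])


env = simpy.Environment()
pending_messages = simpy.FilterStore(env)
env.process(sender(env, pending_messages))
env.process(receiver(env, pending_messages, "robot_2"))
env.process(receiver(env, pending_messages, "robot_1"))
env.run()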
Example #2
 def define_knowledge_base(self, env):
     global_task_list = simpy.FilterStore(env)  # This needs to be a table in an SQL database
     global_robot_list = simpy.FilterStore(env)  # This needs to be a table in an SQL database
     tasks_executing = simpy.FilterStore(env)  # This needs to be a table in an SQL database
     kb = dict()
     kb['global_task_list'] = global_task_list
     kb['global_robot_list'] = global_robot_list
     kb['tasks_executing'] = tasks_executing
     kb['charge_locations'] = self.charge_locations
     kb['graph'] = self.graph
     return kb
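The stores built here are consumed elsewhere in that codebase. A rough sketch of how such a knowledge base might be used (the task fields are assumptions, not the project's actual schema):

import simpy

env = simpy.Environment()
kb = {'global_task_list': simpy.FilterStore(env),
      'tasks_executing': simpy.FilterStore(env)}
kb['global_task_list'].items = [{'id': 7, 'robot': 'robot_1'}]


def dispatcher(env, kb):
    # Move the task assigned to robot_1 from the task list to tasks_executing.
    task = yield kb['global_task_list'].get(lambda t: t['robot'] == 'robot_1')
    yield kb['tasks_executing'].put(task)


env.process(dispatcher(env, kb))
env.run()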
Example #3
def test_case_1(env):
    """Complex test case: 3 heat sources introduced during the scenario and
    one of them removed later. Initial room temp is 290K with 288K as the
    low equilibrium.
    """
    room = ContainingSpace(env,288,290,(5*5*3))
    event_queue = simpy.FilterStore(env)
    hs_1 = CbHeater(env,'h1',room,event_queue,['ping','heat_on','heat_off'],10,100)
    hs_2 = CbHeater(env,'h2',room,event_queue,['ping','heat_on','heat_off'],10,300)
    hs_3 = CbHeater(env,'h3',room,event_queue,['ping','heat_on','heat_off'],10,200)
    tmeter1 = CbThermometer(env,'t1',room,event_queue,['ping'],10)
    heat_source_actions = [
        (100,'add',hs_1),
        (300,'add',hs_2),
        (600,'change',hs_2),
        (810,'add',hs_3),
        (90,'remove',hs_1)
    ]                
    env.process(heat_source_activity(env,room,event_queue,heat_source_actions))
    env.process(temp_listener(event_queue))
    # Execute!
    print('==== Start of test_case_1 ====')
    env.run(until=SIM_TIME)
    # End of run
    print('#### End of test_case_1 after %d seconds ####' % SIM_TIME)
Example #4
 def __init__(self, env):
     self.env = env
     self.store = simpy.FilterStore(env)
     self.env.process(self.producerA())
     self.env.process(self.producerB())
     self.env.process(self.consumerA())
     self.env.process(self.consumerB())
Example #5
 def __init__(self,
              env,
              user_number=None,
              user_pattern=None,
              application=None,
              out_link=None,
              host_index=None,
              ip_address=None,
              server_ip=None):
     self.user_pattern = user_pattern
     self.user_number = user_number
     self.application = application
     self.env = env
     self.out_link = out_link
     self.in_link = simpy.FilterStore(self.env)
     self.ip_address = ip_address
     self.server_ip = server_ip
     self.Users = [
         User(env=self.env,
              out_link=self.out_link,
              in_link=self.in_link,
              index=i,
              application_process_time=self.application,
              ip_src=ip_address,
              ip_dst=self.server_ip) for i in range(self.user_number)
     ]
     self.host_index = host_index
Example #6
def test_filter_store_get_after_mismatch(env):
    """Regression test for issue #49.

    Triggering get-events after a put in FilterStore wrongly breaks after the
    first mismatch.

    """
    def putter(env, store):
        # The order of putting 'spam' before 'eggs' is important here.
        yield store.put('spam')
        yield env.timeout(1)
        yield store.put('eggs')

    def getter(store):
        # The order of requesting 'eggs' before 'spam' is important here.
        eggs = store.get(lambda i: i == 'eggs')
        spam = store.get(lambda i: i == 'spam')

        ret = yield spam | eggs
        assert spam in ret and eggs not in ret
        assert env.now == 0

        yield eggs
        assert env.now == 1

    store = simpy.FilterStore(env, capacity=2)
    env.process(getter(store))
    env.process(putter(env, store))
    env.run()
Example #7
    def __init__(self,
                 env: simpy.Environment,
                 width: int,
                 height: int,
                 interval: int,
                 num_drivers: int,
                 hex_area: float,
                 units_per_km: int = 1,
                 seed: int = None):
        """

        :param env: simpy environment
        :param width: width of the grid
        :param height: height of the grid
        :param interval: distance between two spots
        :param num_drivers: number of drivers in the grid
        :param hex_area: area size of a single hex tile
        :param units_per_km: number of grid units per km
        :param seed: optional random seed for reproducible runs
        """
        if seed is not None:
            np.random.seed(seed)

        self.width = width
        self.height = height
        self.interval = interval
        self.hex_overlay = HexGrid(hex_area=hex_area,
                                   width=width,
                                   height=height,
                                   units_per_km=units_per_km)
        self.taxi_spots, self.spot_height, self.spot_width = get_spot_locations(
            width=width, height=height, interval=interval)
        self.driver_pools = simpy.FilterStore(env, capacity=num_drivers)
Example #8
    def __init__(self, env, name):
        super().__init__(env, name)
        self.name = "client-%s" % str(self.name)
        self.input_queue = simpy.FilterStore(env)

        self.connected_service = None
        self.served_requests = []
Example #9
    def __init__(self, env, responderList, status):
        self.env = env
        self.free = simpy.FilterStore(env)
        self.responderList = responderList
        self.status = status

        for responder in responderList:
            self.free.put(responder)
Example #10
    def pem(env):
        store = simpy.FilterStore(env, capacity=2)

        get_event = store.get(lambda item: item == 'b')
        yield store.put('a')
        assert not get_event.triggered
        yield store.put('b')
        assert get_event.triggered
Example #11
    def __init__(self, env, equip_df, process_time):
        self.env = env
        # self.machine = simpy.Resource(env, num_machines)
        self.machine = simpy.FilterStore(env, len(equip_df))
        self.process_time = process_time

        for i, rows in equip_df.iterrows():
            self.machine.put({"EquipID": rows["EquipID"], "Chambers": rows["Chambers"]})
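A minimal sketch of how a FilterStore loaded with equipment dicts like this can be checked out by a process and returned afterwards (the inline data and the run_lot helper are assumptions, not the original code):

import simpy


def run_lot(env, machines, needed_chambers, process_time):
    # Check out any tool with enough chambers, hold it for the process time, then return it.
    tool = yield machines.get(lambda m: m["Chambers"] >= needed_chambers)
    yield env.timeout(process_time)
    yield machines.put(tool)


env = simpy.Environment()
machines = simpy.FilterStore(env, capacity=2)
machines.items = [{"EquipID": "EQ1", "Chambers": 2},
                  {"EquipID": "EQ2", "Chambers": 4}]
env.process(run_lot(env, machines, needed_chambers=3, process_time=5.0))
env.run()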
Example #12
 def __init__(self, env, out_link, index, application_process_time):
     random.seed(random_seed())
     self.env = env
     self.out_link = out_link
     self.in_link = simpy.FilterStore(self.env)
     self.index = index
     self.action = env.process(self.run())
     self.request = Packet()
     self.application_process_time = application_process_time
Example #13
    def __init__(self, env, config: CpuConfig, default_val=0):
        self.env = env
        self._access_queue = simpy.Store(env, config.mem_access_queue_size)
        self._access_queue_pop_event = simpy.Event(env)
        self._accesses_in_execution = simpy.FilterStore(env)
        self._accesses_in_execution_pop_event = simpy.Event(env)

        self.id = "MEM"
        self._memory = [default_val] * config.mem_size
        self._log = get_logger(env, self.id)
Example #14
 def capacity(self):
     if self.kwargs:
         store = list(self.kwargs.values())[0]
         items = [self.apparatus(i) for i in range(1, self.cap + 1)]
         increase_cap = sum(list(store.values())) - len(store)
         self.cap = self.cap + increase_cap
         self.store = simpy.FilterStore(self.env, capacity=self.cap)
         for i in store:
             for j in range(store[i] - 1):
                 items.append(self.apparatus(int(i)))
         self.store.items = items
Example #15
 def __init__(self, env, name, cap, **kwargs):
     self.kwargs = kwargs
     self.apparatus = namedtuple(name, 'num')
     self.env = env
     self.cap = cap
     self.store = simpy.FilterStore(env, capacity=cap)
     self.store.items = [self.apparatus(i) for i in range(1, cap + 1)]
     self.capacity()
     self.requests = dict()
     self.wait_request = dict()
     self.wait_times = dict()
Example #16
    def __init__(self, env):
        self.env = env
        self.free = simpy.FilterStore(env)
        self.incidentList = list()
        self.responderList = list()

        for i in range(NUM_AMB):
            tempAmb = Responder(self.env, "amb_" + str(i), "amb", STATION,
                                STATION, 0.25)
            self.responderList.append(tempAmb)
            self.free.put(tempAmb)
Example #17
 def standard(miners_number=20, days=10):
     # Convert simulation days to seconds
     simulation_time = moment.get_seconds(days)
     try:
         clear_db()
     except ConnectionError:
         return -1
     # Store in redis the simulation event names
     configure_event_names([
         Miner.BLOCK_REQUEST, Miner.BLOCK_RESPONSE, Miner.BLOCK_NEW,
         Miner.HEAD_NEW
     ])
     # Create simpy environment
     env = simpy.Environment()
     store = simpy.FilterStore(env)
     # Create the seed block
     seed_block = Block(None, 0, env.now, -1, 0, 1)
     hashrates = numpy.random.dirichlet(numpy.ones(miners_number), size=1)
     # Create miners
     miners = []
     # This dict is used to store the connections between miners, so they are not created twice
     connections = dict()
     for i in range(0, miners_number):
         miner = Miner(env, store, hashrates[0, i] * Miner.BLOCK_RATE,
                       Miner.VERIFY_RATE, seed_block)
         miners.append(miner)
         connections[miner] = dict()
     # Randomly connect miners
     for i, miner in enumerate(miners):
         miner_connections = numpy.random.choice([True, False],
                                                 miners_number)
         for j, miner_connection in enumerate(miner_connections):
             # Only create the connection if the miner is not itself and the connection does not already exist
             if (i != j and miner_connection
                     and j not in connections[miner]
                     and i not in connections[miners[j]]):
                 # Store the connection so it is not created twice
                 connections[miner][j] = True
                 connections[miners[j]][i] = True
                 Miner.connect(miner, miners[j])
     for miner in miners:
         miner.start()
     start = time.time()
     # Start simulation until limit. Time unit is seconds
     env.run(until=simulation_time)
     end = time.time()
     print("Simulation took: %1.4f seconds" % (end - start))
     # Store in redis simulation days
     store_days(days)
     # After simulation store every miner head, so their chain can be built again
     for miner in miners:
         r.hset("miners:" + repr(miner.id), "head", miner.chain_head)
     # Notify simulation ended
     r.publish(Simulator.PUBSUB_CHANNEL, Simulator.SIMULATION_ENDED)
     return 0
Example #18
 def __init__(self, env, logger, network):
     self.env = env
     self.logger = logger
     self.network = network
     self.queue = simpy.FilterStore(self.env)
     self.ID = Node.count
     Node.count += 1
     self.iteration_cnt = 0 # local iteration counter
     self.request_cnt = 0 # count requests processed in each iteration
     self.response_cnt = 0 # count responses in each iteration
     self.neighbors = []
     self.env.process(self.start())
Example #19
    def __init__(self, config=None):
        """
        Loads simulation parameters from config.
        Creates simulation objects from map, populating them with components.
        """
        self.CONFIG = 'simulation.json' if config is None else config
        with open(self.CONFIG) as fd:
            config = json.load(fd)
            self.FPS = config.get('FPS', 60)
            self.DEFAULT_LINE_WIDTH = config.get('DLW', 10)
            file = pathlib.Path(config.get('context', '.')) / config.get(
                'map', 'map.drawio')
            self.duration = config.get('duration', -1)

        simulation = map_parser.build_simulation_from_map(file)
        self.world: esper.World = simulation['world']
        self.window = simulation['window']
        self.batch = simulation['batch']
        _, self.window_dimensions, _ = simulation['window_props']
        self.draw2ent = simulation['draw_map']
        self.objects = simulation['objects']
        # Global inventory
        self.interactive = self.world.component_for_entity(1,
                                                           Inventory).objects

        print(f"==> Simulation objects")
        for oid, objId in self.objects:
            entity = self.draw2ent.get(objId)
            print(f"OBJ #{oid} (draw {objId}). Type {entity[1]['type']}")
            # print(f"Object has components {world.components_for_entity(oid)}")
            if self.world.has_component(oid, Map):
                ent_map = self.world.component_for_entity(oid, Map)
                print("\tAvailable paths:")
                for idx, key in enumerate(ent_map.paths.keys()):
                    print(f"\t{idx}. {key}")
            if self.world.has_component(oid, Script):
                script = self.world.component_for_entity(oid, Script)
                print(script)

        self.EXIT = False
        self.ENV = simpy.Environment()
        self.EXIT_EVENT = self.ENV.event()
        self.KWARGS = {
            "ENV": self.ENV,
            "WORLD": self.world,
            "_KILLSWITCH": self.ENV.event() if self.duration > 0 else None,
            "EVENT_STORE": simpy.FilterStore(self.ENV),
            # Pyglet specific things (for the re-create entity)
            "BATCH": self.batch,
            "WINDOW_OPTIONS":
            (self.window_dimensions, self.DEFAULT_LINE_WIDTH),
        }
Example #20
 def __init__(self, file_path, feasable_solution, Workstation, data):
     self.env = simpy.Environment()
     self.que = simpy.FilterStore(self.env)
     self.switch = {}
     self.file_path = file_path
     self.feasable_solution = feasable_solution
     self.Workstation = Workstation
     self.data = data
     self.file = pd.read_excel(file_path,
                               sheet_name='Line_Details',
                               skiprows=3,
                               usecols='B:F')
     self.unique_task = self.file['Task Number'].tolist()
     self.followers = self.data.groupby(
         ['Next Task'])['Task Number'].count().to_dict()
Example #21
def simulate_day(num, Appliance):

	'''
	Generator that runs num independent single-day simulations, yielding results after each day.
	inputs:
		num - number of single-day simulations to run
		Appliance - class Appliance, acts as storage for cumulative values for each day/simulation

	outputs (yielded per day):
		Appliance.DR_energy - class property of Appliance, tallies total DR energy for the day
		Appliance.kWh_per_hour - 24-slot list holding power usage per hour
		Appliance.curtailed - 24-slot list holding curtailed usage per hour
	'''

	i = 0

	while i < num:

		# header for looks
		now = datetime.datetime.now()
		print ""
		print ("~~~~~~~~~~~~~~~~~~~~~~~~~~~")
		print ("DR SIM #%s START AT %s:%02d:%02d" % (i+1, now.hour, now.minute, now.second))
		print ("~~~~~~~~~~~~~~~~~~~~~~~~~~~")
		print ""
		print "Daily Schedule"
		print "--------------"
		print ""

		# initialize simpy environment
		env = simpy.Environment()
		# store will hold names of unused appliances
		apps = simpy.FilterStore(env)
		# creates HEAD device, which creates all of the appliances
		HEAD = HomeEnergyAutomationDevice(APPLIANCES, env, apps)
		# run simulation for 1 day
		env.run(until=24)

		# return DR_energy for run i, as well as scheduled demand and curtailed demand
		yield Appliance.DR_energy, Appliance.kWh_per_hour, Appliance.curtailed

		# reset for next simulation
		Appliance.kWh_per_hour = [0] * 24
		Appliance.DR_energy = 0
		Appliance.curtailed = [0] * 24

		i += 1
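Because simulate_day is a generator, a caller iterates over it to collect one result tuple per simulated day. A hypothetical driver, assuming the project's Appliance class and APPLIANCES list are importable, might look like:

daily_dr = []
for dr_energy, kwh_per_hour, curtailed in simulate_day(5, Appliance):
    # One tuple per simulated day: total DR energy plus the two 24-slot profiles.
    daily_dr.append(dr_energy)
print("mean DR energy per day: %s" % (sum(daily_dr) / len(daily_dr)))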
Example #22
    def __init__(self, ontology_uri=None, agent_class=None, behavior_class=None, uri_property=None, simulator=None, simulate=None):
        self.reasoner = Reasoner(ontology_uri)

        self.behavior_class = self.reasoner.onto[behavior_class]
        self.uri_property = uri_property
        self.simulator = self.reasoner.onto[simulator]
        self.simulate = self.reasoner.onto[simulate]

        self.env = simpy.Environment()        
        self.action = self.env.process(self.__run())
        self.agents = simpy.FilterStore(self.env)

        agents = list(self.reasoner.onto[agent_class].instances())
        for agent in agents:
            self.agents.put(agent)

        self.agents.put(self.simulator)
Example #23
 def __init__(self,
              env: simpy.Environment,
              topology: Topology,
              enable_ack=True,
              csv_file: Optional[TextIO] = None,
              save_history=False):
     self.env = env
     self.stores = defaultdict(lambda: simpy.FilterStore(env))
     self.topology = topology
     self.history = list()
     self.enable_ack = enable_ack
     self.csv_file = csv_file
     if csv_file:
         self.csv_writer = csv.DictWriter(csv_file,
                                          fieldnames=csv_fields.keys())
         self.csv_writer.writeheader()
     self.save_history = save_history
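self.stores gives every destination its own FilterStore on demand. A stripped-down sketch of that per-node inbox pattern (the message fields and node names are hypothetical):

import simpy
from collections import defaultdict

env = simpy.Environment()
stores = defaultdict(lambda: simpy.FilterStore(env))


def send(env, stores, dst, kind, payload):
    yield env.timeout(1)
    yield stores[dst].put({"kind": kind, "payload": payload})


def recv(env, stores, node, kind):
    # Wait only for messages of the requested kind in this node's own store.
    msg = yield stores[node].get(lambda m: m["kind"] == kind)
    print(env.now, node, "got", kind, msg["payload"])


env.process(send(env, stores, "n2", "ack", 42))
env.process(recv(env, stores, "n2", "ack"))
env.run()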
Example #24
    def __init__(self,
                 env,
                 name,
                 avg_process_time,
                 no_of_cores=4,
                 max_pool_size=100,
                 time_slice=1,
                 cs_overhead=0.1,
                 process_time_dist=lambda x: random.expovariate(1 / x)):
        """
        Initializes a server instance.
        :param env: simpy environment
        :param avg_process_time: average processing time
        :param no_of_cores: number of cores assigned to the server
        :param max_pool_size: size of the main thread pool
        :param time_slice: time given to a single thread by the scheduler
        :param cs_overhead: context switch overhead
        :param process_time_dist : allows passing custom process time distributions
        :param name: name of the instance
        """

        # workload params
        self.avg_process_time = avg_process_time
        self.no_of_cores = no_of_cores

        self.max_pool_size = max_pool_size
        self.time_slice = time_slice
        self.cs_overhead = cs_overhead
        self.process_time_dist = process_time_dist

        # application params
        self.task_queue = [self]

        super().__init__(env, name)
        self.name = "server-%s" % str(self.name)

        # current thread count
        self.thread_count = 0

        # store responses
        self.pool_queue = simpy.Store(env)
        self.response_queue = simpy.FilterStore(env)

        # should be initiated
        self.out_pipe = None
Example #25
def test_filter_calls_worst_case(env):
    """In the worst case the filter function is being called for items multiple
    times."""

    log = []
    store = simpy.FilterStore(env)

    def putter(store):
        for i in range(4):
            log.append(f'put {i}')
            yield store.put(i)

    def log_filter(item):
        log.append(f'check {item}')
        return item >= 3

    def getter(store):
        log.append(f'get {yield store.get(log_filter)}')

    env.process(getter(store))
    env.process(putter(store))
    env.run()

    # The filter function is repeatedly called for every item in the store
    # until a match is found.
    assert log == [
        'put 0',
        'check 0',
        'put 1',
        'check 0',
        'check 1',
        'put 2',
        'check 0',
        'check 1',
        'check 2',
        'put 3',
        'check 0',
        'check 1',
        'check 2',
        'check 3',
        'get 3',
    ]
Example #26
def test_filter_calls_best_case(env):
    """The filter function is called every item in the store until a match is
    found. In the best case the first item already matches."""
    log = []

    def log_filter(item):
        log.append(f'check {item}')
        return True

    store = simpy.FilterStore(env)
    store.items = [1, 2, 3]

    def getter(store):
        log.append(f'get {yield store.get(log_filter)}')
        log.append(f'get {yield store.get(log_filter)}')
        log.append(f'get {yield store.get(log_filter)}')

    env.process(getter(store))
    env.run()

    assert log == ['check 1', 'get 1', 'check 2', 'get 2', 'check 3', 'get 3']
Example #27
def test_case_2(env):
    """Simple test case: 1 heat source introduced during the scenario and turned
    on a little later. After reaching new equilibrium, it is turned off.
    Initial room temp is 290K with 288K as the low equilibrium.
    """
    room = ContainingSpace(env,288,290,(5*5*3))
    event_queue = simpy.FilterStore(env)
    hs_1 = CbHeater(env,'heater1',room,event_queue,
                    ['ping','heat_on','heat_off'],60,300)
    tmeter1 = CbThermometer(env,'thermometer1',room,event_queue,['ping'],20)
    heat_source_actions = [
        (100,'add',hs_1),     # add and turn on
        (300,'change',hs_1)   # turn off
    ]                
    env.process(heat_source_activity(env,room,event_queue,heat_source_actions))
    env.process(temp_listener(event_queue))
    # Execute!
    print('==== Start of test_case_2 ====')
    env.run(until=SIM_TIME)
    # End of run
    print('#### End of test_case_2 after %d seconds ####' % SIM_TIME)
Example #28
def simulation_run(factor_config: object, noise_config: object):
    """
    Runs the configured replications and returns the mean throughput across them.
    """
    targets = Targets()
    product_mixture = noise_config.product_mix
    product_mixture = list(
        map(lambda x: x / sum(product_mixture), product_mixture))
    for repl in range(StaticParameter.REPLICATIONS):
        Machine = namedtuple('Machine', 'proc_time_mean')
        machines = [
            Machine(proc_time_mean)
            for proc_time_mean in StaticParameter.MACHINE_PROCESSING_MEAN
        ]
        machines = machines[:factor_config.num_machines]
        process_job.count = 0
        drain.count = 0
        env = simpy.Environment()
        buffer = simpy.Resource(env, capacity=factor_config.buffer_size)
        machine_shop = simpy.FilterStore(env,
                                         capacity=factor_config.num_machines)
        machine_shop.items = machines
        testing_station = simpy.Resource(
            env, capacity=factor_config.num_testing_station)
        env.process(
            job_source(env,
                       machines=machine_shop,
                       buffer=buffer,
                       testing_station=testing_station,
                       product_mixture=product_mixture,
                       targets=targets))
        env.run(until=StaticParameter.SIM_TIME)
        targets.THROUGHPUT.append(drain.count)
        targets.CYCLE_TIME.append(
            sum(targets.CYCLE_TIME) / len(targets.CYCLE_TIME))

    # print(f"mean throughput: {sum(targets.THROUGHPUT) / len(targets.THROUGHPUT)}")
    # print(f"mean cycle time: {sum(targets.CYCLE_TIME) / len(targets.CYCLE_TIME)}")
    return sum(targets.THROUGHPUT) / len(targets.THROUGHPUT)
Example #29
def setup(env, registration, exam_room, staff):
    global patients_arrived
    # refresh doctors on the new shift
    next_shift = False

    while True:
        time = env.now
        # check to see if we are above 8 hours - if we are, change the doctors
        if not next_shift and time > 8 * 60:
            with staff.get(lambda doctor: doctor.lunchTaken == False) as doc:
                results = yield doc | env.timeout(0)
                if doc not in results:
                    print("---------SHIFT CHANGE--------")
                    next_shift = True
                    staff = simpy.FilterStore(env, 3)
                    doc_a = Staff(env, 0)
                    doc_b = Staff(env, 1)
                    doc_c = Staff(env, 2)
                    staff.put(doc_a)
                    staff.put(doc_b)
                    staff.put(doc_c)
        # Check if there are any free doctors and it is during a feasible lunch time - see if any doctors need to take their lunch
        if (len(staff.items) != 0 and time < 8 * 60 and time > 3.5 * 60
                or time > (8 + 3.5) * 60):
            with staff.get(lambda doctor: doctor.lunchTaken == False) as doc:
                results = yield doc | env.timeout(0)
                if doc in results:
                    print(results[doc])
                    env.process(results[doc].lunch())
                    yield staff.put(results[doc])

        yield env.timeout(random.expovariate(1 / IAT_MEAN))

        new_patient = Patient(env, patients_arrived)

        env.process(patient(env, new_patient, registration, exam_room, staff))
        patients_arrived += 1
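The with staff.get(...) / yield doc | env.timeout(0) construct above is a non-blocking FilterStore get: the zero timeout wins whenever no doctor currently matches, and leaving the with-block cancels the still-pending request. A minimal sketch of just that idiom (the Doctor namedtuple is a stand-in for the Staff class, not the original implementation):

import simpy
from collections import namedtuple

Doctor = namedtuple("Doctor", "name lunchTaken")


def poll_for_doctor(env, staff):
    # Non-blocking get: the zero timeout "wins" if no doctor matches right now.
    with staff.get(lambda d: d.lunchTaken == False) as doc:
        results = yield doc | env.timeout(0)
        if doc in results:
            print(env.now, "got", results[doc].name)
            yield staff.put(results[doc])
        else:
            print(env.now, "no doctor free")


env = simpy.Environment()
staff = simpy.FilterStore(env, 3)
staff.put(Doctor("doc_a", False))
env.process(poll_for_doctor(env, staff))
env.run()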
Example #30
import random

import simpy


class Package:

    def __init__(self, good_id, env, arrive_time, from_w=None, attr: dict = None, io_rules=None):

        self.good_id = good_id
        self.from_w = from_w
        self.env = env
        self.arrive_time = arrive_time
        self.attr = attr
        self.io_rules = io_rules

    def unload(self, ms):
        # todo: add logic
        # package arrive time
        yield self.env.timeout(self.arrive_time)
        print(f"good {self.good_id} arrive at {self.env.now}")
        machine = yield ms.get(lambda machine: machine.machine_name == 'unload')
        # todo add process
        # 1. checking & customs
        probra_chk = random.uniform(0,100)  # 0<= x <100, float
        probra_cutms = random.uniform(0,100)  # 0<= x <100, float

        if probra_chk <= 0.05:
            yield self.env.timeout(1800)
        if probra_cutms <= 0.02:
            yield self.env.timeout(3600)


        yield ms.put(machine)


    def pre_sort_infeed(self):
        pass

    def pre_sort(self):
        pass

    def pre_sort_outlet(self):
        pass

    def second_sort_infeed(self):
        pass

    def second_sort(self):
        pass

    def second_sort_outlet(self):
        pass

    def s_module(self):
        pass

    def reload(self):
        pass

    def run(self, ms):
        # Schedule the unload of this package as a simpy process.
        return self.env.process(self.unload(ms))




if __name__ == "__main__":

    env = simpy.Environment()

    # todo: lots of machine need to instanced

    # no capacity
    m1 = Machine(id='R_dock1', dock_name='R', machine_name='unload',)
    # with capacity
    m2 = Machine(id='R_presort_infeed_10', dock_name='R',
                 machine_name='presort_infeed',
                 capacity=12)

    machines = simpy.FilterStore(env)
    machines.items = [m1, m2]

    packages = [Package(good_id=i, env=env, arrive_time=i * 3) for i in range(30)]

    for package in packages:
        env.process(package.unload(machines))

    env.run()
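The Machine class used in this last snippet is not shown. A minimal stand-in that would satisfy these constructor calls and the machine_name filter (an assumption, not the original implementation) could be:

class Machine:
    """Minimal stand-in for the Machine class the snippet above assumes."""

    def __init__(self, id, dock_name, machine_name, capacity=None):
        self.id = id
        self.dock_name = dock_name
        self.machine_name = machine_name
        self.capacity = capacity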