import random

from toolz.functoolz import compose

from gzutils.gzutils import DotDict, Logging, get_output_dir

from animatai.utils import vector_add
from animatai.agents import Agent, Obstacle, Thing, XYEnvironment
from animatai.network import MotorNetwork, Network
from animatai.network_rl import MotorModel, NetworkModel, NetworkDP, NetworkQLearningAgent

# Setup logging
# =============

random.seed(1)

OUTPUT_DIR = get_output_dir()
DEBUG_MODE = True
l = Logging('grid', DEBUG_MODE)


# Environment
# ===========

class Energy(Thing):
    pass


class Water(Thing):
    pass


class Landmark(Thing):
    pass
# Copyright (C) 2017 Jonas Colmsjö, Claes Strannegård
#

import random

from ecosystem.agents import Agent
from ecosystem.network_rl import BasicAgent
from gzutils.gzutils import DotDict, Logging

from .sea import Sea, Sing, Squid

# Setup logging
# =============

DEBUG_MODE = True
l = Logging('random_mom_and_calf', DEBUG_MODE)


# Mom moves randomly until a squid is found, then eats, moves forward and sings.
def mom_program(active_sensors, _):
    action, nsaction = None, None

    if 's' in active_sensors:
        l.info('--- MOM FOUND SQUID, EATING AND SINGING! ---')
        action = 'eat_and_forward'
        nsaction = 'sing'

    if not action:
        if random.random() < 0.5:
# pylint: disable=missing-docstring, global-statement, invalid-name, too-few-public-methods, no-self-use
#
# A random mother cachalot and calf
#
# Copyright (C) 2017 Jonas Colmsjö, Claes Strannegård
#

from animatai.agents import Thing, Obstacle, Direction, NonSpatial, XYEnvironment
from gzutils.gzutils import DotDict, Logging

# Setup constants and logging
# ===========================

DEBUG_MODE = True
l = Logging('sea', DEBUG_MODE)


# Classes
# ========

class Squid(Thing):
    pass


# the action 'sing' creates a Song
class Song(NonSpatial):
    pass


class Sea(XYEnvironment):
# Copyright (C) 2017 Jonas Colmsjö, Claes Strannegård
#

# Imports
# ======

import unittest

from gzutils.gzutils import Logging, unpack

from animatai.agents import Thing
from animatai.network import Network, MotorNetwork

# Setup logging
# =============

DEBUG_MODE = True
l = Logging('test_network', DEBUG_MODE)

unpack0 = unpack(0)


class Thing1(Thing):
    pass


class Thing2:
    pass


class Thing3:
    pass
#
# Copyright (C) 2017 Jonas Colmsjö, Claes Strannegård
#

import random

from gzutils.gzutils import DotDict, Logging

from agents import Agent, Thing, Direction, NonSpatial, XYEnvironment
from utils import vector_add

# Setup logging
# =============

DEBUG_MODE = True
l = Logging('blind_dog', DEBUG_MODE)

random.seed('blind-dog')

# Configuration of rendering
# =========================
#
# left: (-1,0), right: (1,0), up: (0,-1), down: (0,1)
MOVES = [(0, -1), (0, 1)]

fido_start_pos = (0, 0)
dido_start_pos = (0, 0)

OPTIONS = DotDict({
    'terrain': 'G\nG\nG\nG\nG\nG\nG\nG\nG\nG'.split('\n'),
#
# Copyright (C) 2017 Jonas Colmsjö, Claes Strannegård
#

# Imports
# ======

import unittest

from gzutils.gzutils import Logging

from animatai.mdp import MDP, value_iteration, value_iteration2, best_policy

# Setup logging
# =============

DEBUG_MODE = False
l = Logging('test_mdp', DEBUG_MODE)


# Unit tests
# ==========
#
# Use the grid world from the AIMA book but give the states the names: 'a', ..., 'k'.
#
# +------+------+------+------+
# |  a   |  b   |  c   |  d   |
# +------+------+------+------+
# |  e   |      |  f   |  g   |
# +------+------+------+------+
# |  h   |  i   |  j   |  k   |
# +------+------+------+------+
#
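# Illustrative only (not part of the original tests): one way to write down the
# grid above as rows of state names, with None for the blocked square. The
# actual tests may encode states, transitions and rewards differently.
#
#   GRID = [['a', 'b',  'c', 'd'],
#           ['e', None, 'f', 'g'],
#           ['h', 'i',  'j', 'k']]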
# Imports
# ======

import unittest

from ecosystem.agents import Agent, Obstacle
from gzutils.gzutils import Logging

from sea import Sea, Squid

# Setup logging
# =============

DEBUG_MODE = True
l = Logging('test_cachalot', DEBUG_MODE)


# Unit tests
# ==========

lane = ('XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX\n' +
        '                                                  \n' +
        '     ssss                                ssss     \n')

# the mother and calf have separate and identical lanes
things = lane + lane + 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'

options = {
    'things': things.split('\n'),
    'width': 50,
    'height': 7,
# Imports
# ======

import unittest

from gzutils.gzutils import get_output_dir, Logging

from animatai.agents import Agent, Thing, XYEnvironment
from animatai.history import History

# Setup logging
# =============

DEBUG_MODE = True
l = Logging('test_stats', DEBUG_MODE)

output_dir = get_output_dir('/../output', file=__file__)


class TestStats(unittest.TestCase):

    def setUp(self):
        l.info('Testing stats...')

    def tearDown(self):
        l.info('...done with test_stats.')

    def test_collect_history(self):
        hist1 = History()
        hist2 = History()
        hist3 = History()
# Imports
# =======

import random

from collections import defaultdict

from gzutils.gzutils import DotDict, Logging, save_csv_file

from .utils import turn_heading, distance_squared

# Setup constants and logging
# ===========================

PERCEPTIBLE_DISTANCE = 0

DEBUG_MODE = True
l = Logging('agents', DEBUG_MODE)


# The code
# =========

# Objects of this class are identified by their name (rather than by reference).
class NamedObject:

    def __init__(self, name=None):
        self.__name__ = name

    def __repr__(self):
        return '<{} ({})>'.format(self.__name__, self.__class__.__name__)

    def __eq__(self, other):
        return self.__name__ == other.__name__
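# Illustrative usage (not part of the original file): equality compares names,
# so two distinct instances with the same name are considered equal.
#
#   NamedObject('stone') == NamedObject('stone')   # True
#   NamedObject('stone') is NamedObject('stone')   # False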
# Copyright (C) 2017 Jonas Colmsjö, Claes Strannegård
#

# Imports
# =======

import os

from itertools import chain

from gzutils.gzutils import Logging, get_output_dir, save_csv_file

# Setup logging
# =============

DEBUG_MODE = True
l = Logging('history', DEBUG_MODE)


# The code
# =========
#
# A singleton collecting history that can be saved to a CSV file
#
# __history:     {dataset: [(col1, ..., coln)]} - dict of lists with tuples (or lists)
# __headers:     {dataset: [header1, ..., headern]} - dict with column headers
# __filenames:   {filename: [dataset1, ..., datasetn]} - dict mapping filenames to datasets
# __env_classes: {env_class_name: [(class_name1, count1, ..., class_namen, countn)]}
#
class History:
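# Illustrative only (dataset and file names are made up; this is not the
# History API): the internal structures described above might hold data such as
#
#   __history   = {'sea': [(0, 1.0), (1, 0.9)]}
#   __headers   = {'sea': ['step', 'energy']}
#   __filenames = {'history.csv': ['sea']}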
# pylint: disable=missing-docstring, invalid-name, too-many-arguments
#
# A Markov Decision Process (MDP)
#
# Copyright (C) 2017 Jonas Colmsjö, Claes Strannegård
#

from gzutils.gzutils import Logging

from .utils import argmax

# Setup logging
# =============

DEBUG_MODE = False
l = Logging('mdp', DEBUG_MODE)


# Classes
# =======

class MDP:

    def __init__(self, init, actlist, terminals, transitions, states, rewards, gamma=.9):
        if not 0 < gamma <= 1:
# Imports
# ======

import unittest

from math import isclose

from gzutils.gzutils import Logging

from animatai.agents import Agent, Thing, Direction, XYEnvironment
from animatai.network import Network

# Setup logging
# =============

DEBUG_MODE = True
l = Logging('test_agents', DEBUG_MODE)


# Unit tests
# ==========

class TestAgents(unittest.TestCase):
    # pylint: disable=too-many-locals

    def setUp(self):
        l.info('Testing agents...')

    def test_move_forward(self):
        l.info('test_move_forward')
# pylint: disable=missing-docstring, global-statement, invalid-name
#
# Imports
# =======

import math
import os

from random import random

from gzutils.gzutils import Logging, get_output_dir

# Setup logging
# =============

DEBUG_MODE = True
l = Logging('network', DEBUG_MODE)


#
# type     - a string specifying the type of node (for information only)
# func     - the function to apply to the percept: [(Thing|NonSpatial, radius)],
#            vars: [any type] -> (boolean, [any type])
# vars     - the initial state the function executes in, necessary for SEQ (and other
#            functions that require a state)
# children - indexes of the nodes that the node takes input from (used in the update function)
#
class Node:
    # pylint: disable=too-few-public-methods

    def __init__(self, type_, func, vars_, children):
        self.type_ = type_
        self.vars_ = vars_
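# Illustrative only (not from the original file): constructing a node whose
# function fires when any perceived thing is a Squid; it keeps no state ([])
# and reads from no other nodes ([]). The type string and predicate are
# hypothetical.
#
#   def sees_squid(percept, vars_):
#       return (any(thing.__class__.__name__ == 'Squid' for thing, _ in percept), vars_)
#
#   squid_node = Node('SENSOR', sees_squid, [], [])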
# =======

import math
import random

from collections import defaultdict

from gzutils.gzutils import DefaultDict, Logging

from .agents import Agent

# Setup logging
# =============

DEBUG_MODE = True
l = Logging('network_rl', DEBUG_MODE)


# Network model used by the Network Q-learning Agents
# ===================================================
#
# A variant of an MDP where:
# - actions are generated from active motors - frozenset([m0,...,mn])
# - states are represented with active network nodes - frozenset([s1,..., sn])
# - statuses are used instead of terminal states. Any status less than or equal
#   to zero is equivalent to a terminal state - [(name, float)]
# - init is the initial state of the sensors
#
# Use like this: NetworkModel({frozenset([0]): state, ...})
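# Illustrative only (the state names are made up): the dict passed to
# NetworkModel maps frozensets of active network nodes to states, for example
#
#   model = NetworkModel({frozenset([0]): 'hungry',
#                         frozenset([0, 1]): 'full'})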
import asyncio

import websockets

import config

from gzutils.gzutils import Logging
from gzutils.gzutils import writef

# Constants and functions
# =======================

# number of seconds between packages sent to the client
SEND_RATE = 1 / 100000

DEBUG_MODE = True
l = Logging('wsserver', DEBUG_MODE)


# Websockets server class
# ========================

class WsServer:

    def __init__(self, message_handler):
        self.connected = False
        self.queue = []
        self.message_handler = message_handler

    # install a SIGALRM handler and emit SIGALRM every 1 sec
    # signum, frame
    def sig_handler(self, _, _2):
        writef('.')
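# Illustrative only (not part of this file): how a SIGALRM handler like the one
# above is typically installed with the standard library so that it fires once
# per second. my_message_handler is a hypothetical callback.
#
#   import signal
#   server = WsServer(my_message_handler)
#   signal.signal(signal.SIGALRM, server.sig_handler)
#   signal.setitimer(signal.ITIMER_REAL, 1, 1)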
from toolz.functoolz import compose

from gzutils.gzutils import DefaultDict, Logging, unpack, get_output_dir

from animatai.agents import Agent
from animatai.network import Network, MotorNetwork
from animatai.network_rl import MotorModel, NetworkModel, NetworkDP, NetworkQLearningAgent

from sea import Sea, Song, Squid
from random_mom_and_calf_config import mom_start_pos, calf_start_pos, OPTIONS

# Setup logging
# =============

DEBUG_MODE = True
l = Logging('mom_and_calf', DEBUG_MODE)


# Mom and Calf
# ===========

class Mom(Agent):

    def __init__(self, objectives):
        # pylint: disable=line-too-long, too-many-locals

        # program=None
        super().__init__(None, 'mom')

        # Motors and actions
        motors = ['sing_eat_and_forward', 'forward', 'dive_and_forward',