import os
import time
import logging


def load(config, agent, epoch, from_disk=True):
    config = config['ai']
    if not config['enabled']:
        logging.info("ai disabled")
        return False

    logging.info("[ai] bootstrapping dependencies ...")

    from stable_baselines import A2C
    from stable_baselines.common.policies import MlpLstmPolicy
    from stable_baselines.common.vec_env import DummyVecEnv

    import pwnagotchi.ai.gym as wrappers

    env = wrappers.Environment(agent, epoch)
    env = DummyVecEnv([lambda: env])

    logging.info("[ai] bootstrapping model ...")

    a2c = A2C(MlpLstmPolicy, env, **config['params'])

    if from_disk and os.path.exists(config['path']):
        logging.info("[ai] loading %s ..." % config['path'])
        # A2C.load is a classmethod that returns a new model instance,
        # so its result has to be reassigned for the saved weights to be used.
        a2c = A2C.load(config['path'], env)
    else:
        logging.info("[ai] model created:")
        for key, value in config['params'].items():
            logging.info("  %s: %s" % (key, value))

    return a2c
def load(config, agent, epoch, from_disk=True):
    config = config['ai']
    if not config['enabled']:
        logging.info("ai disabled")
        return False

    try:
        begin = time.time()

        logging.info("[ai] bootstrapping dependencies ...")

        start = time.time()
        from stable_baselines import A2C
        logging.debug("[ai] A2C imported in %.2fs" % (time.time() - start))

        start = time.time()
        from stable_baselines.common.policies import MlpLstmPolicy
        logging.debug("[ai] MlpLstmPolicy imported in %.2fs" % (time.time() - start))

        start = time.time()
        from stable_baselines.common.vec_env import DummyVecEnv
        logging.debug("[ai] DummyVecEnv imported in %.2fs" % (time.time() - start))

        start = time.time()
        import pwnagotchi.ai.gym as wrappers
        logging.debug("[ai] gym wrapper imported in %.2fs" % (time.time() - start))

        env = wrappers.Environment(agent, epoch)
        env = DummyVecEnv([lambda: env])

        logging.info("[ai] creating model ...")

        start = time.time()
        a2c = A2C(MlpLstmPolicy, env, **config['params'])
        logging.debug("[ai] A2C created in %.2fs" % (time.time() - start))

        if from_disk and os.path.exists(config['path']):
            logging.info("[ai] loading %s ..." % config['path'])
            start = time.time()
            # A2C.load is a classmethod that returns a new model instance,
            # so its result has to be reassigned for the saved weights to be used.
            a2c = A2C.load(config['path'], env)
            logging.debug("[ai] A2C loaded in %.2fs" % (time.time() - start))
        else:
            logging.info("[ai] model created:")
            for key, value in config['params'].items():
                logging.info("  %s: %s" % (key, value))

        logging.debug("[ai] total loading time is %.2fs" % (time.time() - begin))

        return a2c
    except Exception:
        logging.exception("error while starting AI")
        logging.warning("[ai] AI not loaded!")
        return False
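# Usage sketch (illustrative, not part of the original module): a minimal
# example of the config shape load() expects ('ai' -> 'enabled', 'path',
# 'params'). The path and A2C parameters below are hypothetical values,
# not the project's defaults; `agent` and `epoch` would come from
# pwnagotchi's runtime, so the actual call is left commented out.
example_config = {
    'ai': {
        'enabled': True,
        'path': '/root/brain.nn',        # hypothetical model location
        'params': {
            'gamma': 0.99,               # A2C discount factor
            'learning_rate': 0.001,      # optimizer learning rate
        },
    }
}
# model = load(example_config, agent, epoch, from_disk=False)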
            'missed_interactions': missed,
            'num_hops': random.randint(1, wifi.NumChannels),
            'num_deauths': num_deauth,
            'num_associations': num_assocs,
            'num_handshakes': random.randint(0, tot_interactions),
            'cpu_load': .5 + random.random(),
            'mem_usage': .5 + random.random(),
            'temperature': random.randint(40, 60)
        }

        self.epoch += 1
        return data


epoch_mock = EpochMock()
env = wrappers.Environment(epoch_mock)
env = DummyVecEnv([lambda: env])

print("learning from random data ...")
model = A2C(MlpPolicy, env, verbose=1)
model.learn(total_timesteps=10)
model.save("test.nn")

print("running ...")
obs = env.reset()
for i in range(1000):
    env.render()
    action, _states = model.predict(obs)
    obs, rewards, dones, info = env.step(action)
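# Note: A2C.load is a classmethod that builds a new model from the saved
# file, which is why the load() helpers above reassign its result. Reloading
# the test model saved here would look like (illustrative):
#
#   restored = A2C.load("test.nn", env)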