def list_models(ctx, **kwargs):
    """List available trainable models.

    Prints each model id followed by its indented documentation string.
    ``ctx`` and ``**kwargs`` are accepted for CLI-framework compatibility
    and are unused here.
    """
    # models.list_models(True) yields (id, doc) pairs — presumably True
    # requests the doc text as well; confirm against the models module.
    # Renamed loop variable: the original bound the builtin name `id`.
    for model_id, doc in models.list_models(True):
        indented_doc = textwrap.indent(doc, " ")
        print(" • {0}:\n{1}".format(model_id, indented_doc))
def setup_pilots(self, rover):
    """Attach manual and automatic pilots to *rover*.

    Manual side: probes for an F710 gamepad (appending it is currently
    disabled) and, on a navio2 board, an RC input (also disabled).
    Automatic side: builds one KerasCategorical pilot per model returned
    by list_models().

    Bug fixed: board-type strings were compared with ``is`` (object
    identity), which is not guaranteed to work for equal strings; use
    ``==`` for value comparison.
    """
    manual_pilots = []
    try:
        # Try to find an RC gamepad and log it.
        f710 = F710()
        # manual_pilots.append(f710)  # intentionally disabled upstream
        logging.info("Loaded F710 Gamepad module")
    except Exception:
        f710 = None
    if self.board_type == 'navio':
        # Can't get RC for Navio to work yet.
        pass
    elif self.board_type == 'navio2':
        # manual_pilots.append(RC())  # disabled: this overwrote the list
        logging.info("Non loaded RC module")
    rover.manual_pilots = manual_pilots
    # If no manual control is detected, control falls through to the
    # automatic pilots built below.
    auto_pilots = []
    model_paths = list_models()
    for model_path, model_name in model_paths:
        logging.info("Loading model " + model_name)
        keras = KerasCategorical(model_path, name=model_name)
        auto_pilots.append(keras)
    rover.auto_pilots = auto_pilots
def setup_pilots(self, rover):
    """Build the rover's pilot list and select the first pilot.

    Probes for an F710 gamepad and an RC receiver (best-effort: a
    failure just logs and leaves that input absent), then for every
    available model loads a KerasCategorical network and pairs it with
    each detected manual input as a mixed pilot.
    """
    available = []
    try:
        f710 = F710()
        available.append(f710)
    except Exception:
        f710 = None
        logging.info("Unable to load F710 Gamepad")
    try:
        rc = RC()
        available.append(rc)
    except Exception:
        rc = None
        logging.info("Unable to load RC")
    for path, name in list_models():
        network = KerasCategorical(path, name=name)
        logging.info("Loading model " + name)
        network.load()
        # Pair the network with every manual input that was detected.
        if f710:
            available.append(MixedF710(network, f710))
        if rc:
            available.append(MixedRC(network, rc))
    rover.pilots = available
    rover.set_pilot(0)
def setup_pilots(self, rover):
    """Attach manual and automatic pilots to *rover*.

    Manual side: an F710 gamepad if present, plus an RC input on a
    navio2 board. Automatic side: one KerasCategorical pilot per model
    returned by list_models().

    Bug fixed: board-type strings were compared with ``is`` (object
    identity), which is not guaranteed for equal strings; use ``==``.
    """
    manual_pilots = []
    try:
        f710 = F710()
        manual_pilots.append(f710)
        logging.info("Loaded F710 Gamepad module")
    except Exception:
        f710 = None
    if self.board_type == 'navio':
        # Can't get RC for Navio to work yet.
        pass
    elif self.board_type == 'navio2':
        manual_pilots.append(RC())
        logging.info("Loaded RC module")
    rover.manual_pilots = manual_pilots
    auto_pilots = []
    model_paths = list_models()
    for model_path, model_name in model_paths:
        logging.info("Loading model " + model_name)
        keras = KerasCategorical(model_path, name=model_name)
        auto_pilots.append(keras)
    rover.auto_pilots = auto_pilots
def setup_pilots(self, rover):
    """Build the rover's pilot list and select the first pilot.

    Probes for an F710 gamepad (best-effort) and, on a navio board, an
    RC receiver; each loaded model is paired with every detected manual
    input as a mixed pilot.

    Bug fixed: ``self.board_type is 'navio'`` compared strings by
    identity, which is not guaranteed for equal strings; use ``==``.
    """
    pilots = []
    try:
        f710 = F710()
        pilots.append(f710)
        logging.info("Loaded F710 Gamepad module")
    except Exception:
        f710 = None
    if self.board_type == 'navio':
        rc = RC()
        pilots.append(rc)
        logging.info("Loaded RC module")
    else:
        rc = None
    model_paths = list_models()
    for model_path, model_name in model_paths:
        logging.info("Loading model " + model_name)
        keras = KerasCategorical(model_path, name=model_name)
        if f710:
            pilots.append(MixedF710(keras, f710))
        if rc:
            pilots.append(MixedRC(keras, rc))
    rover.pilots = pilots
    rover.set_pilot(0)
def test_list_models(self):
    """list_models() should report at least one model in ../models.

    Fix: the original leaked the file handle opened in append mode;
    a ``with`` block now guarantees it is closed.
    """
    test_file = sys.modules[__name__].__file__
    path = os.path.dirname(os.path.realpath(test_file))
    # Touch a dummy .h5 file so the models directory is non-empty.
    with open(os.path.join(path, "../models/unit_test.h5"), "a"):
        pass
    models_list = list_models()
    self.assertTrue(models_list)
import logging import models from sys import path path.insert(1, './rest_gae') from rest_gae import * from rest_gae.users import UserRESTHandler config = {} config['webapp2_extras.sessions'] = { 'secret_key': 'my-super-secret-key', } model_handlers = [] model_list = models.list_models() # automatically create a handler for every model listed in models for model_name in model_list: model_handlers.append(RESTHandler( '/api/' + model_name.lower(), getattr(models, model_name), permissions={ 'GET': PERMISSION_ANYONE, 'POST': PERMISSION_LOGGED_IN_USER, 'PUT': PERMISSION_OWNER_USER, 'DELETE': PERMISSION_ADMIN }, # Will be called for every PUT, right before the model is saved put_callback=lambda model, data: model
# Training entry point setup: imports, discovery printout, and absl flag
# definitions for a domain-adaptation experiment.
from absl import app
from absl import flags
import models
import methods
import file_utils
import load_datasets
from datasets import datasets
from metrics import Metrics
from checkpoints import CheckpointManager
from gpu_memory import set_gpu_memory

# Print the registered methods/models/datasets at import time so the
# valid values for the enum flags below are visible on the console.
print(methods.list_methods())
print(models.list_models())
print(datasets.list_datasets())

FLAGS = flags.FLAGS

# Output locations.
flags.DEFINE_string("modeldir", "example-models", "Directory for saving model files")
flags.DEFINE_string("logdir", "example-logs", "Directory for saving log files")
# Experiment selection — enum flags are validated against the lists
# printed above.
flags.DEFINE_enum("method", "yndaws", methods.list_methods(), "What method of domain adaptation to perform (or none)")
flags.DEFINE_enum("model", "ynfcn", models.list_models(), "What model to use (note: ignored for vrada/rdann methods)")
flags.DEFINE_enum("dataset", "ucihar", datasets.list_datasets(), "What dataset to use (e.g. \"ucihar\")")
# Domain configuration: comma-separated source domains, optional target.
flags.DEFINE_string("sources", "14", "Which source domains to use (e.g. \"1,2,3\")")
flags.DEFINE_string("target", "19", "What target domain to use (e.g. \"4\", can be blank for no target)")
flags.DEFINE_string("uid", "0", "A unique ID saved in the log/model folder names to avoid conflicts")
# Training schedule and resource limits.
flags.DEFINE_integer("ensemble", 1, "Number of models in the ensemble, 1 = no ensemble")
flags.DEFINE_integer("steps", 30000, "Number of training steps to run")
flags.DEFINE_float("gpumem", 2000, "GPU memory to let TensorFlow use, in MiB (0 for all)")
flags.DEFINE_integer("model_steps", 0, "Save the model every so many steps (0 for only when log_val_steps)")
from absl import app from absl import flags import models import methods import file_utils import load_datasets from datasets import datasets from metrics import Metrics from checkpoints import CheckpointManager from gpu_memory import set_gpu_memory print(methods.list_methods()) print(models.list_models()) print(datasets.list_datasets()) FLAGS = flags.FLAGS flags.DEFINE_string("modeldir", "example-models", "Directory for saving model files") flags.DEFINE_string("logdir", "example-logs", "Directory for saving log files") flags.DEFINE_enum("method", "yndaws", methods.list_methods(), "What method of domain adaptation to perform (or none)") flags.DEFINE_enum("model", "ynfcn", models.list_models(), "What model to use (note: ignored for vrada/rdann methods)") flags.DEFINE_enum("dataset", "ucihar", datasets.list_datasets(), "What dataset to use (e.g. \"ucihar\")") flags.DEFINE_string("sources", "14", "Which source domains to use (e.g. \"1,2,3\")") flags.DEFINE_string(
path = Path(self.writer.file_writer.get_logdir()) path /= "checkpoints" path.mkdir(parents=True, exist_ok=True) self.model.save(path, step) def close(self): path = Path(self.writer.file_writer.get_logdir()) path /= "records.json" self.writer.export_scalars_to_json(path) self.writer.close() if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--model', choices=models.list_models(), help='Model to use for training') parser.add_argument('--n-epochs', type=int, default=argparse.SUPPRESS, help='') parser.add_argument('--save-interval', type=int, default=argparse.SUPPRESS, help='') parser.add_argument('--val-interval', type=int, default=argparse.SUPPRESS, help='') parser.add_argument('--batch-size', type=int, default=argparse.SUPPRESS, help='') parser.add_argument('--data-size', type=int, default=argparse.SUPPRESS, help='') parser.add_argument('--lr', type=float, default=argparse.SUPPRESS, help='') parser.add_argument('--checkpoint', type=str, default=argparse.SUPPRESS, help='')