Example no. 1
def merge_with_yaml(yaml_filename):
    """Load a yaml config file and merge it into the global config object"""
    global _g_conf
    with open(yaml_filename, 'r') as f:
        yaml_file = yaml.load(f, Loader=yaml.FullLoader)  # explicit Loader; a bare yaml.load(f) is deprecated
        yaml_cfg = AttributeDict(yaml_file)

    _merge_a_into_b(yaml_cfg, _g_conf)

    path_parts = os.path.split(yaml_filename)
    _g_conf.EXPERIMENT_BATCH_NAME = os.path.split(path_parts[-2])[-1]
    _g_conf.EXPERIMENT_NAME = path_parts[-1].split('.')[-2]
    _g_conf.EXPERIMENT_GENERATED_NAME = generate_name(_g_conf)
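
A minimal usage sketch (illustrative only; the path below is hypothetical) showing how the two experiment fields are derived from the YAML path:

merge_with_yaml('configs/sample_batch/experiment_1.yaml')
# _g_conf.EXPERIMENT_BATCH_NAME -> 'sample_batch'   (name of the directory containing the file)
# _g_conf.EXPERIMENT_NAME       -> 'experiment_1'   (file name without its extension)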
Example no. 2
def merge_with_yaml(yaml_filename):
    """Load a yaml config file and merge it into the global config object"""
    global _g_conf
    with open(yaml_filename, 'r') as f:
        yaml_file = yaml.load(f, Loader=yaml.FullLoader)
        yaml_cfg = AttributeDict(yaml_file)

    _merge_a_into_b(yaml_cfg, _g_conf)  # merge YAML config into the global one

    path_parts = os.path.split(yaml_filename)
    _g_conf.EXPERIMENT_BATCH_NAME = os.path.split(path_parts[-2])[-1]
    _g_conf.EXPERIMENT_NAME = path_parts[-1].split('.')[-2]
    _g_conf.EXPERIMENT_GENERATED_NAME = generate_name(_g_conf)
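    # SAVE_SCHEDULE and TEST_SCHEDULE are stored as strings in the config; eval below turns
    # them into Python objects (note that eval fully trusts the contents of the YAML file)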
    _g_conf.SAVE_SCHEDULE = eval(_g_conf.SAVE_SCHEDULE)
    _g_conf.TEST_SCHEDULE = eval(_g_conf.TEST_SCHEDULE)
Example no. 3
def merge_with_yaml(yaml_filename, encoder_params=None):
    """Load a yaml config file and merge it into the global config object"""
    global _g_conf
    with open(yaml_filename, 'r') as f:
        yaml_file = yaml.load(f, Loader=yaml.FullLoader)  # explicit Loader, matching the variant above
        yaml_cfg = AttributeDict(yaml_file)

    path_parts = os.path.split(yaml_filename)
    if encoder_params is not None:
        _g_conf.EXPERIMENT_BATCH_NAME = os.path.split(path_parts[-2])[-1]
        _g_conf.EXPERIMENT_NAME = path_parts[-1].split('.')[-2] + '_' + str(
            encoder_params['encoder_checkpoint'])
        _g_conf.EXPERIMENT_GENERATED_NAME = generate_name(_g_conf)

    else:
        _g_conf.EXPERIMENT_BATCH_NAME = os.path.split(path_parts[-2])[-1]
        _g_conf.EXPERIMENT_NAME = path_parts[-1].split('.')[-2]
        _g_conf.EXPERIMENT_GENERATED_NAME = generate_name(_g_conf)

    _merge_a_into_b(yaml_cfg, _g_conf)
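
A usage sketch for this variant (the path and checkpoint value are hypothetical): passing encoder_params appends the encoder checkpoint to the experiment name.

merge_with_yaml('configs/sample_batch/experiment_1.yaml',
                encoder_params={'encoder_checkpoint': 100000})
# _g_conf.EXPERIMENT_BATCH_NAME -> 'sample_batch'
# _g_conf.EXPERIMENT_NAME       -> 'experiment_1_100000'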
Example no. 4
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from ast import literal_eval
from coilutils import AttributeDict
import copy
import numpy as np
import os
import yaml

from configs.namer import generate_name
from logger.coil_logger import create_log, add_message

_g_conf = AttributeDict()
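# make the global config mutable so that the defaults below can be assigned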
_g_conf.immutable(False)
"""#### GENERAL CONFIGURATION PARAMETERS ####"""
_g_conf.NUMBER_OF_LOADING_WORKERS = 12
_g_conf.FINISH_ON_VALIDATION_STALE = None
"""#### INPUT RELATED CONFIGURATION PARAMETERS ####"""
_g_conf.SENSORS = {'rgb': (3, 88, 200)}
_g_conf.MEASUREMENTS = {'float_data': (31)}
_g_conf.TARGETS = ['steer', 'throttle', 'brake']
_g_conf.INPUTS = ['speed_module']
_g_conf.INTENTIONS = []
_g_conf.BALANCE_DATA = True
_g_conf.STEERING_DIVISION = [0.05, 0.05, 0.1, 0.3, 0.3, 0.1, 0.05, 0.05]
_g_conf.PEDESTRIAN_PERCENTAGE = 0
_g_conf.SPEED_DIVISION = []
_g_conf.LABELS_DIVISION = [[0, 2, 5], [3], [4]]
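
For illustration, an experiment YAML merged by merge_with_yaml above only needs to override a subset of these defaults; a hypothetical file is shown here as the dict that yaml.load would return:

yaml_file = {
    'NUMBER_OF_LOADING_WORKERS': 8,             # hypothetical override of the default 12
    'BALANCE_DATA': False,                      # hypothetical override of the default True
    'TARGETS': ['steer', 'throttle', 'brake'],  # unchanged from the default
}
# _merge_a_into_b(AttributeDict(yaml_file), _g_conf) then overwrites just these keys,
# presumably leaving the remaining defaults untouched.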
Example no. 5
# pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict}
# # 2. overwrite entries in the existing state dict
# model_dict.update(pretrained_dict) 
# # 3. load the new state dict
# model.load_state_dict(model_dict)


torch.set_default_dtype(torch.float32)
torch.set_default_tensor_type('torch.cuda.FloatTensor')

# read yaml file
yaml_filename = 'coil_configs.yaml'
with open(yaml_filename, 'r') as f:
    # TODO: combine all known configurations into one file and load it into a dict
    yaml_file = yaml.load(f, Loader=yaml.FullLoader)
    yaml_cfg = AttributeDict(yaml_file)

# # load checkpoint dict
# checkpoint = torch.load(os.path.join('/home/ruihan/scenario_runner/models/CoIL/'+str(180000)+'.pth'))

# # load model
# model = CoILModel(yaml_cfg.MODEL_TYPE, yaml_cfg.MODEL_CONFIGURATION)
# model.cuda()
# checkpoint_iteration = checkpoint['iteration']
# print("Pretrained CoIL loaded ", checkpoint_iteration)
# model.load_state_dict(checkpoint['state_dict'])
# model.eval()
# torch.save(model.state_dict(), '/home/ruihan/scenario_runner/models/CoIL/CoIL_180000.pth' )

print("load empty CoIlModel")
modelB = CoILICRA(yaml_cfg.MODEL_CONFIGURATION)
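
A continuation sketch mirroring the commented-out code above (the checkpoint path comes from the torch.save call above and is assumed to exist):

state_dict = torch.load('/home/ruihan/scenario_runner/models/CoIL/CoIL_180000.pth')
modelB.load_state_dict(state_dict)
modelB.eval()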
Example no. 6
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from ast import literal_eval
from coilutils import AttributeDict
import copy
import numpy as np
import os
import yaml

from configs.namer import generate_name
from logger.coil_logger import create_log, add_message

_g_conf = AttributeDict()

_g_conf.immutable(False)
"""#### GENERAL CONFIGURATION PARAMETERS ####"""
_g_conf.NUMBER_OF_LOADING_WORKERS = 12
_g_conf.FINISH_ON_VALIDATION_STALE = None
"""#### INPUT RELATED CONFIGURATION PARAMETERS ####"""
_g_conf.SENSORS = {'rgb': (3, 88, 200)}
_g_conf.MEASUREMENTS = {'float_data': (31)}
_g_conf.TARGETS = ['steer', 'throttle', 'brake']
_g_conf.AFFORDANCES_TARGETS = {}
_g_conf.INPUTS = []
_g_conf.TARGETS_AUX = None
_g_conf.COMMANDS = None
_g_conf.INTENTIONS = []
_g_conf.BALANCE_DATA = True