Example No. 1
# Standard-library imports used below; mp is assumed to be
# torch.multiprocessing (the shared models passed to workers call
# share_memory()), and project modules (MultiprocessingLoggerManager,
# ActionSpace, DatasetParser, HouseServer, ...) are assumed importable
# from the repository.
import os
import sys
import json
import logging
import traceback

import torch.multiprocessing as mp


def main():

    data_filename = "simulators/house/AssetsHouse"
    experiment_name = "house_unet_cb_navigation_gold_goal_no_RNN"
    experiment = "./results/" + experiment_name
    print("EXPERIMENT NAME: ", experiment_name)

    # Create the experiment folder
    if not os.path.exists(experiment):
        os.makedirs(experiment)

    # Define log settings
    log_path = experiment + '/train_baseline.log'
    multiprocess_logging_manager = MultiprocessingLoggerManager(
        file_path=log_path, logging_level=logging.INFO)
    master_logger = multiprocess_logging_manager.get_logger("Master")
    master_logger.log(
        "----------------------------------------------------------------")
    master_logger.log(
        "                    STARING NEW EXPERIMENT                      ")
    master_logger.log(
        "----------------------------------------------------------------")

    with open("data/house/config.json") as f:
        config = json.load(f)
    with open("data/shared/contextual_bandit_constants.json") as f:
        constants = json.load(f)
    constants['horizon'] = 40  # TODO HACK!!
    print(json.dumps(config, indent=2))

    # Validate the setting
    setup_validator = HouseSetupValidator()
    setup_validator.validate(config, constants)

    # Log core experiment details
    master_logger.log("CONFIG DETAILS")
    for k, v in sorted(config.items()):
        master_logger.log("    %s --- %r" % (k, v))
    master_logger.log("CONSTANTS DETAILS")
    for k, v in sorted(constants.items()):
        master_logger.log("    %s --- %r" % (k, v))
    master_logger.log("START SCRIPT CONTENTS")
    with open(__file__) as f:
        for line in f.readlines():
            master_logger.log(">>> " + line.strip())
    master_logger.log("END SCRIPT CONTENTS")

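    # The action space bundles navigation actions, a stop action and, when
    # use_manipulation is set, a num_manipulation_row x num_manipulation_col
    # grid of manipulation actions.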
    action_space = ActionSpace(config["action_names"], config["stop_action"],
                               config["use_manipulation"],
                               config["num_manipulation_row"],
                               config["num_manipulation_col"])
    meta_data_util = MetaDataUtil()

    # TODO: Create vocabulary
    vocab = dict()
    with open(data_filename + "/house_all_vocab.txt") as vocab_file:
        vocab_list = vocab_file.readlines()
    for i, tk in enumerate(vocab_list):
        token = tk.strip().lower()
        # vocab[token] = i
        vocab[i] = token
    # vocab["$UNK$"] = len(vocab_list)
    vocab[len(vocab_list)] = "$UNK$"
    config["vocab_size"] = len(vocab_list) + 1

    # Number of processes
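    # (one per house; each house gets its own simulator server and port below)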
    house_ids = [1, 2, 3, 4, 5]
    num_processes = len(house_ids)

    try:
        # Create the model
        master_logger.log("CREATING MODEL")
        model_type = TmpHouseMisraBaseline  # alternative: TmpHouseIncrementalModelOracleGoldProb
        shared_model = model_type(config, constants, use_image=False)
        # model.load_saved_model("./results/paragraph_chaplot_attention/chaplot_model_epoch_3")

        # make the shared model use share memory
        shared_model.share_memory()

        master_logger.log("MODEL CREATED")
        print("Created Model...")

        # Read the dataset
        tune_split, train_split = [], []
        for hid in house_ids:
            all_train_data = DatasetParser.parse(
                data_filename + "/tokenized_house" + str(hid) +
                "_discrete_train.json", config)
            all_dev_data = DatasetParser.parse(
                data_filename + "/tokenized_house" + str(hid) +
                "_discrete_dev.json", config)
            # num_tune = int(len(all_train_data) * 0.1)
            # train_split.append(list(all_train_data[num_tune:]))
            # tune_split.append(list(all_train_data[:num_tune]))

            # Extract type of the dataset
            # lines = open("./simulators/house/datapoint_type_house" + str(hid) + "_v5_110.txt").readlines()
            # datapoint_id_type = {}
            # for line in lines:
            #     datapoint_id, datapoint_type = line.split()
            #     datapoint_id_type[int(datapoint_id)] = datapoint_type.strip()

            # Filter manipulation type
            # all_train_data = list(filter(lambda datapoint: datapoint_id_type[datapoint.get_id()] == "navigation", all_train_data))

            train_split.append(all_train_data)
            tune_split.append(all_dev_data)

        processes = []

        # Start the training thread(s)
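        # find_k_ports presumably reserves num_processes free ports, one per
        # simulator instance.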
        ports = find_k_ports(num_processes)
        master_logger.log("Found K Ports")
        for i, port in enumerate(ports):
            train_chunk = train_split[i]
            tmp_config = {k: v for k, v in config.items()}
            tmp_config["port"] = port
            tmp_tune_split = tune_split[i]
            print("Client " + str(i) + " getting a validation set of size ",
                  len(tmp_tune_split))
            server = HouseServer(tmp_config, action_space, port)
            master_logger.log("Server Initialized")
            client_logger = multiprocess_logging_manager.get_logger(i)
            p = mp.Process(target=TmpAsynchronousContextualBandit.do_train,
                           args=(house_ids[i], shared_model, tmp_config,
                                 action_space, meta_data_util, constants,
                                 train_chunk, tmp_tune_split, experiment,
                                 experiment_name, i, server, client_logger,
                                 model_type, vocab))
            p.daemon = False
            p.start()
            processes.append(p)

        for p in processes:
            p.join()

    except Exception:
        exc_info = sys.exc_info()
        traceback.print_exception(*exc_info)
Example No. 2
    # Tensorboard
    tensorboard = Tensorboard(experiment_name)

    # Read the dataset
    train_dataset, tune_dataset = [], []

    dataset = {}

    for house_id in house_ids:

        house_dataset = GoalPredictionHouseSingle360ImageSupervisedLearningFromDisk.parse(
            house_id=house_id, vocab=vocab, size=128)

        all_train_data = DatasetParser.parse(
            data_filename + "/tokenized_house" + str(house_id) +
            "_discrete_train.json", config)
        all_dev_data = DatasetParser.parse(
            data_filename + "/tokenized_house" + str(house_id) +
            "_discrete_dev.json", config)

        train_ids = [dp.datapoint_id for dp in all_train_data]
        dev_ids = [dp.datapoint_id for dp in all_dev_data]

        house_dataset_dict = {}
        for datapoint in house_dataset:
            house_dataset_dict.setdefault(datapoint.task_id, []).append(datapoint)
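        # train_ids and dev_ids computed above can then split the grouped
        # datapoints by task id; a hypothetical continuation (names assumed):
        #   train_dataset.extend(d for tid in train_ids
        #                        for d in house_dataset_dict.get(tid, []))
        #   tune_dataset.extend(d for tid in dev_ids
        #                       for d in house_dataset_dict.get(tid, []))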
Example No. 3
def main():

    data_filename = "./simulators/house/AssetsHouse"
    experiment_name = "tmp_house_1_debug_manual_control"
    experiment = "./results/" + experiment_name
    print("EXPERIMENT NAME: ", experiment_name)

    # Create the experiment folder
    if not os.path.exists(experiment):
        os.makedirs(experiment)

    # Define log settings
    log_path = experiment + '/train_baseline.log'
    multiprocess_logging_manager = MultiprocessingLoggerManager(
        file_path=log_path, logging_level=logging.INFO)
    master_logger = multiprocess_logging_manager.get_logger("Master")
    master_logger.log(
        "----------------------------------------------------------------")
    master_logger.log(
        "                    STARING NEW EXPERIMENT                      ")
    master_logger.log(
        "----------------------------------------------------------------")

    with open("data/house/config.json") as f:
        config = json.load(f)
    with open("data/shared/contextual_bandit_constants.json") as f:
        constants = json.load(f)
    constants['horizon'] = 40  # TODO HACK!!
    print(json.dumps(config, indent=2))

    # TODO: HouseSetupValidator()
    # setup_validator = BlocksSetupValidator()
    # setup_validator.validate(config, constants)

    # log core experiment details
    master_logger.log("CONFIG DETAILS")
    for k, v in sorted(config.items()):
        master_logger.log("    %s --- %r" % (k, v))
    master_logger.log("CONSTANTS DETAILS")
    for k, v in sorted(constants.items()):
        master_logger.log("    %s --- %r" % (k, v))
    master_logger.log("START SCRIPT CONTENTS")
    with open(__file__) as f:
        for line in f.readlines():
            master_logger.log(">>> " + line.strip())
    master_logger.log("END SCRIPT CONTENTS")

    config["use_manipulation"] = True  # debug manipulation
    action_space = ActionSpace(config["action_names"], config["stop_action"],
                               config["use_manipulation"],
                               config["num_manipulation_row"],
                               config["num_manipulation_col"])
    meta_data_util = MetaDataUtil()

    # TODO: Create vocabulary
    vocab = dict()
    with open(data_filename + "/house_all_vocab.txt") as vocab_file:
        vocab_list = vocab_file.readlines()
    for i, tk in enumerate(vocab_list):
        token = tk.strip().lower()
        # vocab[token] = i
        vocab[i] = token
    # vocab["$UNK$"] = len(vocab_list)
    vocab[len(vocab_list)] = "$UNK$"
    config["vocab_size"] = len(vocab_list) + 1

    # Number of processes
    house_ids = [1]  # [1,2,3]
    num_processes = len(house_ids)

    try:
        # Create the model
        master_logger.log("CREATING MODEL")
        model_type = TmpHouseIncrementalModelChaplot
        shared_model = model_type(config, constants)
        # model.load_saved_model("./results/paragraph_chaplot_attention/chaplot_model_epoch_3")

        # make the shared model use share memory
        shared_model.share_memory()

        master_logger.log("MODEL CREATED")
        print("Created Model...")

        # Read the dataset
        tune_split, train_split = [], []
        for hid in house_ids:
            all_train_data = DatasetParser.parse(
                data_filename + "/tokenized_house" + str(hid) +
                "_discrete.json", config)
            all_dev_data = DatasetParser.parse(
                data_filename + "/tokenized_house" + str(hid) +
                "_discrete_dev.json", config)
            # num_tune = int(len(all_train_data) * 0.1)
            # train_split.append(list(all_train_data[num_tune:]))
            # tune_split.append(list(all_train_data[:num_tune]))

            # Extract type of the dataset
            # lines = open("./simulators/house/datapoint_type_house" + str(hid) + "_v5_110.txt").readlines()
            # datapoint_id_type = {}
            # for line in lines:
            #     datapoint_id, datapoint_type = line.split()
            #     datapoint_id_type[int(datapoint_id)] = datapoint_type.strip()
            #
            # # Filter manipulation type
            # all_train_data = list(
            #     filter(lambda datapoint: datapoint_id_type[datapoint.get_id()] == "navigation", all_train_data))

            all_train_data = all_train_data[0:50]  # keep only a small slice for quick manual debugging
            train_split.append(all_train_data)
            tune_split.append(all_train_data)
            # train_split.append(all_train_data)
            # tune_split.append(all_dev_data)

        # Launch the server
        ports = find_k_ports(1)
        port = ports[0]
        tmp_config = {k: v for k, v in config.items()}
        tmp_config["port"] = port
        tmp_tune_split = tune_split[0]
        print("Client " + str(0) + " getting a validation set of size ",
              len(tmp_tune_split))
        server = HouseServer(tmp_config, action_space, port)

        house_id = house_ids[0]
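        # launch_k_unity_builds presumably starts the house-specific Unity
        # binary and points it at the reserved port before the server
        # handshake below.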
        launch_k_unity_builds([tmp_config["port"]],
                              "./house_" + str(house_id) + "_elmer.x86_64",
                              arg_str="--config ./AssetsHouse/config" +
                              str(house_id) + ".json",
                              cwd="./simulators/house/")

        server.initialize_server()

        # Create a local model for rollouts
        local_model = model_type(tmp_config, constants)
        # local_model.train()

        # Create the Agent
        print("STARTING AGENT")
        tmp_agent = TmpHouseAgent(server=server,
                                  model=local_model,
                                  test_policy=None,
                                  action_space=action_space,
                                  meta_data_util=meta_data_util,
                                  config=tmp_config,
                                  constants=constants)
        print("Created Agent...")
        index = 0
        while True:
            print("Giving another data %r ", len(train_split[0]))
            # index = random.randint(0, len(train_split[0]) - 1)
            index = (index + 1) % len(train_split[0])
            print("Dataset id is " + str(train_split[0][index].get_id()))
            tmp_agent.debug_manual_control(train_split[0][index], vocab)
            # tmp_agent.debug_tracking(train_split[0][index], vocab)

    except Exception:
        exc_info = sys.exc_info()
        traceback.print_exception(*exc_info)
Example No. 4
def main(args):

    experiment_name = "train_house_chaplot_house_baseline_postbugfix"
    experiment = "./results/" + experiment_name
    print("EXPERIMENT NAME: ", experiment_name)

    data_filename = "simulators/house/AssetsHouse"

    supervised = False

    # Create the experiment folder
    if not os.path.exists(experiment):
        os.makedirs(experiment)

    # Define log settings
    log_path = experiment + '/train_baseline.log'
    multiprocess_logging_manager = MultiprocessingLoggerManager(
        file_path=log_path, logging_level=logging.INFO)
    master_logger = multiprocess_logging_manager.get_logger("Master")
    master_logger.log("----------------------------------------------------------------")
    master_logger.log("                    STARING NEW EXPERIMENT                      ")
    master_logger.log("----------------------------------------------------------------")

    # Test policy
    test_policy = gp.get_argmax_action

    with open("data/house/config.json") as f:
        config = json.load(f)
    with open("data/shared/contextual_bandit_constants.json") as f:
        constants = json.load(f)
    constants['horizon'] = 40  # TODO HACK!!
    print(json.dumps(config, indent=2))

    setup_validator = HouseSetupValidator()
    setup_validator.validate(config, constants)

    # log core experiment details
    master_logger.log("CONFIG DETAILS")
    for k, v in sorted(config.items()):
        master_logger.log("    %s --- %r" % (k, v))
    master_logger.log("CONSTANTS DETAILS")
    for k, v in sorted(constants.items()):
        master_logger.log("    %s --- %r" % (k, v))
    master_logger.log("START SCRIPT CONTENTS")
    with open(__file__) as f:
        for line in f.readlines():
            master_logger.log(">>> " + line.strip())
    master_logger.log("END SCRIPT CONTENTS")

    action_space = ActionSpace(config["action_names"], config["stop_action"], config["use_manipulation"],
                               config["num_manipulation_row"], config["num_manipulation_col"])
    meta_data_util = MetaDataUtil()

    # Create vocabulary
    vocab = dict()
    with open(data_filename + "/house_all_vocab.txt") as vocab_file:
        vocab_list = vocab_file.readlines()
    for i, tk in enumerate(vocab_list):
        token = tk.strip().lower()
        # vocab[token] = i
        vocab[i] = token
    # vocab["$UNK$"] = len(vocab_list)
    vocab[len(vocab_list)] = "$UNK$"

    args.input_size = config['vocab_size'] + 2  # the +2 presumably reserves ids for extra special tokens

    # Number of processes
    house_ids = [1, 2, 3, 4, 5]
    num_processes = len(house_ids)
    args.num_processes = num_processes

    try:
        # Create the model
        master_logger.log("CREATING MODEL")
        model_type = a3c_lstm_ga_default
        shared_model = model_type(args, action_space=action_space, config=config)
        # shared_model = model_type(config, constants)

        # make the shared model use share memory
        shared_model.share_memory()

        lstm_size = 256
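        # The concat_gavector variant presumably concatenates three
        # gated-attention vectors, hence the tripled LSTM size below.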
        if isinstance(shared_model, a3c_lstm_ga_concat_gavector):
            lstm_size *= 3
        model = ChaplotBaselineHouse(args, shared_model, config, constants, tensorboard=None,
                                     use_contextual_bandit=False, lstm_size=lstm_size)

        master_logger.log("MODEL CREATED")
        print("Created Model...")

        # Read the dataset
        tune_split, train_split = [], []
        for hid in house_ids:
            all_train_data = DatasetParser.parse(
                data_filename + "/tokenized_house" + str(hid) + "_discrete_train.json", config)
            all_dev_data = DatasetParser.parse(
                data_filename + "/tokenized_house" + str(hid) + "_discrete_dev.json", config)

            train_split.append(all_train_data)
            tune_split.append(all_dev_data)

            master_logger.log("Created train dataset of size {} ".format(len(all_train_data)))
            master_logger.log("Created tuning dataset of size {} ".format(len(all_dev_data)))

        # Start the training thread(s)
        ports = find_k_ports(num_processes)
        master_logger.log("Found K Ports")
        processes = []
        for i, port in enumerate(ports):
            train_chunk = train_split[i]
            print("Size of training data: {}".format(len(train_chunk)))
            tmp_config = {k: v for k, v in config.items()}
            tmp_config["port"] = port
            tmp_tune_split = tune_split[i]
            print("Client " + str(house_ids[i]) + " getting a validation set of size ", len(tmp_tune_split))
            server = HouseServer(tmp_config, action_space, port)
            client_logger = multiprocess_logging_manager.get_logger(i)

            # Run the Training
            p = mp.Process(target=ChaplotBaselineHouse.do_train, args=(house_ids[i], model, shared_model, tmp_config,
                                                                       action_space, meta_data_util,
                                                                       constants, train_chunk, tmp_tune_split,
                                                                       experiment, experiment_name, i, server,
                                                                       client_logger, model_type, vocab, args,
                                                                       False, lstm_size))
            p.daemon = False
            p.start()
            processes.append(p)

        for p in processes:
            p.join()

    except Exception:
        # server.kill()
        exc_info = sys.exc_info()
        traceback.print_exception(*exc_info)
Example No. 5
logging.info("START SCRIPT CONTENTS")
with open(__file__) as f:
    for line in f.readlines():
        logging.info(">>> " + line.strip())
logging.info("END SCRIPT CONTENTS")

act_space = ActionSpace(config["action_names"], config["stop_action"])
meta_data_util = MetaDataUtil()

# Create the server
logging.log(logging.DEBUG, "STARTING SERVER")
server = HouseServer(config, act_space, ports[0])
logging.log(logging.DEBUG, "STARTED SERVER")

# Launch the build
launch_k_unity_builds([ports[0]], "./simulators/house_3_elmer.x86_64")
# Connect to the launched build
server.connect()

# Create the agent
logging.log(logging.DEBUG, "STARTING AGENT")
agent = Agent(Agent.ORACLE, server, act_space, meta_data_util)

# Read the house dataset
dev_dataset = DatasetParser.parse("data/house/dataset/house_3_dev.json",
                                  config)
logging.info("Created test dataset of size %d ", len(dev_dataset))

# Test on this dataset
agent.test(dev_dataset)

server.kill()
Example No. 6
def main():

    data_filename = "./simulators/house/AssetsHouse"
    experiment_name = "house_test_most_frequent_test_dataset"
    experiment = "./results/" + experiment_name
    print("EXPERIMENT NAME: ", experiment_name)

    # Create the experiment folder
    if not os.path.exists(experiment):
        os.makedirs(experiment)

    # Define log settings
    log_path = experiment + '/test_baseline.log'
    multiprocess_logging_manager = MultiprocessingLoggerManager(
        file_path=log_path, logging_level=logging.INFO)
    master_logger = multiprocess_logging_manager.get_logger("Master")
    master_logger.log("----------------------------------------------------------------")
    master_logger.log("                    STARING NEW EXPERIMENT                      ")
    master_logger.log("----------------------------------------------------------------")

    with open("data/house/config.json") as f:
        config = json.load(f)
    with open("data/shared/contextual_bandit_constants.json") as f:
        constants = json.load(f)
    constants['horizon'] = 40  # TODO HACK!!
    print(json.dumps(config, indent=2))

    setup_validator = HouseSetupValidator()
    setup_validator.validate(config, constants)

    # log core experiment details
    master_logger.log("CONFIG DETAILS")
    for k, v in sorted(config.items()):
        master_logger.log("    %s --- %r" % (k, v))
    master_logger.log("CONSTANTS DETAILS")
    for k, v in sorted(constants.items()):
        master_logger.log("    %s --- %r" % (k, v))
    master_logger.log("START SCRIPT CONTENTS")
    with open(__file__) as f:
        for line in f.readlines():
            master_logger.log(">>> " + line.strip())
    master_logger.log("END SCRIPT CONTENTS")

    action_space = ActionSpace(config["action_names"], config["stop_action"], config["use_manipulation"],
                               config["num_manipulation_row"], config["num_manipulation_col"])
    meta_data_util = MetaDataUtil()

    # TODO: Create vocabulary
    vocab = dict()
    with open(data_filename + "/house_all_vocab.txt") as vocab_file:
        vocab_list = vocab_file.readlines()
    for i, tk in enumerate(vocab_list):
        token = tk.strip().lower()
        # vocab[token] = i
        vocab[i] = token
    # vocab["$UNK$"] = len(vocab_list)
    vocab[len(vocab_list)] = "$UNK$"
    config["vocab_size"] = len(vocab_list) + 1

    # Number of processes
    house_ids = [1, 2, 3, 4, 5]
    num_processes = len(house_ids)

    try:
        # Read the dataset
        test_data = []
        for hid in house_ids:
            all_dev_data = DatasetParser.parse(
                data_filename + "/tokenized_house" + str(hid) + "_discrete_test.json", config)

            # Extract type of the dataset
            # lines = open("./datapoint_type_house" + str(hid) + ".txt").readlines()
            # datapoint_id_type = {}
            # for line in lines:
            #     datapoint_id, datapoint_type = line.split()
            #     datapoint_id_type[int(datapoint_id)] = datapoint_type.strip()

            # Filter manipulation type
            # all_dev_data = filter(lambda datapoint: datapoint_id_type[datapoint.get_id()] == "navigation", all_dev_data)
            test_data.append(list(all_dev_data))

        processes = []

        # Start the testing thread(s)
        ports = find_k_ports(num_processes)
        master_logger.log("Found K Ports")
        for i, port in enumerate(ports):
            test_chunk = test_data[i]  # Simulator i runs house i and uses the dataset for house i
            tmp_config = {k: v for k, v in config.items()}
            tmp_config["port"] = port
            print("Client " + str(i) + " getting test set of size ", len(test_chunk))
            client_logger = multiprocess_logging_manager.get_logger(i)
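            # Agent.MOST_FREQUENT presumably always plays the action most
            # frequent in the training data, as a trivial baseline.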
            p = mp.Process(target=Agent.test_multiprocess, args=(house_ids[i], test_chunk, tmp_config, action_space, port,
                                                                 Agent.MOST_FREQUENT, meta_data_util, constants, vocab,
                                                                 client_logger, None))
            p.daemon = False
            p.start()
            processes.append(p)

        for p in processes:
            p.join()

    except Exception:
        exc_info = sys.exc_info()
        traceback.print_exception(*exc_info)
Example No. 7
def main():

    data_filename = "simulators/house/AssetsHouse"
    experiment_name = "emnlp-camera-ready-figure-plot"
    experiment = "./results/" + experiment_name
    print("EXPERIMENT NAME: ", experiment_name)

    # Create the experiment folder
    if not os.path.exists(experiment):
        os.makedirs(experiment)

    # Define log settings
    log_path = experiment + '/test_baseline_inferred_oos.log'
    multiprocess_logging_manager = MultiprocessingLoggerManager(
        file_path=log_path, logging_level=logging.INFO)
    master_logger = multiprocess_logging_manager.get_logger("Master")
    master_logger.log("----------------------------------------------------------------")
    master_logger.log("                    STARING NEW EXPERIMENT                      ")
    master_logger.log("----------------------------------------------------------------")

    with open("data/house/config.json") as f:
        config = json.load(f)
    with open("data/shared/contextual_bandit_constants.json") as f:
        constants = json.load(f)
    constants['horizon'] = 40  # TODO HACK!!
    print(json.dumps(config, indent=2))

    # Validate the setting
    setup_validator = HouseSetupValidator()
    setup_validator.validate(config, constants)

    # Log core experiment details
    master_logger.log("CONFIG DETAILS")
    for k, v in sorted(config.items()):
        master_logger.log("    %s --- %r" % (k, v))
    master_logger.log("CONSTANTS DETAILS")
    for k, v in sorted(constants.items()):
        master_logger.log("    %s --- %r" % (k, v))
    master_logger.log("START SCRIPT CONTENTS")
    with open(__file__) as f:
        for line in f.readlines():
            master_logger.log(">>> " + line.strip())
    master_logger.log("END SCRIPT CONTENTS")

    action_space = ActionSpace(config["action_names"], config["stop_action"], config["use_manipulation"],
                               config["num_manipulation_row"], config["num_manipulation_col"])
    meta_data_util = MetaDataUtil()

    # TODO: Create vocabulary
    vocab = dict()
    with open(data_filename + "/house_all_vocab.txt") as vocab_file:
        vocab_list = vocab_file.readlines()
    for i, tk in enumerate(vocab_list):
        token = tk.strip().lower()
        # vocab[token] = i
        vocab[i] = token
    # vocab["$UNK$"] = len(vocab_list)
    vocab[len(vocab_list)] = "$UNK$"
    config["vocab_size"] = len(vocab_list) + 1

    # Number of processes
    house_ids = [1, 2, 3, 4, 5]
    num_processes = len(house_ids)

    try:
        master_logger.log("CREATING MODEL")

        # Create the goal prediction model
        # shared_goal_prediction_model = IncrementalModelAttentionChaplotResNet(
        #     config, constants, final_model_type="m4jksum1", final_dimension=(64, 32, 32 * 6))
        shared_goal_prediction_model = IncrementalModelAttentionChaplotResNet(
            config, constants, final_model_type="unet-positional-encoding", final_dimension=(64, 32, 32 * 6))
        shared_goal_prediction_model.load_saved_model(
            "./results/house_goal_prediction/goal_prediction_single_supervised_epoch_4")
        # shared_goal_prediction_model.load_saved_model(
        #     "./results/train_house_goal_prediction_m4jksum1_repeat/goal_prediction_single_supervised_epoch_4")
        # shared_goal_prediction_model.load_saved_model(
        #     "./results/train_house_two_stage_model/predictor_contextual_bandit_2_epoch_2")
        # shared_goal_prediction_model.load_saved_model(
        #     "./results/train_house_goal_prediction_dummy_token/goal_prediction_single_supervised_epoch_9")

        # Create the navigation model
        model_type = TmpHouseIncrementalModelOracleGoldProb  # TmpHouseIncrementalModelChaplot
        shared_navigator_model = model_type(config, constants, use_image=False)
        # shared_navigator_model.load_saved_model(
        #     "./results/train_house_two_stage_model/navigator_contextual_bandit_2_epoch_2")
        shared_navigator_model.load_saved_model(
            "./results/house_unet_cb_navigation_gold_goal/contextual_bandit_0_epoch_5")
        # shared_navigator_model.load_saved_model(
        #     "./results/house_unet_cb_navigation_gold_goal_no_RNN/contextual_bandit_0_epoch_10")

        # Create the action type model
        shared_action_type_model = ActionTypeModule()
        shared_action_type_model.cuda()
        shared_action_type_model.load_state_dict(
            torch.load("./results/train_house_action_types/goal_prediction_single_supervised_epoch_7"))
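        # ActionTypeModule appears to be a plain torch module, so its weights
        # are restored via load_state_dict rather than the load_saved_model
        # helper used for the project models above.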

        # make the shared models use share memory
        shared_goal_prediction_model.share_memory()
        shared_navigator_model.share_memory()
        shared_action_type_model.share_memory()

        master_logger.log("MODEL CREATED")
        print("Created Model...")

        # Read the dataset
        test_split = []
        for hid in house_ids:
            all_test_data = DatasetParser.parse(
                data_filename + "/tokenized_house" + str(hid) + "_discrete_dev.json", config)

            # # Extract type of the dataset
            # lines = open("./simulators/house/datapoint_type_house" + str(hid) + ".txt").readlines()
            # datapoint_id_type = {}
            # for line in lines:
            #     words = line.split()
            #     datapoint_id, datapoint_type = words[0], words[1:]
            #     datapoint_id_type[int(datapoint_id)] = datapoint_type  # .strip()
            #
            # # Filter manipulation type
            # all_test_data = list(filter(lambda datapoint: "manipulation" not in datapoint_id_type[datapoint.get_id()],
            #                       all_test_data))

            test_split.append(all_test_data)

        processes = []

        # Start the training thread(s)
        ports = find_k_ports(num_processes)
        master_logger.log("Found K Ports")
        for i, port in enumerate(ports):
            test_chunk = test_split[i]
            tmp_config = {k: v for k, v in config.items()}
            tmp_config["port"] = port
            print("Client " + str(i) + " getting a test set of size ", len(test_chunk))
            server = HouseServer(tmp_config, action_space, port)
            master_logger.log("Server Initialized")
            client_logger = multiprocess_logging_manager.get_logger(i)
            p = mp.Process(target=HouseDecoupledPredictorNavigatorAgent.do_test, args=(house_ids[i],
                                                                                       shared_goal_prediction_model,
                                                                                       shared_navigator_model,
                                                                                       shared_action_type_model,
                                                                                       tmp_config, action_space,
                                                                                       meta_data_util, constants,
                                                                                       test_chunk, experiment_name,
                                                                                       i, server,
                                                                                       client_logger, vocab, "inferred"))
            p.daemon = False
            p.start()
            processes.append(p)

        for p in processes:
            p.join()

    except Exception:
        exc_info = sys.exc_info()
        traceback.print_exception(*exc_info)
Example No. 8
def main():

    data_filename = "./simulators/house/AssetsHouse"
    experiment_name = "emnlp_camera_ready_test_human_performance"
    experiment = "./results/" + experiment_name
    print("EXPERIMENT NAME: ", experiment_name)

    # Create the experiment folder
    if not os.path.exists(experiment):
        os.makedirs(experiment)

    # Number of processes
    house_id = 3

    # Define log settings
    log_path = experiment + '/test_baseline_%d.log' % house_id
    multiprocess_logging_manager = MultiprocessingLoggerManager(
        file_path=log_path, logging_level=logging.INFO)
    master_logger = multiprocess_logging_manager.get_logger("Master")
    master_logger.log(
        "----------------------------------------------------------------")
    master_logger.log(
        "                    STARING NEW EXPERIMENT                      ")
    master_logger.log(
        "----------------------------------------------------------------")

    with open("data/house/config.json") as f:
        config = json.load(f)
    with open("data/shared/contextual_bandit_constants.json") as f:
        constants = json.load(f)
    constants['horizon'] = 40  # TODO HACK!!
    print(json.dumps(config, indent=2))

    # TODO: HouseSetupValidator()
    # setup_validator = BlocksSetupValidator()
    # setup_validator.validate(config, constants)

    # log core experiment details
    master_logger.log("CONFIG DETAILS")
    for k, v in sorted(config.items()):
        master_logger.log("    %s --- %r" % (k, v))
    master_logger.log("CONSTANTS DETAILS")
    for k, v in sorted(constants.items()):
        master_logger.log("    %s --- %r" % (k, v))
    master_logger.log("START SCRIPT CONTENTS")
    with open(__file__) as f:
        for line in f.readlines():
            master_logger.log(">>> " + line.strip())
    master_logger.log("END SCRIPT CONTENTS")

    config["use_manipulation"] = True  # debug manipulation
    action_space = ActionSpace(config["action_names"], config["stop_action"],
                               config["use_manipulation"],
                               config["num_manipulation_row"],
                               config["num_manipulation_col"])
    meta_data_util = MetaDataUtil()

    # TODO: Create vocabulary
    vocab = dict()
    with open(data_filename + "/house_all_vocab.txt") as vocab_file:
        vocab_list = vocab_file.readlines()
    for i, tk in enumerate(vocab_list):
        token = tk.strip().lower()
        vocab[i] = token
    vocab[len(vocab_list)] = "$UNK$"
    config["vocab_size"] = len(vocab_list) + 1

    try:
        # Create the model
        master_logger.log("CREATING MODEL")
        model_type = TmpHouseIncrementalModelChaplot
        shared_model = model_type(config, constants)
        # model.load_saved_model("./results/paragraph_chaplot_attention/chaplot_model_epoch_3")

        # make the shared model use share memory
        shared_model.share_memory()

        master_logger.log("MODEL CREATED")
        print("Created Model...")

        # Read the dataset
        test_split = DatasetParser.parse(
            data_filename + "/tokenized_house" + str(house_id) +
            "_discrete_dev.json", config)
        test_split = test_split[2:20]

        # Launch the server
        ports = find_k_ports(1)
        port = ports[0]
        tmp_config = {k: v for k, v in config.items()}
        tmp_config["port"] = port
        print("Client " + str(0) + " getting a validation set of size ",
              len(test_split))
        server = HouseServer(tmp_config, action_space, port)

        launch_k_unity_builds([tmp_config["port"]],
                              "./house_" + str(house_id) + "_elmer.x86_64",
                              arg_str="--config ./AssetsHouse/config" +
                              str(house_id) + ".json",
                              cwd="./simulators/house/")

        server.initialize_server()

        # Create a local model for rollouts
        local_model = model_type(tmp_config, constants)
        # local_model.train()

        # Create the Agent
        print("STARTING AGENT")
        tmp_agent = TmpHouseAgent(server=server,
                                  model=local_model,
                                  test_policy=None,
                                  action_space=action_space,
                                  meta_data_util=meta_data_util,
                                  config=tmp_config,
                                  constants=constants)
        print("Created Agent...")
        tmp_agent.test_human_performance(test_split, vocab, master_logger)

    except Exception:
        exc_info = sys.exc_info()
        traceback.print_exception(*exc_info)