def main(params: Parameters):
    """Build the curriculum described by *params* and save it to a curriculum repository.

    Reads the repository path and language mode from *params*, builds the
    train/test curricula, forces any lazy curriculum parts to be evaluated,
    and writes the resulting "strict" curriculum to disk.
    """
    curriculum_repository_path = params.creatable_directory(
        CURRICULUM_REPOSITORY_PATH_PARAMETER
    )
    language_mode = params.enum(
        LANGUAGE_MODE_PARAMETER, LanguageMode, default=LanguageMode.ENGLISH
    )

    train_curriculum, test_curriculum = curriculum_from_params(
        params, language_mode=language_mode
    )
    # Evaluate both halves up front so the stored curriculum no longer
    # depends on lazy generation.
    strict_curriculum = ExperimentCurriculum(
        evaluate_curriculum(train_curriculum), evaluate_curriculum(test_curriculum)
    )

    write_experiment_curriculum(
        curriculum_repository_path,
        params,
        language_mode,
        strict_curriculum,
        # The repository path and language mode describe where/how the
        # curriculum is stored, not the curriculum itself, so they are
        # excluded when identifying it in the repository.
        ignored_parameters=immutableset(
            IGNORED_PARAMETERS.union(
                {CURRICULUM_REPOSITORY_PATH_PARAMETER, LANGUAGE_MODE_PARAMETER}
            )
        ),
    )


if __name__ == "__main__":
    parameters_only_entry_point(main)
}) run_python_on_parameters( experiment_name, log_experiment_script, experiment_params, depends_on=[], ) write_workflow_description() EXPERIMENT_NAME_FORMAT = ( "{num_instances:d}_instances-{num_noise_instances:d}_noise_instances-{num_objects_in_instance:d}_" "objects_in_instance-{prob_given:.3f}_given-{prob_not_given:.3f}_not_given-{add_gaze}_gaze" ) FIXED_PARAMETERS = { "curriculum": "pursuit", "learner": "pursuit-gaze", "pursuit": { "learning_factor": 0.05, "graph_match_confirmation_threshold": 0.7, "lexicon_entry_threshold": 0.7, "smoothing_parameter": 0.001, }, } if __name__ == "__main__": parameters_only_entry_point(gaze_ablation_runner_entry_point)
viz, ) except RuntimeError as err: print(f"uncaught exception: {err}") else: # render phase 1 scenes: root_output_directory = params.optional_creatable_directory( "screenshot_directory") assert root_output_directory is not None if not os.path.isdir(root_output_directory): os.mkdir(root_output_directory) for idx, instance_group in enumerate( build_curriculum(None, None, GAILA_PHASE_1_LANGUAGE_GENERATOR)): # do any filtering here if instance_group.name() in EXCLUDED_CURRICULA: continue directory_name = f"{idx:03}-{instance_group.name()}" if not os.path.isdir(root_output_directory / directory_name): os.mkdir(root_output_directory / directory_name) # type: ignore # then call some function from make_scenes.py to run the curriculum make_scenes(params, [instance_group], root_output_directory / directory_name, viz) if __name__ == "__main__": parameters_only_entry_point(main, usage_message=USAGE_MESSAGE)
depends_on=[build_container], job_profiles=[job_profile], resource_request=saga31_request, container=python36, input_file_paths=[input_file], output_file_paths=[add_y_output_file_nas], ) sort_job = run_python_on_parameters( job_locator / "sort", sort_nums_in_file, {"input_file": add_y_output_file_nas, "output_file": sorted_output_file_nas}, depends_on=[add_y_job], container=python36, job_profiles=[job_profile], resource_request=saga31_request, input_file_paths=add_y_output_file_nas, output_file_paths=sorted_output_file_nas, ) _ = stop_docker_as_service( mongo4_4, depends_on=[start_mongo, sort_job], resource_request=saga31_request ) # Generate the Pegasus DAX file & a Submit Script write_workflow_description(tmp_path) if __name__ == "__main__": parameters_only_entry_point(example_workflow)
) elif curriculum_name in ( "m15-object-noise-experiments", "m18-integrated-learners-experiment", ): return ( training_instance_groups( num_samples, num_noise_objects, language_generator, params=params.namespace_or_empty("train_curriculum"), ), test_instance_groups( 5, 0, language_generator, params=params.namespace_or_empty("test_curriculum"), ) if test_instance_groups else [], ) return ( training_instance_groups(num_samples, num_noise_objects, language_generator), test_instance_groups(num_samples, num_noise_objects, language_generator) if test_instance_groups else [], ) if __name__ == "__main__": parameters_only_entry_point(log_experiment_entry_point)
# Pattern used to build a human-readable curriculum name from the
# noise/shuffle/attribute/relation settings of a given run.
CURRICULUM_NAME_FORMAT = (
    "noise@{noise}-shuffled@{shuffled}-attributes@{attributes}-relations@{relations}"
)

# Knobs controlling how noise objects and noise relations are sampled
# for each scene in the curriculum.
CURRICULUM_PARAMS = {
    "block_multiple_of_same_type": True,
    "include_targets_in_noise": False,
    "min_noise_objects": 1,
    "max_noise_objects": 10,
    "min_noise_relations": 0,
    "max_noise_relations": 5,
    "random_seed": 0,
    "chooser_seed": 0,
}

# Parameters held constant across every run of this experiment: the
# integrated learner with the action, plural, functional, and generics
# learners all disabled.
FIXED_PARAMETERS = {
    "curriculum": "m18-integrated-learners-experiment",
    "learner": "integrated-learner-params",
    "action_learner": {"learner_type": "none"},
    "plural_learner": {"learner_type": "none"},
    "include_functional_learner": False,
    "include_generics_learner": False,
}

if __name__ == "__main__":
    parameters_only_entry_point(integrated_experiment_entry_point)
# Template for naming each generated gaze-ablation experiment; the fields are
# filled with that run's specific settings (instance counts, gaze
# probabilities, etc.).
FILE_NAME_STRING = (
    "pursuit-ablating-gaze-{num_instances:d}_instances-{num_noise_instances:d}_noise_instances-{num_objects_in_instance:d}_"
    "objects_in_instance-{prob_given:.3f}_given-{prob_not_given:.3f}_not_given-{add_gaze}_gaze"
)

# Template for the .params (YAML) file written for each run.
# NOTE(review): the newlines inside this literal were lost when the source
# was flattened; the line structure below is reconstructed so the template
# is valid YAML again — confirm indentation against the params consumer.
PARAM_FILE_STRING = """_includes:
    - "../../root.params"
    - "m13.params"
experiment: '{experiment}'
curriculum: pursuit
learner: pursuit-gaze
accuracy_to_txt : True
pursuit:
    learning_factor: 0.05
    graph_match_confirmation_threshold: 0.7
    lexicon_entry_threshold: 0.7
    smoothing_parameter: .001
pursuit-curriculum-params:
    num_instances: {num_instances:d}
    num_noise_instances: {num_noise_instances:d}
    num_objects_in_instance: {num_objects_in_instance:d}
    add_gaze : {add_gaze}
    prob_given : {prob_given:.3f}
    prob_not_given : {prob_not_given:.3f}"""

if __name__ == "__main__":
    parameters_only_entry_point(create_gaze_ablation_entry_point)
# Parameters held constant across the object-language-ablation sweep:
# run the integrated object learner alone, logging precision/recall.
FIXED_PARAMETERS = {
    "curriculum": "m15-object-noise-experiments",
    "learner": "integrated-learner-params",
    # Observe precision/recall (not accuracy) after training.
    "post_observer": {
        "include_acc_observer": False,
        "include_pr_observer": True,
        "log_pr": True,
    },
    # Disable every learner other than the object learner.
    "attribute_learner": {"learner_type": "none"},
    "relation_learner": {"learner_type": "none"},
    "action_learner": {"learner_type": "none"},
    "plural_learner": {"learner_type": "none"},
    "include_functional_learner": False,
    "include_generics_learner": False,
    "test_observer": {"accuracy_to_txt": True},
}

if __name__ == "__main__":
    parameters_only_entry_point(object_language_ablation_runner_entry_point)