def web_animations(dir_path):
    """Generate web animations for every model dataset in the parameter grid.

    For each (noise, bottleneck, exposures) combination and each of three
    model configurations (simulation at weight 1.0, inference at 1.0,
    inference at 500.0), extract the typical best chain by language cost
    over generations 0-50 and save two animations: one without the 'seen'
    overlay (with thumbnail) and one with it (no thumbnail).

    dir_path : output directory prefix; filenames are appended by plain
        string concatenation, so it should end with a path separator.
    """
    # (output prefix, data subdirectory, weight) — the original spelled
    # this processing out three times verbatim; the order below preserves
    # the original execution order within each parameter triple.
    configs = [
        ('s', 'model_sim', '1.0'),
        ('i', 'model_inf', '1.0'),
        ('i', 'model_inf', '500.0'),
    ]
    for noise in ['0.01', '0.05', '0.1']:
        for bottleneck in ['1', '2', '3', '4']:
            for exposures in ['1', '2', '3', '4']:
                for prefix, subdir, weight in configs:
                    input_file = '../data/%s/%s_%s_%s_%s.json' % (
                        subdir, weight, noise, bottleneck, exposures)
                    output_file = dir_path + '%s_%s_%s_%s_%s' % (
                        prefix, weight, noise, bottleneck, exposures)
                    # Typical chain selected by 'lang_cost' over gens 0-50.
                    best_chain = il_results.extract_dataset(
                        tools.read_json_file(input_file), 0, 50,
                        'lang_cost', True)
                    il_animations.save_animation(
                        best_chain, output_file,
                        show_seen=False, create_thumbnail=True)
                    il_animations.save_animation(
                        best_chain, output_file + '_seen',
                        show_seen=True, create_thumbnail=False)
def extract_generation_distribution(data_path, measure, generation):
    """Collect *measure* at a fixed *generation* from every chain in the
    dataset stored at *data_path*, returning the values as a list."""
    dataset = tools.read_json_file(data_path)
    return [
        chain['generations'][generation][measure]
        for chain in dataset['chains']
    ]
def __init__(self, parameters=None):
    """Initialize the UDP server wrapper from an optional parameter mapping.

    parameters : optional dict that may provide 'ip', 'port' and
        'buffersize'; each missing key falls back to the corresponding
        module-level default. Passing None (the default) now behaves like
        passing an empty dict.
    """
    # Bug fix: the original dereferenced `parameters` unconditionally, so
    # calling __init__() with its own documented default of None raised
    # TypeError ("argument of type 'NoneType' is not iterable").
    if parameters is None:
        parameters = {}
    ip = str(parameters['ip']) if 'ip' in parameters else DEFAULT_UDP_IP
    port = int(parameters['port']) if 'port' in parameters else DEFAULT_UDP_PORT
    buffersize = (int(parameters['buffersize'])
                  if 'buffersize' in parameters else DEFAULT_UDP_BUFFERSIZE)
    # Load the message blueprint once at construction time.
    self._blueprint = read_json_file(JSON_STRUCT_FILE)
    self.udpserver = UDPServer(ip, port, buffersize)
def make_model_chains_figure(figure_path):
    """Render the three-chain comparison figure — simulation, inference
    (weight 1.0), and strong inference (weight 500.0) — for the
    0.01 noise / bottleneck 2 / exposures 2 parameter setting.

    figure_path : destination path for the rendered figure.
    """
    # The original repeated the extract-and-tag stanza three times; a
    # data-driven loop keeps the chain_id assignment (0, 1, 2) identical.
    sources = [
        '../data/model_sim/1.0_0.01_2_2.json',
        '../data/model_inf/1.0_0.01_2_2.json',
        '../data/model_inf/500.0_0.01_2_2.json',
    ]
    chains = []
    for chain_id, path in enumerate(sources):
        # Typical best chain by 'lang_cost' over generations 0-50.
        best_chain = il_results.extract_dataset(
            tools.read_json_file(path), 0, 50, 'lang_cost', True)
        best_chain['chain_id'] = chain_id
        chains.append(best_chain)
    data = {'chains': chains}
    il_visualize.make_figure(data, figure_path, start_gen=0, end_gen=50,
                             n_columns=17, method='language',
                             rect_compress=True)
def load(data_path, start_gen, end_gen, method='prod',
         return_typical_chain=False):
    """Build a {measure: dataset} mapping over the four standard measures
    (expressivity, complexity, cost, error), each extracted for the
    generation range [start_gen, end_gen] under the given method prefix."""
    raw = tools.read_json_file(data_path)
    return {
        measure: extract_dataset(raw, start_gen, end_gen,
                                 method + '_' + measure,
                                 return_typical_chain)
        for measure in ('expressivity', 'complexity', 'cost', 'error')
    }
def main():
    """Entry point: load settings, prepare the database, record the start
    time, then launch the VK and Telegram runners."""
    config = read_json_file("settings.json")
    generate_database()
    # Persist the launch timestamp so other components can reference it.
    start_time = time.time()
    save_global_pref('start_time', start_time)
    vk_runner = VkRunner(config)
    vk_runner.start()
    tg_runner = TgRunner(config)
    tg_runner.start()
    # Removed the trailing dead `pass` statement from the original.
def generate_csv_for_stats(input_file, output_file, start_gen=1, end_gen=10):
    """Flatten per-generation production measures into a CSV file.

    Each row is one (chain, generation) pair with a globally sequential
    subject number; only generations in [start_gen, end_gen] are included.

    input_file : JSON dataset path with a 'chains' list of generations.
    output_file : destination path for the CSV.
    """
    dataset = tools.read_json_file(input_file)
    # Accumulate rows in a list and join once at the end: the original's
    # repeated `csv += ...` concatenation is quadratic in the row count.
    rows = ['subject,chain,generation,expressivity,error,complexity,cost']
    subject = 1
    for chain_i, chain in enumerate(dataset['chains'], 1):
        for gen_i in range(start_gen, end_gen + 1):
            generation = chain['generations'][gen_i]
            rows.append('%i,%i,%i,%i,%s,%s,%s' % (
                subject, chain_i, gen_i,
                generation['prod_expressivity'],
                str(generation['prod_error']),
                str(generation['prod_complexity']),
                str(generation['prod_cost'])))
            subject += 1
    with open(output_file, mode='w') as file:
        # Trailing newline matches the original per-row '\n' output exactly.
        file.write('\n'.join(rows) + '\n')
def plot(datasets, shape, nsims, maxcats, figure_path, figsize=(5, 4.8)):
    """Plot one axis per dataset (stacked vertically) and save the figure.

    datasets : sequence of (dataset, xlim, ylim) triples, one per subplot.
    shape, nsims, maxcats : forwarded to plot_space for each subplot.
    figure_path : output path; saved as SVG, then converted if the path
        does not end in '.svg'.
    figsize : matplotlib figure size in inches.
    """
    fig, axes = plt.subplots(len(datasets), 1, figsize=figsize, squeeze=False)
    # Pair each (dataset, xlim, ylim) triple with its own axis.
    for (dataset, xlim, ylim), axis in zip(datasets, axes.flatten()):
        plot_space(axis, dataset, shape, xlim, ylim, nsims, maxcats)
    fig.tight_layout(pad=0.1, h_pad=0.5, w_pad=0.5)
    # Always written as SVG first; label formatting operates on the SVG.
    fig.savefig(figure_path, format='svg')
    tools.format_svg_labels(figure_path)
    if not figure_path.endswith('.svg'):
        # Convert in place to the format implied by the file extension.
        tools.convert_svg(figure_path, figure_path)


if __name__ == '__main__':
    paper2_experiment_2 = tools.read_json_file(
        '../data/experiments/exp2_chains.json')
    plot([(paper2_experiment_2, (0, 600), (4.25, 6.25))], (8, 8), 40, 8,
         '/Users/jon/Desktop/simp_inf_space_paper2.eps', (5, 3.5))
    # NOTE(review): the literal below is truncated in this copy of the
    # source — the dict/list brackets are never closed; recover the full
    # data from the original script before running this module directly.
    paper3_experiment_1 = {
        'chains': [{
            'chain_id': 'A',
            'first_fixation': None,
            'generations': [{
                'prod_cost': 1.436063501088083,
                'prod_complexity': 510.2091177638209
            }, {
                'prod_cost': 1.8013146362322552,
                'prod_complexity': 500.3523506476383
def make_experiment_chains_figure(figure_path):
    """Draw the experiment-2 chains figure from the stored dataset."""
    chain_data = tools.read_json_file('../data/experiments/exp2_chains.json')
    il_visualize.make_figure(
        chain_data, figure_path,
        start_gen=0, end_gen=50, n_columns=17, rect_compress=True)
'J18': 2, 'J19': 2, 'J20': 2, 'J21': 2, 'J22': 2, 'K17': 3, 'K18': 3, 'K19': 3, 'L26': 2, 'L27': 2, 'L28': 2, 'L29': 2, 'L30': 2 } results = tools.read_json_file('../data/experiments/exp2_chains.json') chain_letters = 'ABCDEFGHIJKL' participant_i = 1 for c, chain in enumerate(results['chains']): for g, generation in enumerate(chain['generations']): error = generation['prod_error'] if error is not None and error < 3.0: # exclude participants whose VI is greater than 3.0 data_in = [ tuple([tuple(meaning), signal]) for meaning, signal in chain['generations'][g - 1]['data_out'] ] data_out = [ tuple([tuple(meaning), signal]) for meaning, signal in np.ndenumerate(