def __init__(self, **kwargs):
    """Apply keyword configuration via the arg table, then fill defaults.

    If the caller did not supply a ``lines`` setting, install the
    standard three series (total/infected/dead) with distinct styles,
    and load the color scheme from the properties.
    """
    constructor(plotter.arg_table, args=kwargs).apply(self)
    if self.lines is None:
        # Default series when the caller supplied none.
        defaults = (('total', 'solid'),
                    ('infected', 'dashed'),
                    ('dead', 'dotted'))
        self.lines = tuple(line_info(name, style=style)
                           for name, style in defaults)
    self.colors = plotter.colors()
    self.colors.load(self.props)
def __init__(self, props=None, cmd_args=None, **kwargs):
    """Build the simulation world: distributions, cities, people, seeding.

    NOTE(review): statement order matters throughout — cities must exist
    before clusters are nested, people before the initial infection pass.

    Args:
        props: properties object with typed ``get`` lookups (recovery /
            gestation parameters are read from it).
        cmd_args: command-line overrides forwarded to the constructor helper.
        **kwargs: additional settings applied via ``world.arg_table``.
    """
    start_time = time.time()
    # Populate attributes (population, size_x/size_y, initial_infected, ...)
    # from kwargs/props/cmd_args — presumably via world.arg_table; the
    # attributes used below are assumed to be set here. TODO confirm.
    constructor(world.arg_table, args=kwargs, props=props, cmd_args=cmd_args).apply(self)
    infection_counter.__init__(self)
    # Illness-duration distributions are log-normal, parameterized from props.
    self.recovery_dist = lognormal(props.get(float, 'recovery_time'), props.get(float, 'recovery_sd'))
    self.gestating_dist = lognormal(props.get(float, 'gestating_time'), props.get(float, 'gestating_sd'))
    self.day = self.next_day = 0
    self.pop = self.population  # backward compatibility hack
    # Counters seeded from the initial infection count.
    self.prev_infected = self.initial_infected
    self.total_infected = self.initial_infected
    self.prev_total = self.initial_infected
    self.prev_recovered = 0
    self.max_infected = 0
    self.immune = 0
    self.growth = self.max_growth = 1
    self.days_to_double = 0
    self.daily = {}  # per-day statistics, keyed by day number — TODO confirm
    self.props = props
    self.geometry = geometry(self.size_x, self.size_y)
    self._make_infection_prob()
    self._add_cities()
    # Snapshot "untouched"/"susceptible" totals before any infection occurs.
    self.untouched_cities = len(self.cities)
    self.untouched_clusters = sum(
        [c.get_untouched_clusters() for c in self.cities])
    self.susceptible_clusters = sum(
        [c.get_susceptible_clusters() for c in self.cities])
    # Weighted random city selection by target population.
    self.city_cache = cached_choice(self.cities, lambda c: c.target_pop)
    cluster.nest_clusters(self)
    self.infected_list = people_list()
    self.gestating_list = people_list()
    self._add_people()
    self.total_clusters = sum([c.get_leaf_clusters() for c in self.cities])
    self.cities_by_pop = sorted(self.cities, key=lambda c: c.pop, reverse=True)
    # Exposure/neighbor wiring needs final city populations, hence done last.
    for c in self.cities:
        c.set_exposure()
        c.make_neighbors()
    self._infect_cities()
    self.susceptible_list = people_list(
        [p for p in self.people if p.is_susceptible()])
    # setup_time measures construction; start_time marks the run's beginning.
    self.setup_time = time.time() - start_time
    self.start_time = time.time()
def __init__(self, **kwargs):
    """Configure the bubble plot on top of the base plotter settings."""
    constructor(bubbles.arg_table, kwargs).apply(self)
    super().__init__(**kwargs)
    self.graph_height = graph_height
    self.right_margin = 0.2
    self.square = True
    self.max_infected = 0
    # Bubble size shrinks as the world grows, relative to the baseline size.
    scale = bubble_size_base_world_size / self.world_size
    self.bubble_size = default_bubble_size * scale
    # One line per tracked series, colored from the loaded color scheme.
    series = (('total', self.colors.total, 'solid'),
              ('infected', self.colors.infected, 'dashed'),
              ('dead', self.colors.dead, 'dotted'))
    self.lines = tuple(plotter.make_line_info(name, color=color, style=style)
                       for name, color, style in series)
    self.cities = None
def load_model(path):
    """Rebuild a trained model from a checkpoint file.

    Args:
        path: filesystem path to a torch checkpoint containing the
            architecture hyper-parameters, ``class_to_idx``, and
            ``state_dict``.

    Returns:
        The reconstructed model with weights and class mapping restored.
    """
    # map_location='cpu' lets a checkpoint saved on a GPU machine load on a
    # CPU-only host; callers move the model to the target device afterwards.
    checkpoint = torch.load(path, map_location='cpu')
    model, _, _ = constructor.constructor(
        checkpoint['net'],
        checkpoint['dropout'],
        checkpoint['fc1_hidden_layers'],
        checkpoint['fc1_in_size'],
        checkpoint['learning_rate'],
        checkpoint['num_labels'])
    model.class_to_idx = checkpoint['class_to_idx']
    model.load_state_dict(checkpoint['state_dict'])
    return model
# break reaction.meta['id'] = n try: cgrs = ~reaction except ValueError: continue if cgrs.center_atoms: v = cgrs.center_atoms if any(x.is_radical or x.p_is_radical for _, x in cgrs.atoms()): continue if cgrs.center_atoms: print('len_fg_fg = ' + str(len(fg_fg))) perebor = enumeration_cgr(reaction) for new_reaction in perebor: new_reaction.standardize() new_reaction.meta['id'] = n if not constructor(*cycl(new_reaction), fg_fg, n): print('COMPOSITION IS None ' + str(n)) det = True break if det: break with open('/home/ravil/Desktop/Projects/retro/transfoRmULES', 'wb') as rule_dump: pickle.dump(fg_fg, rule_dump) with RDFwrite('/home/ravil/Desktop/Projects/retro/transfoRmULES.rdf') as fg: for x in fg_fg.values(): fg.write(x)
# NOTE(review): fragment starts mid-expression — the opening
# `train_loader = torch.utils.data.DataLoader(` call is outside this view.
    train_datasets, batch_size=batchsize, shuffle=True)  #, drop_last=True)#, pin_memory=True)#,num_workers=10)
valid_loader = torch.utils.data.DataLoader(
    valid_image_datasets, batch_size=64, shuffle=True)  #, drop_last=True)#, pin_memory=True)#,num_workers=10)
# Test loader is deliberately unshuffled so results map back to files.
test_loader = torch.utils.data.DataLoader(
    test_image_datasets, batch_size=64
)  #, shuffle=True, drop_last=True)#, pin_memory=True)#,num_workers=10)
# Categories
#with open('cat_to_name.json', 'r') as f:
#    cat_to_name = json.load(f)
# Build the model, optimizer and loss from the hyper-parameters above.
model, optimizer, criterion = constructor.constructor(net, dropout, fc1_hidden_layers, fc1_in_size, learning_rate, num_labels)
# Defining validation
def validation(model, valid_loader, criterion):
    # NOTE(review): function is truncated at the end of this view — the
    # accuracy computation and return statement are not visible here.
    model.to(machine)  # `machine` is presumably the target device — confirm
    valid_loss = 0
    accuracy = 0
    for inputs, labels in valid_loader:
        inputs, labels = inputs.to(machine), labels.to(machine)
        output = model.forward(inputs)
        valid_loss += criterion(output, labels).item()