def test_segment_contains(self):
    sg1 = segment((0, 0), (-8, 0))
    self.assertTrue(sg1.contains((-2, 0)))
    self.assertFalse(sg1.contains((-2, 1)))
    sg1 = segment((0, 0), (0, 10))
    self.assertFalse(sg1.contains((0, -20)))
    self.assertTrue(sg1.contains((0, 1)))
def make_system(self):  # called first
    self.size = self.options['size']
    self.resolution = self.options['resolution']
    lx, ly = self.size[0], self.size[1]
    # coarsest cell dimension implied by the requested grid resolution
    cell_size_max = max(lx / self.resolution[0], ly / self.resolution[1])
    if (self.min_mean_free_path is not None
            and cell_size_max > self.min_mean_free_path):
        message = ("Max cell size is {:e} m, which exceeds the min mean free "
                   "path of {:e} m.".format(cell_size_max, self.min_mean_free_path))
        warnings.warn(message)
    self.size = [lx, ly, self.options['lz']]
    # four wall segments forming the rectangular simulation boundary
    walls = [segment(Point(0, 0), Point(0, ly)),
             segment(Point(0, 0), Point(lx, 0)),
             segment(Point(lx, 0), Point(lx, ly)),
             segment(Point(0, ly), Point(lx, ly))]
    return walls
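# Illustration only, not project code: a sketch of how the rectangular boundary
# returned by make_system could be checked against a particle's displacement
# using segment.intersect (exercised by test_segment_intersection below).
# `find_wall_hit` is a hypothetical helper; `Point` and `segment` are assumed
# to behave as they do in the snippets above and below.
def find_wall_hit(walls, old_pos, new_pos):
    """Return (wall, point) for the first wall crossed by the move old_pos -> new_pos, or None."""
    path = segment(Point(*old_pos), Point(*new_pos))
    for wall in walls:
        hit = path.intersect(wall)
        if hit is not None:
            return wall, hit
    return None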
def start():
    # collect every tag file saved under the four tag categories
    files = []
    for folder in (".saved/tags/blocks", ".saved/tags/entity_types",
                   ".saved/tags/items", ".saved/tags/liquids"):
        files.extend([
            os.path.join(folder, f) for f in os.listdir(folder)
            if os.path.isfile(os.path.join(folder, f))
        ])
    for f in files:
        sp = f.split("/")
        if main.segment("minecraft:", 0, sp[-1]):
            # replace the "minecraft:" prefix with "minecraft_" and ensure a .txt extension
            split = f.split(":")
            if not split[1].endswith(".txt"):
                os.rename(f, f"{'/'.join(sp[:-1])}/minecraft_{split[1]}.txt")
            else:
                os.rename(f, f"{'/'.join(sp[:-1])}/minecraft_{split[1]}")
def test_segment_intersection(self):
    sg1 = segment([0, 0], [2, 0])
    sg2 = segment([1, 0], [1, 2])
    self.assertEqual(sg1.intersect(sg2), (1, 0))
    self.assertEqual(sg2.intersect(sg1), (1, 0))
    sg3 = segment((10, 0), (20, 0))
    self.assertIsNone(sg1.intersect(sg3))
    sg1 = segment((0, 0), (8, 0))
    sg2 = segment((6, 3), (6, 7))
    self.assertIsNone(sg1.intersect(sg2))
    self.assertIsNone(sg2.intersect(sg1))
    sg1 = segment((0, 0), (-8, 0))
    sg2 = segment((-2, 1), (-2, -7))
    self.assertEqual(sg1.intersect(sg2), (-2, 0))
    self.assertEqual(sg2.intersect(sg1), (-2, 0))
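# Hedged sketch only: the real `segment` class is not shown in these excerpts.
# The following minimal implementation is inferred from the behaviour the tests
# above exercise (construction from 2-tuples/lists, contains(point) -> bool,
# intersect(other) -> (x, y) tuple or None). The name `segment_sketch` and all
# internals are assumptions, not the project's actual code.
class segment_sketch:
    def __init__(self, p1, p2):
        self.p1 = (p1[0], p1[1])
        self.p2 = (p2[0], p2[1])

    def contains(self, p, eps=1e-12):
        # p lies on the segment if it is collinear with (p1, p2)
        # and falls inside the segment's bounding box
        (x1, y1), (x2, y2) = self.p1, self.p2
        x, y = p
        cross = (x2 - x1) * (y - y1) - (y2 - y1) * (x - x1)
        if abs(cross) > eps:
            return False
        return (min(x1, x2) - eps <= x <= max(x1, x2) + eps
                and min(y1, y2) - eps <= y <= max(y1, y2) + eps)

    def intersect(self, other, eps=1e-12):
        # standard parametric segment-segment intersection:
        # returns the crossing point, or None if the segments do not meet
        (x1, y1), (x2, y2) = self.p1, self.p2
        (x3, y3), (x4, y4) = other.p1, other.p2
        denom = (x2 - x1) * (y4 - y3) - (y2 - y1) * (x4 - x3)
        if abs(denom) < eps:
            return None  # parallel or collinear: no single crossing point
        t = ((x3 - x1) * (y4 - y3) - (y3 - y1) * (x4 - x3)) / denom
        u = ((x3 - x1) * (y2 - y1) - (y3 - y1) * (x2 - x1)) / denom
        if -eps <= t <= 1 + eps and -eps <= u <= 1 + eps:
            return (x1 + t * (x2 - x1), y1 + t * (y2 - y1))
        return None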
        # loss_tmp, acc_tmp = sess.run([loss_op, accuracy],
        #                              feed_dict={X: batch_x, X_length: batch_x_length,
        #                                         Y: batch_y})
        # print(acc_tmp)
        # pdb.set_trace()
        res = {}  # key: three keys for each word in tweet
        for j in range(batch_x_length[i]):
            res[j, 'state'] = state_list[0][0][j]
            res[j, 'word'] = vocab_by_value[batch_x[i][j]]
            res[j, 'predicted'] = predicted[j]
        result.append(res)
    return previous_valid_acc, result, batch_y


embed_size = 20
full, vocab_dict, embedding_matrix = segment('train.csv', size=embed_size)

# invert the vocabulary: index -> word
vocab_by_value = {}
for key in vocab_dict:
    vocab_by_value[vocab_dict[key]] = key

num_hidden = 3
dropout = 1  # this is actually 1-dropout...
previous_valid_acc, result, batch_y = train_and_visualize(num_hidden, dropout)


def plot_neuron(neuron_num):
    for i in range(len(result)):  # each res is a tweet
        words = []  # contains words and prediction so far
        acts = []