def get_nearby_transporters(self, max_distance=16093):
    """Return a list of the transporters available near the payload."""
    base_query = Transporter.all().filter('available =', True)
    # The default max_distance of 16093 metres is roughly 10 miles.
    return Transporter.proximity_fetch(base_query,
                                       self.current_location,
                                       max_results=10,
                                       max_distance=max_distance)
def __init__(self, filepath):
    with io.open(filepath, 'r', encoding='utf-8') as f:
        self.raw = json.load(f)

    self.compounds = [Compound(x['name'], x['xrefs']) for x in self.raw['compounds']]
    self.remedies = [Remedy(x['name'], x['xrefs']) for x in self.raw['remedies']]
    self.enzymes = [Enzyme(x['name'], x['xrefs']) for x in self.raw['enzymes']]
    self.transporter = [Transporter(x['name'], x['xrefs']) for x in self.raw['transporter']]
    self.drugs = [Drug(x['name'], x['xrefs']) for x in self.raw['drugs']]

    publication = self.raw['publication']
    doi = publication.get('doi')
    self.publication = Reference(publication['pmid'], doi, publication['citation'])
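# For context, a minimal sketch of the document shape the constructor above
# expects, inferred from the keys it reads. The placeholder values and the
# namedtuple stand-ins for the record classes are assumptions, not part of the
# original source.
from collections import namedtuple

Entry = namedtuple('Entry', ['name', 'xrefs'])  # stand-in for Compound, Remedy, Enzyme, Transporter, Drug
Reference = namedtuple('Reference', ['pmid', 'doi', 'citation'])

example_doc = {
    "compounds": [{"name": "example-compound", "xrefs": ["XREF:0001"]}],
    "remedies": [],
    "enzymes": [],
    "transporter": [],
    "drugs": [],
    "publication": {"pmid": "0000000", "citation": "placeholder citation"},  # 'doi' is optional
}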
else: if "x_train" in locals(): ae.train(AE_STEPS, x_train, x_test, lr=0.003) else: ae.train_iter(AE_STEPS, train_loader, test_loader, lr=0.003) # Prepare the Latent Space Model if not 'x_test' in locals(): # print ("EVENTUALLY CHANGE THIS BACK AS WELL!!!") x_test = np.load("faces.npy") # x_test = unload(test_loader) encodings = ae.encode(x_test) if MODEL == 'transporter': model = Transporter(encodings, DISTR, FOLDER, BATCH_SIZE_GEN) elif MODEL == 'generator': model = Generator(encodings, DISTR, FOLDER, BATCH_SIZE_GEN) # I Could try L2 Loss instead of L1? else: raise NotImplementedError # Train the Latent Space Model if GEN_LOAD: model.load_weights(MODEL) else: model.train(STEPS, lr=0.001) # I should try adjusting the learning rate? #model.train(STEPS//2, lr=0.0003) #model.train(STEPS//2, lr=0.0001) # Display Results fake_distr = model.generate(batches=1)
def __init__(self):
    Transporter.__init__(self)
    self._channel = None
# Load the right dataset
if DATASET == 'moons':
    latent, test = make_moons()
elif DATASET == 'two_cluster':
    latent, test = two_cluster()
elif DATASET == 'eight_cluster':
    latent, test = eight_cluster()
elif DATASET == 'circles':
    latent, test = make_circles()
else:
    raise NotImplementedError

# Prepare the latent-space model
if MODEL == 'transporter':
    model = Transporter(latent, DISTR, FOLDER, BATCH_SIZE_GEN)
elif MODEL == 'generator':
    model = Generator(latent, DISTR, FOLDER, BATCH_SIZE_GEN)
else:
    raise NotImplementedError

# Train the latent-space model
if GEN_LOAD:
    model.load_weights(MODEL)
else:
    model.train(STEPS, lr=0.0001, images=True)

# Evaluate
model.evaluate()

# Display results
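# For reference, a minimal sketch of one dataset helper matching the
# (latent, test) contract used above. This is an assumption, not the original
# implementation: it leans on sklearn's make_moons and an arbitrary 90/10 split.
import numpy as np
from sklearn import datasets


def make_moons_dataset(n_samples=10000, noise=0.05, seed=0):
    points, _ = datasets.make_moons(n_samples=n_samples, noise=noise, random_state=seed)
    points = points.astype(np.float32)
    split = int(0.9 * len(points))
    return points[:split], points[split:]  # (latent, test)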
def test_Transporter_images_is_a_text_file():
    trans_obj = Transporter()
    trans_obj.add_images("test_emailer.py")
    # If no images are attached, no Content-ID header is added to msg_root.
    print(trans_obj.msg_root)
    assert 'Content-ID' not in trans_obj.msg_root
def test_Transporter_images_is_a_string_and_is_missing():
    trans_obj = Transporter()
    trans_obj.add_images("missing_image.jpg")
    # If no images are attached, no Content-ID header is added to msg_root.
    assert 'Content-ID' not in trans_obj.msg_root
def test_Transporter_missing_images():
    trans_obj = Transporter()
    trans_obj.add_images(["missing_image.jpg"])
    # If no images are attached, no Content-ID header is added to msg_root.
    assert 'Content-ID' not in trans_obj.msg_root
def test_Transporter_message_text():
    trans_obj = Transporter()
    trans_obj.build_message_text(string_message='this is a message')
    assert trans_obj.string_message == 'this is a message'
def test_Transporter_creation():
    trans_obj = Transporter()
    assert trans_obj is not None
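# A minimal sketch of an emailer-style Transporter consistent with the tests
# above. The method names and attributes come from the tests; the MIME layout
# and the skip-on-missing-or-non-image behaviour are assumptions, not the
# original implementation.
import os
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText


class Transporter:
    def __init__(self):
        self.msg_root = MIMEMultipart('related')
        self.string_message = ''

    def add_images(self, images):
        # Accept a single path or a list of paths, as the tests do.
        if isinstance(images, str):
            images = [images]
        for idx, path in enumerate(images):
            if not os.path.isfile(path):
                continue  # missing files are skipped, so no Content-ID is added
            with open(path, 'rb') as fh:
                data = fh.read()
            try:
                part = MIMEImage(data)  # raises TypeError if data is not an image
            except TypeError:
                continue  # e.g. a text file such as test_emailer.py
            part.add_header('Content-ID', '<image{}>'.format(idx))
            self.msg_root.attach(part)

    def build_message_text(self, string_message=''):
        self.string_message = string_message
        self.msg_root.attach(MIMEText(string_message, 'plain'))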