def generate_date(self):
    self.instances = []
    for country in self.countries:
        if country == 'Not_Stamp':
            files = glob.glob(self.dataset_folder + os.path.sep + country + os.path.sep + "*.*")
            for file in files:
                self.instances.append(Instance(file, country.ljust(15), '-1000'))
        else:
            for year in self.years:
                files = glob.glob(self.dataset_folder + os.path.sep + country + os.path.sep + year + os.path.sep + "*.*")
                for file in files:
                    self.instances.append(Instance(file, country.ljust(15), year))
    print('Total # of instances: ' + str(len(self.instances)))
    print('Shuffling instances')
    rr = np.random.permutation(len(self.instances))  # shuffled index order
    self.instances = np.take(self.instances, rr, axis=0)
    print('Generating training instances')
    self.training_instances = []
    for i in range((len(self.instances) * 2) // 3):  # first two thirds for training
        self.training_instances.append(self.instances[i])
    print('Generating testing instances')
    self.testing_instances = []
    for i in range((len(self.instances) * 2) // 3, len(self.instances)):  # remaining third for testing
        self.testing_instances.append(self.instances[i])
    print('Done.')
def test_find_clique_by_neighbors_1_OK(self):
    graph = Instance()
    graph.read_file(SolutionGreedyNeighborsCliqueTest.GRAPH_1_TEST)
    solution = SolutionGreedyNeighbors(graph, 'test_graph_greedy_simple_1')
    clique = solution.find_clique_by_neighbors()
    print(clique)
    self.assertEqual([1, 2, 3, 5], clique)
def get_by_arg(self, arg, module_name, dictionary=False):
    instance = Instance()
    instance.__class__.__name__ = module_name

    def ret(x):
        # Quote string values for the SQL literal, stringify everything else
        return '"' + x + '"' if isinstance(x, str) else str(x)

    ids = ""
    for kk in arg.keys():
        ids += kk + "=" + ret(arg[kk]) + " AND "
    ids = ids[:len(ids) - 5]  # drop the trailing " AND "
    query = "SELECT " + \
            ", ".join(instance.to_dict().keys()) + \
            " FROM " + module_name + \
            " WHERE " + ids
    return self.execute_query(query, module_name,
                              one_result=True, dictionary=dictionary)
def test_apply_ls(self):
    solution = {16, 18, 19, 20, 21, 23}
    graph = Instance()
    file = SolutionGraspTests.GRAPH_TEST
    graph.read_file(file)
    instance_sol = SolutionGrasp()
    instance_sol.apply_ls(graph, solution)
def __create_instances(self, dep_id, common_section, config_sections):
    emailid = common_section.emailid
    instance_user_name = common_section.instance_user_name
    # Launch instances
    for section in config_sections:
        section_name = section.section_name
        count = int(section.count)
        tag = section.tag
        machinetype = section.machinetype
        disk1image = section.disk1image
        disk2image = section.disk2image
        disk1type = section.disk1type
        disk2type = section.disk2type
        disk1size = section.disk1size
        disk2size = section.disk2size
        for num in range(count):
            instance_name = emailid + "-" + dep_id + "-" + section_name + "-" + tag + "-" + str(num)
            disk1 = Disk(instance_name + "-d1", disk1image, disk1size, disk1type)
            disk2 = Disk(instance_name + "-d2", disk2image, disk2size, disk2type)
            disk1.create()
            disk2.create()
            disk_list = [disk1, disk2]
            instance = Instance(instance_name, disk_list, machinetype, instance_user_name)
            instance.create(common_section)
            if section_name == "master":
                self.master_instances.append(instance)
            else:
                self.slave_instances.append(instance)
            self.instances.append(instance)
def readInstances(self, path, maxInst=-1):
    insts = []
    r = open(path, encoding='utf8')
    inst = Instance()
    for line in r.readlines():
        line = line.strip()
        if line == "" and len(inst.m_char) != 0:
            # A blank line terminates the current instance
            if (maxInst == -1) or (maxInst > len(insts)):
                inst.m_word = get_words(inst.m_char, inst.m_label)
                insts.append(inst)
            else:
                return insts
            inst = Instance()
        else:
            info = line.split(" ")
            if len(info) != 3:
                print("error format")
                continue
            uni_char = unicodedata.normalize('NFKC', info[0])
            inst.m_char.append(uni_char)
            bi_char = unicodedata.normalize('NFKC', info[1][4:])
            inst.m_bichar.append(bi_char)
            inst.m_label.append(info[2])
    r.close()
    if len(inst.m_char) != 0:
        insts.append(inst)
    return insts
def build(self, image, mask_result):
    result = []
    # Ensure 4 channels (b, g, r, alpha)
    height, width, channels = image.shape
    if channels != 4:
        temp = np.full((height, width, 4), 255, dtype=np.uint8)
        temp[:, :, :3] = image
        image = temp
    for mask in mask_result:
        root = Group()
        root.add_node(Instance(image, "background", (0, 0)))
        for i in range(len(mask["class_ids"])):
            if mask["scores"][i] < self.threshold:
                continue
            top, left, down, right = mask["rois"][i]
            class_name = self.to_class_name(mask["class_ids"][i])
            splitted_image = self.masking(
                image[top:down, left:right, :],
                mask["masks"][top:down, left:right, i])
            image[top:down, left:right, :] = self.masking(
                image[top:down, left:right, :],
                np.logical_not(mask["masks"][top:down, left:right, i]))
            root.add_node(Instance(splitted_image, class_name, (left, top)))
        result.append(root)
    return result
def test_find_max_clique_1_OK(self):
    graph = Instance()
    graph.read_file(SolutionGreedyMaxCliqueTest.GRAPH_1_TEST)
    solution = SolutionGreedy(graph, 'test_graph_greedy_simple_1')
    max_clique = solution.find_max_clique()
    print(max_clique)
    self.assertEqual([1, 2, 3, 5], max_clique)
def get_worker_instance(self, instances, inst_type):
    """
    Work out if there is already an existing instance that will fulfill
    the job. If there is not, then a new instance should be requested.
    Because this thread is running by itself, we can wait for the
    instance to launch and be set up before returning.
    """
    # Check if one of the existing resources will do the job
    for inst in instances:
        if inst.type == inst_type:
            return inst
    # Otherwise acquire a new instance
    self.logger.debug("no instances found, so starting a new one.")
    # No instances of this type exist, so start one
    zone = self.get_cheapest_spot_zone(inst_type)
    subnet = self.get_subnet_id(zone)
    cpus = 0
    instance_types = self.load_instances()
    for ins in instance_types:
        if ins.type == inst_type:
            cpus = ins.cpus
            break
    # Create an instance object from this data
    new_inst = Instance(inst_type, subnet, zone, self.inst_desc['bid'],
                        self.db_manager, instance_types)
    # Now launch the instance and wait for it to come up
    new_inst.launch()
    return new_inst
def add_point_light(self, light):
    sphere = Instance(Sphere(Point3(0, 0, 0), 0.1, material=Emissive()))
    sphere.translate(light.location.x, light.location.y, light.location.z)
    sphere.casts_shadows = False
    self.geometry.append(sphere)
    self.lights.append(light)
def test_find_clique_by_neighbors_2_OK(self):
    graph = Instance()
    graph.read_file(SolutionGreedyNeighborsCliqueTest.GRAPH_2_TEST)
    solution = SolutionGreedyNeighbors(graph, 'test_graph_type_1')
    clique = solution.find_clique_by_neighbors()
    print(clique)
    self.assertEqual([3, 11, 17, 21, 25, 28, 32, 36, 38, 39, 66, 72], clique)
def test_kexu(target):
    inst_files = os.listdir('./frb30-15-msc')
    inst_names = [fname[0:len(fname) - 4] for fname in inst_files]  # strip extension
    inst = Instance()
    inst.load_kexu("./frb30-15-msc/" + inst_files[target])
    return (inst, inst_names[target])
def test_compute_label(self):
    v1 = {"features": (3, 4), "label_array": (0, 0, 0, 0, 0, 1, 0, 0, 0)}
    ins = Instance(v1['features'], v1['label_array'])
    self.assertEqual(ins.label, 5)
    v1 = {"features": (3, 4), "label": 7}
    ins = Instance(v1['features'], label=v1['label'])
    self.assertEqual(ins.label, 7)
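# Note: the Instance class exercised by test_compute_label is not included in
# this listing. The assertions above imply that an explicit `label` keyword is
# used verbatim, and that otherwise the label is derived as the argmax of the
# one-hot `label_array`. A minimal hypothetical sketch consistent with the
# test (not the original implementation):
import numpy as np

class Instance:
    def __init__(self, features, label_array=None, label=None):
        self.features = features
        if label is not None:
            self.label = label                        # explicit label wins
        elif label_array is not None:
            self.label = int(np.argmax(label_array))  # one-hot -> class index
        else:
            self.label = None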
def test_solution_OK(self):
    graph = Instance()
    graph.read_file(SolutionTest.GRAPH_SIMPLE_1_TEST_PTH)
    solution = Solution(graph, 'test-graph-simple-1')
    cliques = solution.get_solution_max_cliques()
    print(cliques)
    expected_sol = [{0, 1, 2}]
    self.assertEqual(expected_sol, cliques)
def test_find_clique_by_neighbors_3_OK(self):
    graph = Instance()
    graph.read_file(SolutionGreedyNeighborsCliqueTest.GRAPH_3_TEST)
    solution = SolutionGreedyNeighbors(graph, 'test_graph_type_1_worst')
    clique = solution.find_clique_by_neighbors()
    print(clique)
    self.assertEqual([0, 6, 7, 8, 9, 40, 92, 99, 109, 115, 142, 154, 284,
                      307, 366, 379, 402, 412, 429, 448], clique)
def send_status(instance_id):
    post_params = request.get_json()
    instancee = Instance(id=instance_id)
    if not instancee.is_valid():
        return Response(to_json(None), mimetype=_JSON_MIME, status=404)
    status = 200 if instancee.set_status(post_params.get('status'),
                                         post_params.get('error')) else 422
    _db.update_instance(instancee)
    return Response(to_json(None), mimetype=_JSON_MIME, status=status)
def test_find_max_clique_2_OK(self):
    graph = Instance()
    graph.read_file(SolutionGreedyMaxCliqueTest.GRAPH_2_TEST)
    solution = SolutionGreedy(graph, 'test_graph_type_1')
    max_clique = solution.find_max_clique()
    print(max_clique)
    self.assertEqual([3, 11, 17, 21, 25, 28, 32, 36, 38, 39, 66, 72], max_clique)
def get_instance_from_row(self, row, column_indices):
    instance = Instance()
    row_offset = row.offset
    (entity_text, entity_inferred_name) = \
        get_row_entity_text_and_entity_inferred_name(row, column_indices)
    instance.attributes["entity_inferred_name"] = entity_inferred_name[:2048]
    instance.attributes['entity_text'] = entity_text[:2048]
    instance.attributes["id"] = 0
    return self.pipe.push_single(instance)
def get_map(self, num=50):
    grid = []  # avoid shadowing the built-in `map`
    for x1 in np.linspace(-1.0, 1.0, num):
        for x2 in np.linspace(-1.0, 1.0, num):
            p = Instance([x1, x2], None)
            p.y = self.clasify(p)
            grid.append(p)
    return grid
def generate_candidates(text):
    doc = nlp(text)
    instances = []
    all_mentions = []
    mentions_per_sentence = []
    for sentence in doc.sents:
        mentions = []
        # add entities from NER
        for ent in sentence.ents:
            start = ent.start
            end = ent.end
            if ent[0].text.lower() in STOP_WORDS or ent[0].text.lower() in TITLES:
                start += 1
            if ent[-1].text == "." and ent.end == sentence.end:
                end -= 1
            if end > start:
                mention = Span(doc, start, end)
                mention._.fused_type = [ent.label_]
                mentions.append(mention)
        # add pronouns as mentions
        for word in sentence:
            if word.tag_ in PRONOUN_TAGS or word.text.lower() in all_pronouns:
                mention = Span(doc, word.i, word.i + 1)
                if mention.text.lower() in inv_pronoun_map:
                    mention._.fused_type = inv_pronoun_map[mention.text.lower()]
                    mention._.is_pronoun = True
                    mentions.append(mention)
        if USE_NOUN_CHUNKS:
            for chunk in sentence.noun_chunks:
                root = chunk.root
                if root.ent_type_ or root.tag_ in PRONOUN_TAGS:
                    # we already have it
                    continue
                mentions.append(chunk)
        mentions_per_sentence.append(mentions)
        all_mentions.extend(mentions)
    # pair mentions within a sentence and across adjacent sentences
    for i, mentions in enumerate(mentions_per_sentence):
        for (subject_entity, object_entity) in itertools.product(mentions, mentions):
            instances.append(Instance(subject_entity, object_entity))
        if i > 0:
            prev_sentence_mentions = mentions_per_sentence[i - 1]
            for (subject_entity, object_entity) in itertools.product(mentions, prev_sentence_mentions):
                instances.append(Instance(subject_entity, object_entity))
            for (subject_entity, object_entity) in itertools.product(prev_sentence_mentions, mentions):
                instances.append(Instance(subject_entity, object_entity))
    if USE_ENTITY_LINKER:
        entitylinker.link(doc, all_mentions)
    if USE_BERT:
        bert_wrapper.run(doc)
    return instances
def test_find_ratio_clique_2_OK(self):
    file = SolutionGreedyCliqueRatioTest.GRAPH_3_TEST
    graph = Instance()
    graph.read_file(file)
    solution = SolutionGreedyRatio(graph, os.path.splitext(file)[0])
    clique = solution.find_clique_by_ratio()
    print(clique)
    print(solution.cardinality)
    print(solution.sol_value)
def test_cosine_dist(self):
    v1 = Instance((5, 0, 3, 0, 2, 0, 0, 2, 0, 0))
    v2 = Instance((3, 0, 2, 0, 1, 1, 0, 1, 0, 1))
    self.assertEqual(nn.cosine_dist(v1, v2), 0.06439851429360033)
    v1 = Instance((3, 4))
    v2 = Instance((6, 8))
    self.assertEqual(nn.cosine_dist(v1, v2), 0)
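# Note: nn.cosine_dist is defined elsewhere. The expected values above match
# one minus the cosine similarity of the two vectors: for the first pair,
# 1 - 25/sqrt(42*17) = 0.06439851..., and collinear vectors such as (3, 4)
# and (6, 8) give 0. A hypothetical sketch, assuming Instance exposes its
# vector as a `features` attribute:
import math

def cosine_dist(a, b):
    dot = sum(x * y for x, y in zip(a.features, b.features))
    norm_a = math.sqrt(sum(x * x for x in a.features))
    norm_b = math.sqrt(sum(x * x for x in b.features))
    return 1.0 - dot / (norm_a * norm_b)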
def test_generated(target):
    inst_files = os.listdir('./generated')
    inst_names = [fname[0:len(fname)] for fname in inst_files]
    # Testing the Instance class.
    inst = Instance()
    inst.load_delorme("./generated/" + inst_files[target])
    return (inst, inst_names[target])
def setUp(self):
    parser = argparse.ArgumentParser(
        description="Test instance",
        parents=[instance.parser, roadef.parser]
    )
    args = parser.parse_args(["-p", "data/model_example.txt",
                              "-i", "data/assignment_example.txt"])
    self.args = args
    self.inst = Instance(args)
def create_new_linode(linode_id=None):
    j = Instance(linode_id)
    # Wait until the instance reports the running state
    while j.status != 'running':
        time.sleep(5)
    time.sleep(5)
    j.init_env()
    # j.ssh_cmd(['nohup python 1.py > out 2>&1 &'])
    time.sleep(60 * 60)
def test_read_file_ok(self):
    graph = Instance()
    graph.read_file(NodeTest.GRAPH_1_TEST_PTH)
    self.assertEqual(100, graph.get_total_nodes())
    self.assertEqual(2266, graph.get_total_edges())
    self.assertEqual(100, len(graph.get_nodes()))
    self.assertTrue(GraphUtils.are_adjacent(graph.get_node(0), graph.get_node(1)))
def __init__(self, name, config, comp=None, net=None, server=False):
    Instance.__init__(self, name, config, comp, net)
    self.is_server = server
    if 'I' in config.protocols:
        self.ping = PingTool(self)
    else:
        self.ping = None
    if config.tp_tool:
        self.tp_tool = config.tp_tool(self)
    else:
        self.tp_tool = None
def test_verify_clique(self):
    SET_D = "test_files/setsPruebasFinal/set-d/wind-2004.txt"
    SET_F = "test_files/setsPruebasFinal/set-f/DIMACS10/email.txt"
    clique = {94, 66, 278, 14}
    graph = Instance()
    file = SET_D
    graph.read_file(file)
    if GraphUtils.is_clique_solution(graph, clique):
        print("yes")
    else:
        print("no")
def test_grasp_OK(self):
    graph = Instance()
    file = SolutionGraspTests.GRAPH_SIMPLE_1_TEST_PTH
    graph.read_file(file)
    solution_type = SolutionGrasp.ADJACENT
    fixed_seed = 1
    alpha = 0.5
    instance_solution = SolutionGrasp()
    result = instance_solution.find_grasp_solution(graph, file, solution_type,
                                                   fixed_seed, alpha)
def run1():
    """Run with restricted service level."""
    ins = Instance(conf1)
    ins.sale = shared_sale
    ins.evaluate_point_prediction()
    ins.run()
    ins.plot()
def run2():
    """Run with high service level."""
    ins = Instance(conf2)
    ins.sale = shared_sale
    ins.evaluate_point_prediction()
    ins.run()
    ins.plot()
def run():
    (train, undef, test) = data.load_data_wrapper()
    log("Training set : %8d instances", (len(train)))
    log("Testing set  : %8d instances", (len(test)))

    txt = "Converting %s set to Instance objects"
    log(txt, ("training"))
    train = [Instance(t[0], label_array=t[1]) for t in train]
    log(txt, ("test"))
    test = [Instance(t[0], label=t[1]) for t in test]

    instance = 1
    start_compare = time.time()
    # Iterate through the testing set
    test_subset = test[0:10]
    for i in test_subset:
        log("Instance %d", (instance))
        # Find the closest pair from the training set
        closest_pair = train[0]
        min_dist = cosine_dist(i, closest_pair)
        for j in train[1:1000]:
            dist = cosine_dist(i, j)
            if dist < min_dist:
                min_dist = dist
                closest_pair = j
            if dist == 0:
                break
        i.predicted_label = closest_pair.label
        log(">>> %d, actual : %s , predict : %s",
            (instance, i.label, i.predicted_label))
        instance += 1
    end_compare = time.time()

    # Compute confusion matrix, accuracy, precision and recall for each label
    log("----- Confusion Matrix -----")
    matrix = confusion_matrix(test_subset)
    log("%s", (pandas.DataFrame(matrix)))
    log("Accuracy : %0.2f", (accuracy(matrix)))
    for i in range(NUM_LABEL):
        log("Label %d : precision: %.2f \t recall: %.2f",
            (i, precision(matrix, i), recall(matrix, i)))
    log("----------------")
    log("Time spent : %.0f sec", (end_compare - start_compare))
def try_add(instance: Instance) -> bool:
    if not any(
        instance.ip == test.ip and instance.username == test.username
        for test in InstanceManager._instances
    ):
        instance.start_communication()
        InstanceManager._instances.append(instance)
        logger.info(f'Added instance {instance}')
        InstanceManager.on_instances_update()
        return True
    else:
        return False
def create(self, **data):
    instance = Instance(aq_base(self))
    # load datasheets
    for schId in self.schemas:
        if schId in data:
            if isinstance(data[schId], Datasheet):
                ds = instance.getDatasheet(schId)
                ds.__load__(data[schId])
    event.notify(ObjectCreatedEvent(instance))
    return instance
def main():
    f = "/Users/gabriel/Desktop/instances.txt"
    base_code = ord("A")
    with open(f) as f:
        lines = f.readlines()
    s = int(lines[0]) * int(lines[1])
    n = int(lines[2])
    l = 4
    instances = list()
    while n > 0:
        n -= 1
        pattern = np.zeros((1, s))
        pattern_class = np.zeros((1, int(lines[2])))
        pt_index = 0
        inst = Instance()
        for i in range(int(lines[1])):
            for c in lines[l].strip():
                pattern[0][pt_index] = 1 if c == "#" else 0
                pt_index += 1
            l += 1
        pattern_class[0][ord(lines[l].strip()) - base_code] = 1
        l += 1
        inst.attributes = pattern
        inst.output_values = pattern_class
        inst.normalize()
        instances.append(inst)
    ann = NeuralNet([35, 35, 26], True)
    ann.learning_rate = 0.5
    ann.momentum = 0.4
    ann.instances(instances)
    ann.train(55)
    correct = 0
    for i in instances:
        ann.instance(i)
        ann.feed_forward()
        f = chr(65 + np.argmax(i.output_values))
        print("F={}".format(f))
        f_hat = chr(65 + np.argmax(ann.output))
        print("F'={}".format(f_hat))
        if f == f_hat:
            correct += 1
        else:
            print("ERROR!")
        print()
    print(str(correct / float(len(instances)) * 100) + " %")
def list_instances(self):
    insts = self.handle.servers.list()
    # Update instance list in self.id2inst
    self.id2inst.clear()
    instances = []
    for inst in insts:
        if not self.pattern.match(inst.name):
            continue
        instance = Instance(inst, self.ssh_info)
        self.id2inst[instance.get_id()] = instance
        instances.append(instance)
    return instances
def __init__(self, name, config, comp=None, net=None, server=False):
    Instance.__init__(self, name, config, comp, net)
    self.is_server = server
    if 'I' in config.protocols:
        self.ping = PingTool(self)
    else:
        self.ping = None
    if config.tp_tool:
        self.tp_tool = config.tp_tool(self, config.perf_tool_path)
    else:
        self.tp_tool = None
    # Override the config drive option to save in instance
    if config.config_drive:
        self.config_drive = True
    else:
        self.config_drive = None
def create(self, image=None, flavor_type=None, ssh_access=None, nics=None,
           az=None, management_network_name=None, sec_group=None,
           init_file_name=None):
    """Create an instance

    :return: True on success, False on error
    """
    rc = Instance.create(self, image, flavor_type, ssh_access, nics, az,
                         management_network_name, sec_group, init_file_name)
    if not rc:
        return False
    if self.tp_tool and not self.tp_tool.install():
        return False
    self.add_multicast_route()
    if not self.is_server:
        return True
    if self.tp_tool and not self.tp_tool.start_server():
        return False
    return True
def createInstancesFromFile(fileName=None):
    """Create the Instance objects from the file and expose them in a list"""
    if fileName is None:
        fileName = DataReader.INSTANCES_FILE
    for instRow in DataReader.__getRows(fileName):
        instance = Instance(int(instRow[0]), int(instRow[1]), int(instRow[2]))
        host = DataReader.findHostByID(instance.hostID)
        if host is not None:
            instance.setHost(host)
            host.addInstance(instance)
        else:
            logging.error('CREATE INSTANCE: Orphan instance - Skipping instance')
            continue
        logging.debug('DATA READER: Adding instance ' + str(instance.hostID))
        DataReader.instances.append(instance)
def on_open(self, request):
    self.instance = Instance.get()
    self.uid = request.get_cookie('uid').value
    self.player = self.instance.add_player(self, request.get_argument('name'))
    state = self.instance.serialize()
    state['me'] = self.player.entity.id
    self.emit('initialize', state)
class TestInstance(unittest.TestCase):

    def setUp(self):
        self.i1 = Instance(1, 2, 3)
        self.i2 = Instance(1, 2, 5)
        self.i3 = Instance(4, 2, 5)
        self.i1 = Instance(1, 1, 1)
        self.host = Host(1, 2, 3)

    def testCreateInstance(self):
        newInstance = Instance(1, 2, 3)
        self.assertIsInstance(newInstance, Instance)

    def testEquals(self):
        self.assertNotEqual(self.i1, self.i3)
        self.assertEqual(self.i1, self.i2)

    def testSetHost(self):
        self.i1.setHost(self.host)
        self.assertIsNotNone(self.i1.host)
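# Note: the Instance and Host classes under test are defined elsewhere. For
# testEquals to pass, Instance equality can only depend on the first
# constructor argument (an id): i1 and i2 share id 1 and compare equal even
# though their other fields differ, while i3 (id 4) does not. A hypothetical
# sketch of that contract (field names other than the id are placeholders):
class Instance:
    def __init__(self, instance_id, cpu, mem):
        self.id = instance_id
        self.cpu = cpu
        self.mem = mem
        self.host = None

    def __eq__(self, other):
        return isinstance(other, Instance) and self.id == other.id

    def setHost(self, host):
        self.host = host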
def registerInstance(self, theInstName):
    """ Register instance to dependator

    >>> dep = Dependator()
    >>> dep.registerInstance('THE_INSTANCE') # doctest: +ELLIPSIS
    <instance.Instance object at 0x...>
    >>> dep.instances # doctest: +ELLIPSIS
    {'THE_INSTANCE': <instance.Instance object at 0x...>}

    Second registration with the same name should not add any new entry

    >>> dep.registerInstance('THE_INSTANCE')
    >>> dep.instances # doctest: +ELLIPSIS
    {'THE_INSTANCE': <instance.Instance object at 0x...>}

    :type theInstName: str
    :param theInstName: Instance name to register

    :rtype: Instance
    :return: New instance registered in dependator

    :raise RegistrationException:
        Registration exception if the trigger method was not registered properly.
    """
    if __debug__:
        self.logger.debug('registerInstance %s' % (theInstName, ))
    if theInstName in self.instances:
        return None
    instance = Instance(theInstName)
    # register notificator triggers for state changes
    for st in State.ALL:
        result = self.notificator.registerTrigger(self.triggerHandler, theInstName, st)
        instance.stateTriggers[st] = result[notificator.ID]
    self.instances[theInstName] = instance
    return instance
def __init__(self, config):
    self.tracer = Tracer(self)
    self.sampler = Sampler(config.samples)
    self.view_plane = ViewPlane()
    self.background_color = RGBColor(0.1, 0.1, 0.1)
    self.camera = Camera()
    self.geometry = []
    self.lights = []
    self.data = numpy.zeros((self.view_plane.vres, self.view_plane.hres, 3),
                            dtype=numpy.uint8)
    mesh = load_obj("teapot.obj")
    triangles = []
    if config.debug:
        for face in mesh.faces:
            triangle = SmoothMeshTriangle(mesh, face, material=Phong(RGBColor.Red()))
            self.add_geometry(triangle)
    else:
        for face in mesh.faces:
            triangle = Instance(SmoothMeshTriangle(mesh, face, material=Normal()))
            triangle.scale(0.02, 0.02, 0.02)
            triangle.rotate_y(math.pi / 10.0)
            triangle.translate(-1, -1.2, -5)
            triangles.append(triangle)
    disk = Instance(Disk(Point3(0, 0, 0), Vector3(0, 0, 1), 5,
                         material=Matte(RGBColor(0.9, 0.9, 0.9))))
    disk.translate(0, 0, -8)
    self.add_geometry(disk)
    # sphere = Instance(Sphere(Point3(0, 0, 0), 1, material=Matte(RGBColor.Red())))
    # sphere.translate(-1, 0, -5)
    grid = Grid()
    grid.setup(triangles)
    self.add_geometry(grid)
    self.ambient_light = AmbientLight()
    self.add_point_light(PointLight(Point3(-2.5, -1.5, -1.0)))
def __init__(self, name, conf, os_auth_info):
    BaseClient.__init__(self, name, conf, os_auth_info)
    self.handle = client.Client(
        os_auth_info["username"],
        os_auth_info["password"],
        os_auth_info["tenant_name"],
        os_auth_info["auth_url"],
        insecure=True,
        service_type="compute")
    # Maybe the client doesn't prefer ssh route
    self.ssh_info = conf.get("ssh", None)
    servers = []
    try:
        servers.extend(self.handle.servers.list())
    except NotFound:
        logging.warn("No servers present for client %s" % name)
    self.pattern = re.compile("^" + os_auth_info["username"] + "-[0-9]{3}")
    for inst in servers:
        if not self.pattern.match(inst.name):
            continue
        instance = Instance(inst, self.ssh_info)
        instanceId = instance.get_id()
        self.id2inst[instanceId] = instance
        vols = []
        try:
            vols.extend(self.handle.volumes.get_server_volumes(instanceId))
        except NotFound:
            logging.warn("No volume attached for instance %s(%s)"
                         % (instance.get_name(), instance.get_id()))
        volumes = self.id2vols[instanceId] = []
        for vol in vols:
            volumes.append(Volume(instance, vol))
def load_instances(instance_strs, word_vectors):
    '''Load training examples

    Args:
        instance_strs: each string is a training example
        word_vectors: an instance of vec.wordvector

    Return:
        instances: a list of Instance
    '''
    instances = [Instance.parse_from_str(i, word_vectors) for i in instance_strs]
    total_internal_node = 0
    for instance in instances:
        # a binary tree over n words has n-1 internal nodes
        total_internal_node += (len(instance.words) - 1)
    return instances, total_internal_node
def create(self, image=None, flavor_type=None, keypair=None, nics=None,
           az=None, management_network_name=None, sec_group=None,
           init_file_name=None):
    '''Create an instance

    :return: True on success, False on error
    '''
    rc = Instance.create(self, image, flavor_type, keypair, nics, az,
                         management_network_name, sec_group, init_file_name)
    if not rc:
        return False
    if self.tp_tool and not self.tp_tool.install():
        return False
    if not self.is_server:
        return True
    if self.tp_tool and not self.tp_tool.start_server():
        return False
    return True
print "Reading Dataset..." for element in input_values: classes.add(element[0]) for element in input_values: letter = element[0] out = np.zeros((1, 26)) out[0, ord(letter) - 65] = 1 element = np.delete(element, 0) element = element.astype(np.float) element = element.reshape((1, element.shape[0])) inst = Instance() inst.attributes = element inst.output_values = out inst.normalize() instances.append(inst) print "...Ready!" print "Instantiate neuralnet..." ann = NeuralNet([16, 32, 26], True) ann.learning_rate = 0.09 ann.momentum = 0.44 ann.instances(instances) print "...Ready!" print "Generate model..." ann.train(50, True) print "...Ready!"
def main():
    # XOR truth table as training data
    inst1 = Instance()
    inst1.attributes = np.array([[1, 1]])
    inst1.output_values = np.array([[0]])
    inst2 = Instance()
    inst2.attributes = np.array([[0, 1]])
    inst2.output_values = np.array([[1]])
    inst3 = Instance()
    inst3.attributes = np.array([[1, 0]])
    inst3.output_values = np.array([[1]])
    inst4 = Instance()
    inst4.attributes = np.array([[0, 0]])
    inst4.output_values = np.array([[0]])
    ann = NeuralNet([2, 2, 1], True)
    ann.learning_rate = 0.5
    ann.momentum = 0.2
    instances = [inst1, inst2, inst3, inst4]
    ann.instances(instances)
    t = time.time() * 1000
    ann.train(1800)
    t = (time.time() * 1000) - t
    for inst in instances:
        ann.instance(inst)
        ann.feed_forward()
        print(ann)
    print("Time: " + str(t))
def dispose(self):
    if self.tp_tool:
        self.tp_tool.dispose()
    Instance.dispose(self)
opts, args = getopt.getopt(sys.argv[1:], "d")
for o, a in opts:
    if o == "-d":
        _debug = 1
    else:
        assert False, "unknown option"
if len(sys.argv) == 1:
    sys.exit("an instance must be provided")
for argv in sys.argv[1:]:
    _ls_runs = 0
    _ils_runs = 0
    print(" ----- ")
    print("instance: %s" % argv)
    _instance = Instance(argv, "bz2")
    _instance.load()
    size = _instance.size()
    hcost = _instance.hipotetical_cost()
    for i in range(size - 1):
        permutations.append(Permutation(size, [i, i + 1]))
    shakes = []
    k = int(floor(size / 4))
    if _debug > 0:
        print("size: %d; k: %d; 4*k: %d" % (size, k, 4 * k))
    for s in range(k):
        p = {}
        p1 = {}
        p2 = {}
def load_vectors(filename, start, end):
    samples = get_samples(filename, start, end)
    frames = samples_to_16ms_frames(samples)
    return Instance.make_instances(frames)
test_instances = []
k = 5
f = open("data/train.csv", 'r')
is_first_row = True
i = 0
print("parsing..")
temp_instances = []
for line in f:
    if is_first_row:
        is_first_row = False
        continue
    elements = line.split(',')
    inst = Instance(int(elements[0]))
    features = []
    for element in elements[1:]:
        value = int(element)
        features.append(value)
    inst.set_features(features)
    temp_instances.append(inst)
    i = i + 1
train_instances = temp_instances[:2000]
test_instances = temp_instances[-num_test_instances:]
print("done.")
print(len(test_instances))
def get_status(instance_id):
    instancee = Instance(id=instance_id)
    if not instancee.is_valid():
        return Response(to_json(None), mimetype=_JSON_MIME, status=404)
    return Response(to_json(instancee.get_status()), mimetype=_JSON_MIME)
def send_update(instance_id):
    logger.debug('posting update: ' + json_util.dumps(request.get_json()))
    instancee = Instance(id=instance_id)
    instancee.update(json.dumps(request.get_json()))
    _db.update_instance(instancee)
    return Response(instancee.json_instance, mimetype=_JSON_MIME)
def get_latest_updates(instance_id, update_id):
    logger.info('getting latest updates since update ' + update_id)
    instancee = Instance(id=instance_id)
    updates = to_json(instancee.get_latest_updates(update_id))
    logger.info('returning updates: ' + str(updates))
    return Response(updates, mimetype=_JSON_MIME)
def __init__(self, conf_file):
    Instance.__init__(self, conf_file)
    self.tenant_id = self.nova.client.tenant_id
    self.instance_quota = self.nova.quotas.get(self.tenant_id).instances
    self.floatingip_quota = self.nova.quotas.get(self.tenant_id).floating_ips
def __init__(self, conf_file):
    Instance.__init__(self, conf_file)