Example #1
    def generate_parameters(self, parameter_id, **kwargs):
        """
        Returns a trial neural architecture as a serializable object.

        Parameters
        ----------
        parameter_id : int
        """
        if not self.history:
            self.init_search()

        new_father_id = None
        generated_graph = None
        if not self.training_queue:
            new_father_id, generated_graph = self.generate()
            new_model_id = self.model_count
            self.model_count += 1
            self.training_queue.append(
                (generated_graph, new_father_id, new_model_id))
            self.descriptors.append(generated_graph.extract_descriptor())

        graph, father_id, model_id = self.training_queue.pop(0)

        # from graph to json
        json_model_path = os.path.join(self.path, str(model_id) + ".json")
        json_out = graph_to_json(graph, json_model_path)
        self.total_data[parameter_id] = (json_out, father_id, model_id)

        return json_out
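
The JSON string returned above is what a trial receives as its "parameters". Below is a minimal sketch of the consuming side; the import path for json_to_graph is an assumption (it differs across NNI versions), and the training loop is omitted.

    # Sketch of the trial side, assuming this import path for the
    # network-morphism graph utilities (adjust to your NNI version).
    import nni
    import torch
    from nni.networkmorphism_tuner.graph import json_to_graph

    def run_trial():
        # nni.get_next_parameter() delivers the json_out produced by the tuner.
        rcv_json = nni.get_next_parameter()
        graph = json_to_graph(rcv_json)          # rebuild the searched graph
        model = graph.produce_torch_model()      # materialize it as a torch module
        out = model(torch.ones(1, 3, 32, 32))    # input shape must match the generator's
        # ... train `model` here, then report a real metric back to the tuner
        nni.report_final_result(float(out.mean()))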
Example #2
    def test_graph_json_transform(self):
        """ unittest for graph_json_transform function
        """

        graph_init = CnnGenerator(10, (32, 32, 3)).generate()
        graph_init = to_wider_graph(deepcopy(graph_init))
        graph_init = to_deeper_graph(deepcopy(graph_init))
        graph_init = to_skip_connection_graph(deepcopy(graph_init))
        json_out = graph_to_json(graph_init, "temp.json")

        graph_recover = json_to_graph(json_out)

        # compare all data in graph
        self.assertEqual(graph_init.input_shape, graph_recover.input_shape)
        self.assertEqual(graph_init.weighted, graph_recover.weighted)
        self.assertEqual(
            graph_init.layer_id_to_input_node_ids,
            graph_recover.layer_id_to_input_node_ids,
        )
        self.assertEqual(graph_init.adj_list, graph_recover.adj_list)
        self.assertEqual(graph_init.reverse_adj_list,
                         graph_recover.reverse_adj_list)
        self.assertEqual(len(graph_init.operation_history),
                         len(graph_recover.operation_history))
        self.assertEqual(graph_init.n_dim, graph_recover.n_dim)
        self.assertEqual(graph_init.conv, graph_recover.conv)
        self.assertEqual(graph_init.batch_norm, graph_recover.batch_norm)
        self.assertEqual(graph_init.vis, graph_recover.vis)

        node_list_init = [node.shape for node in graph_init.node_list]
        node_list_recover = [node.shape for node in graph_recover.node_list]
        self.assertEqual(node_list_init, node_list_recover)
        self.assertEqual(len(graph_init.node_to_id),
                         len(graph_recover.node_to_id))
        layer_list_init = [
            layer_description_extractor(item, graph_init.node_to_id)
            for item in graph_init.layer_list
        ]
        layer_list_recover = [
            layer_description_extractor(item, graph_recover.node_to_id)
            for item in graph_recover.layer_list
        ]
        self.assertEqual(layer_list_init, layer_list_recover)

        node_to_id_init = [
            graph_init.node_to_id[node] for node in graph_init.node_list
        ]
        node_to_id_recover = [
            graph_recover.node_to_id[node] for node in graph_recover.node_list
        ]
        self.assertEqual(node_to_id_init, node_to_id_recover)

        layer_to_id_init = [
            graph_init.layer_to_id[layer] for layer in graph_init.layer_list
        ]
        layer_to_id_recover = [
            graph_recover.layer_to_id[layer]
            for layer in graph_recover.layer_list
        ]
        self.assertEqual(layer_to_id_init, layer_to_id_recover)
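
The same round trip can be exercised quickly outside the test class; a minimal sketch, assuming these import paths (they vary between NNI versions):

    # Quick standalone round-trip check; import paths are an assumption.
    from nni.networkmorphism_tuner.graph import graph_to_json, json_to_graph
    from nni.networkmorphism_tuner.nn import CnnGenerator

    graph_init = CnnGenerator(10, (32, 32, 3)).generate()
    json_out = graph_to_json(graph_init, "temp.json")  # also writes temp.json
    graph_recover = json_to_graph(json_out)
    assert graph_init.input_shape == graph_recover.input_shape
    assert graph_init.adj_list == graph_recover.adj_list
    assert len(graph_init.node_list) == len(graph_recover.node_list)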
Example #3
    def test_to_deeper_graph(self):
        """ unittest for to_deeper_graph function
        """

        graph_init = CnnGenerator(10, (32, 32, 3)).generate()
        json_out = graph_to_json(graph_init, "temp.json")
        graph_recover = json_to_graph(json_out)
        deeper_graph = to_deeper_graph(deepcopy(graph_recover))
        model = deeper_graph.produce_torch_model()
        out = model(torch.ones(1, 3, 32, 32))
        self.assertEqual(out.shape, torch.Size([1, 10]))
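
The two morphism operators used in Example #2 but not covered by this test, to_wider_graph and to_skip_connection_graph, can be checked with the same shape assertion; a minimal sketch under the same (assumed) import paths:

    # Same pattern for the other morphisms; import paths are an assumption
    # and may differ between NNI versions.
    from copy import deepcopy
    import torch
    from nni.networkmorphism_tuner.graph_transformer import (
        to_wider_graph, to_skip_connection_graph)
    from nni.networkmorphism_tuner.nn import CnnGenerator

    graph = CnnGenerator(10, (32, 32, 3)).generate()
    for transform in (to_wider_graph, to_skip_connection_graph):
        morphed = transform(deepcopy(graph))
        out = morphed.produce_torch_model()(torch.ones(1, 3, 32, 32))
        assert out.shape == torch.Size([1, 10])

The remaining methods in this example appear to come from a distributed variant of the network-morphism tuner that coordinates master and slave nodes through files under ~/mountdir: fake_generate_parameters hands out an initial model, and the generate_parameters below batches graph generation and logs timings to the shared directory.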
    def fake_generate_parameters(self, parameter_id, **kwargs):
        """
        Returns an initialized model.
        """
        self.init_search()

        new_father_id = None
        generated_graph = None

        graph, father_id, model_id = self.training_queue.pop(0)

        # from graph to json
        json_model_path = os.path.join(self.path, str(model_id) + ".json")
        json_out = graph_to_json(graph, json_model_path)
        self.total_data[parameter_id] = (json_out, father_id, model_id)

        return json_out

    def generate_parameters(self, parameter_id, **kwargs):
        """
        Returns a trial neural architecture as a serializable object.

        Parameters
        ----------
        parameter_id : int
        """
        # When there is no history, the slave node uses the fake model, so this
        # method should never be reached in that case.
        if not self.history:
            print("If there is no history, generate_parameters should not be called!")
            exit(1)
        total_start = time.time()
        rate = 1

        exp_dir = os.path.join(os.environ["HOME"], "mountdir", "nni",
                               "experiments", str(nni.get_experiment_id()))
        generate_time_path = os.path.join(exp_dir, "generate_time")
        train_time_path = os.path.join(exp_dir, "train_time")
        if os.path.exists(generate_time_path) and os.path.exists(train_time_path):
            with open(generate_time_path, "r") as f:
                generate_time = float(f.read())
            with open(train_time_path, "r") as f:
                train_time = float(f.read())
            if generate_time != 0 and train_time != 0:
                # Generate several architectures per call when training is much
                # slower than generation; the ratio is capped below 5.
                realrate = int(train_time / generate_time)
                if 1 < realrate < 5:
                    rate = realrate
                elif realrate <= 1:
                    rate = 1

        trial_log_path = os.path.join(exp_dir, "trials",
                                      str(nni.get_trial_id()), "output.log")
        for i in range(rate):
            start = time.time()
            new_father_id = None
            generated_graph = None
            if not self.training_queue:
                new_father_id, generated_graph = self.generate()
                # total_data holds (json_out, father_id, model_id); only the
                # model id assigned by fake_generate_parameters is needed here.
                _, _, new_model_id = self.total_data[parameter_id]
                self.training_queue.append(
                    (generated_graph, new_father_id, new_model_id))
                # self.descriptors.append(generated_graph.extract_descriptor())
            else:
                print("training_queue should be an empty list.")
                exit(1)

            graph, father_id, model_id = self.training_queue.pop(0)

            # from graph to json
            json_model_path = os.path.join(self.path, str(model_id) + ".json")
            json_out = graph_to_json(graph, json_model_path)
            end = time.time()
            # self.total_data[parameter_id] = (json_out, father_id, model_id)
            json_and_id = ("json_out=" + str(json_out)
                           + "+father_id=" + str(father_id)
                           + "+parameter_id=" + str(parameter_id)
                           + "+history=True")
            lock.acquire()
            with open(trial_log_path, "a+") as f:
                f.write("single_generate=" + str(end - start) + "\n")
            with open(os.path.join(exp_dir, "graph.txt"), "a+") as f:
                f.write(json_and_id + "\n")
            lock.release()
        total_end = time.time()
        lock.acquire()
        with open(trial_log_path, "a+") as f:
            f.write("total_generate=" + str(total_end - total_start) + "\n")
        lock.release()

        # Record the average time spent generating one architecture so the
        # next call can recompute the rate.
        totime = abs(total_end - total_start)
        with open(generate_time_path, "w+") as f:
            gt = totime / rate
            f.write(str(gt))
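
The rate computed at the top of this method batches several architecture generations into one call when training is much slower than generation. Below is a standalone restatement of that clamping logic; the helper name compute_generation_rate is hypothetical and not part of NNI.

    # Hypothetical helper restating the batching-rate logic above.
    def compute_generation_rate(train_time, generate_time, cap=5):
        """Architectures to generate per call: roughly train_time/generate_time,
        kept in the range [1, cap)."""
        if generate_time == 0 or train_time == 0:
            return 1
        realrate = int(train_time / generate_time)
        return realrate if 1 < realrate < cap else 1

    assert compute_generation_rate(100.0, 30.0) == 3  # training ~3x slower -> batch 3
    assert compute_generation_rate(100.0, 10.0) == 1  # ratio >= cap falls back to 1
    assert compute_generation_rate(5.0, 10.0) == 1    # generation slower -> 1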