# Assumed imports. The Platform path matches GNU Radio 3.7's GRC; the gr-mtb
# internals used below (bt, task, parametrization, STATIC, result) are
# project-specific and their module paths are not shown in these snippets.
import imp
import json
import os
import tempfile
import time

from gnuradio.grc.python.Platform import Platform


def _get_flowgraph_from_grcfile(filename):
    """Parse a .grc file and return the validated flow graph object."""
    platform = Platform()
    data = platform.parse_flow_graph(filename)

    fg = platform.get_new_flow_graph()
    fg.import_data(data)
    fg.grc_file_path = os.path.abspath(filename)
    fg.validate()

    if not fg.is_valid():
        raise StandardError("Compilation error")

    return fg
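# A minimal usage sketch ("my_graph.grc" is a hypothetical path; get_option
# is the same flow-graph accessor used further below):
#
#     fg = _get_flowgraph_from_grcfile("my_graph.grc")
#     print fg.get_option("id")   # id of the top block GRC would generate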
    @staticmethod  # no self in the original snippet, so presumably a staticmethod
    def from_grc(filename):
        """
        Read a .grc file and build a task with one static parametrization
        per flow-graph variable.
        """
        platform = Platform()
        data = platform.parse_flow_graph(filename)

        fg = platform.get_new_flow_graph()
        fg.import_data(data)
        fg.grc_file_path = os.path.abspath(filename)
        fg.validate()

        if not fg.is_valid():
            raise StandardError("Compilation error")
        _task = task("", "")
        # Seed the task with each variable's current value as a static point.
        for var in fg.get_variables():
            _task.set_parametrization(var.get_id(),
                                      parametrization(STATIC, var.get_var_value()))
        return _task
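# A minimal usage sketch. `Broker` is a hypothetical stand-in for whatever
# class hosts from_grc; get_total_points() is the task method used below:
#
#     t = Broker.from_grc("benchmark.grc")
#     print "points:", t.get_total_points()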
    def execute_all(self, task, storage=None):
        """
        Execute all parametrizations defined in the task object.

        Parameters
        ----------

        task -- the task, which is broken down into its individual execution points
        storage -- if a string, the results are stored at that path as JSON;
                   if a file-like object, the results are written to it directly

        Returns
        -------

        results -- the results of the computation as a list of result objects
        """
        instruction = task._task_type
        if instruction == bt.RUN_FG:
            # The top block class already exists as an importable module.
            class_n = task.class_name
            time.sleep(1)
            module_n = str(task.module_name)
            module = __import__(module_n, fromlist=[str(class_n)])
            self.block_class = getattr(module, task.class_name)
        elif instruction == bt.RUN_GRC:
            # Write the task's GRC XML to a temp file, compile it with GRC,
            # and import the generated top block class.
            if not hasattr(self, "temp_outdir"):
                self.temp_outdir = tempfile.mkdtemp(suffix="_py", prefix="gr-mtb-")
            temp_grc_file = tempfile.NamedTemporaryFile(suffix=".grc",
                                                        delete=False,
                                                        dir=self.temp_outdir)
            temp_grc_file.write(task.grcxml)
            temp_grc_file.close()

            platform = Platform()
            data = platform.parse_flow_graph(temp_grc_file.name)

            fg = platform.get_new_flow_graph()
            fg.import_data(data)
            fg.grc_file_path = os.path.abspath(temp_grc_file.name)
            fg.validate()

            if not fg.is_valid():
                raise StandardError("Compilation error")
            class_n = fg.get_option("id")
            filepath = os.path.join(self.temp_outdir, class_n + ".py")
            gen = platform.get_generator()(fg, filepath)
            gen.write()

            module = imp.load_source(class_n, filepath)
            self.block_class = getattr(module, class_n)
        results = []
        print "processing {:d} points".format(task.get_total_points())
        # Instantiate and run the block once per execution point.
        for inst, values in self.parameterize(task, self.block_class):
            datadict = self._execute(inst, task.sinks)
            results.append(result(values, datadict))
        try:
            if storage and not hasattr(storage, "write"):
                # Treat storage as a path and dump the results there as JSON.
                with open(str(storage), "w") as outfile:
                    json.dump([r.to_dict() for r in results], outfile)
            elif storage:
                # Treat storage as an already-open file-like object.
                json.dump([r.to_dict() for r in results], storage)
        except IOError as e:
            print e
        return results
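# A minimal usage sketch. `worker` is a hypothetical instance of the class
# hosting execute_all, `t` a task built as above, and "results.json" an
# arbitrary output path; to_dict() is the result method used above:
#
#     results = worker.execute_all(t, storage="results.json")
#     for r in results:
#         print r.to_dict()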