Example #1
0
 def get_user_suffix(self, index=0):
     """ Return the filename suffix of the kernel source at *index*, or None.

     A suffix is only reported when the source entry looks like a filename
     and actually contains a dot; otherwise there is nothing the user
     specified and None is returned.
     """
     source = self.kernel_sources[index]
     if not util.looks_like_a_filename(source):
         return None
     if "." not in source:
         return None
     # everything after the last dot, with the dot re-attached
     return "." + source.rsplit(".", 1)[-1]
Example #2
0
    def prepare_list_of_files(self, kernel_name, params, grid, threads, block_size_names):
        """ prepare the kernel string along with any additional files

        Only the first source in the list may include or read in the others;
        every later source is an additional file that may itself contain
        tunable parameters. Each additional file is written, with preprocessor
        statements inserted, to a temporary file, and every occurrence of the
        original filename inside the first source is rewritten to point at
        that temporary file.

        :param kernel_name: A string specifying the kernel name.
        :type kernel_name: string

        :param params: A dictionary with the tunable parameters for this particular
            instance.
        :type params: dict()

        :param grid: The grid dimensions for this instance. The grid dimensions are
            also inserted into the code as if they are tunable parameters for
            convenience.
        :type grid: tuple()

        :param threads: The thread block dimensions for this instance. The thread block are
            also inserted into the code as if they are tunable parameters for
            convenience.
        :type threads: tuple()

        :param block_size_names: A list of strings that denote the names
            for the thread block dimensions.
        :type block_size_names: list(string)

        """
        temp_files = dict()

        for index, source in enumerate(self.kernel_sources):
            # every source after the first one must be an actual file on disk
            if index > 0 and not util.looks_like_a_filename(source):
                raise ValueError('When passing multiple kernel sources, the secondary entries must be filenames')

            processed = self.get_kernel_string(index, params)
            # insert the preprocessor statements for this instance
            current_name, processed = util.prepare_kernel_string(kernel_name, processed, params, grid, threads, block_size_names, self.lang)

            if index == 0:
                # the primary kernel source is kept in memory
                name = current_name
                kernel_string = processed
            else:
                # secondary sources go to a temp file that keeps the original extension
                temp_file = util.get_temp_filename(suffix="." + source.split(".")[-1])
                temp_files[source] = temp_file
                util.write_file(temp_file, processed)
                # point the primary source at the temp file instead of the original
                kernel_string = kernel_string.replace(source, temp_file)

        return name, kernel_string, temp_files
Example #3
0
def store_results(results_filename,
                  kernel_name,
                  kernel_string,
                  tune_params,
                  problem_size,
                  results,
                  env,
                  top=3,
                  objective=None,
                  objective_higher_is_better=None):
    """ stores tuning results to a JSON file

        Stores the top (3% by default) best kernel configurations in a JSON file.
        The results are stored for a specific device (retrieved using env['device_name'])
        and for a specific problem_size. If the file already exists, new results for
        this device and problem_size will be appended. Any previous results already stored
        in the file for this specific device and problem_size will be overwritten.

        :param results_filename: Filename of the JSON file in which the results will be stored.
            Results will be appended if the file already exists. Existing results within the
            file for the same device and problem_size will be overwritten.
        :type results_filename: string

        :param kernel_name: The name of the kernel, recorded in the file's metadata and
            checked for consistency against an existing results file.
        :type kernel_name: string

        :param kernel_string: The kernel source (or a filename pointing to it); stored in
            the metadata of a newly created results file when it is a plain string.
        :type kernel_string: string

        :param tune_params: The tunable parameters of this kernel.
        :type tune_params: dict

        :param problem_size: The problem_size this kernel was tuned for
        :type problem_size: tuple

        :param results: A list of dictionaries of all executed kernel configurations and their
            execution times, and possibly other user-defined metrics, as returned by
            tune_kernel().
        :type results: list(dict)

        :param env: A dictionary with information about the environment
            in which the tuning took place. This records device name, properties,
            version info, and so on. Typically this dictionary is returned by tune_kernel().
        :type env: dict

        :param top: Denotes the top percentage of results to store in the results file
        :type top: float

        :param objective: Optimization objective to sort results on, consisting of a string
            that also occurs in results as a metric.
        :type objective: string

        :param objective_higher_is_better: A boolean that specifies whether the objective should
            be maximized or minimized.
        :type objective_higher_is_better: bool

        :raises ValueError: When no result contains the objective, or when kernel_name,
            tune_params, or objective are inconsistent with an existing results file.
    """

    objective, objective_higher_is_better = get_objective_defaults(
        objective, objective_higher_is_better)

    #filter results to only those that contain the objective
    results_filtered = [item for item in results if objective in item]

    # guard against an opaque "max() arg is an empty sequence" error
    if not results_filtered:
        raise ValueError("No results contain the objective: " + str(objective))

    #get top results
    if objective_higher_is_better:
        best_config = max(results_filtered, key=lambda x: x[objective])
    else:
        best_config = min(results_filtered, key=lambda x: x[objective])
    best = best_config[objective]
    top_range = top / 100.0

    def top_result(item):
        # within `top` percent of the best value
        # NOTE(review): this band assumes a positive `best`; for negative
        # objective values the inequality flips — confirm intended behavior
        current = item[objective]
        if objective_higher_is_better:
            return current > best * (1 - top_range)
        return current < best * (1 + top_range)

    top_results = [item for item in results_filtered if top_result(item)]

    #filter result items to just the tunable parameters and the objective
    filter_keys = list(tune_params.keys()) + [objective]
    top_results = [{k: item[k] for k in filter_keys} for item in top_results]

    #read existing results file
    if os.path.isfile(results_filename):
        meta, data = _read_results_file(results_filename)

        #validate consistency between arguments and results file
        if kernel_name != meta["kernel_name"]:
            raise ValueError(
                "Mismatch between given kernel_name and results file")
        if not all(param in meta["tunable_parameters"]
                   for param in tune_params):
            raise ValueError(
                "Mismatch between tunable_parameters in results file and tune_params"
            )
        if objective != meta["objective"]:
            raise ValueError(
                "Mismatch between given objective and results file")
    else:
        #new file
        meta = {}
        meta["version_number"] = "1.0"
        meta["kernel_name"] = kernel_name
        # only store the kernel source when it is a plain string (not a
        # code-generator callable and not a list of sources)
        if kernel_string and not callable(kernel_string) and not isinstance(
                kernel_string, list):
            if util.looks_like_a_filename(kernel_string):
                meta["kernel_string"] = util.read_file(kernel_string)
            else:
                meta["kernel_string"] = kernel_string
        meta["objective"] = objective
        meta["objective_higher_is_better"] = objective_higher_is_better
        meta["tunable_parameters"] = list(tune_params.keys())
        data = []

    #insert new results into the list
    if not isinstance(problem_size, (list, tuple)):
        problem_size = (problem_size, )
    problem_size_str = "x".join(str(i) for i in problem_size)

    #replace all non alphanumeric characters with underscore
    dev_name = re.sub('[^0-9a-zA-Z]+', '_', env["device_name"].strip())

    #remove existing entries for this GPU and problem_size combination from the results if any
    data = [
        d for d in data if not (d["device_name"] == dev_name
                                and d["problem_size"] == problem_size_str)
    ]

    #extend the results with the top_results
    results = []
    for result in top_results:
        record = {
            "device_name": dev_name,
            "problem_size": problem_size_str,
            "tunable_parameters": {}
        }
        for k, v in result.items():
            if k in tune_params:
                record["tunable_parameters"][k] = v
        record[objective] = result[objective]
        results.append(record)
    data.extend(results)

    #write output file
    meta["data"] = data
    with open(results_filename, 'w') as fh:
        fh.write(json.dumps(meta, indent=""))