# Module-level imports and constants assumed by the main() functions below; they
# are not part of the original snippet. DATABASE_SERVER_URL and
# VENDOR_TRANSLATION_TABLE are placeholders: the real URL and the full vendor
# table live in the original script. ck_preprocess() further below performs its
# own database.* imports after extending sys.path.
import argparse
import glob
import json
import os
import sys

import database.bests as bests
import database.clblast as clblast
import database.db as db
import database.defaults as defaults
import database.io as io

DATABASE_SERVER_URL = "https://example.com/clblast/database.json"  # placeholder
VENDOR_TRANSLATION_TABLE = {  # illustrative subset: maps duplicate vendor names to a canonical one
    "GenuineIntel": "Intel",
    "Intel(R) Corporation": "Intel",
    "Advanced Micro Devices, Inc.": "AMD",
    "NVIDIA Corporation": "NVIDIA",
}


def main(argv):

    # Parses the command-line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("source_folder", help="The folder with JSON files to parse to add to the database")
    parser.add_argument("clblast_root", help="Root of the CLBlast sources")
    parser.add_argument("-v", "--verbose", action="store_true", help="Increase verbosity of the script")
    cl_args = parser.parse_args(argv)

    # Parses the path arguments
    database_filename = os.path.join(cl_args.clblast_root, "scripts", "database", "database.json")
    database_best_filename = os.path.join(cl_args.clblast_root, "scripts", "database", "database_best.json")
    json_files = os.path.join(cl_args.source_folder, "*.json")
    cpp_database_path = os.path.join(cl_args.clblast_root, "src", "database", "kernels")

    # Checks whether the command-line arguments are valid
    clblast_header = os.path.join(cl_args.clblast_root, "include", "clblast.h")  # Not used but just for validation
    if not os.path.isfile(clblast_header):
        raise RuntimeError("The path '" + cl_args.clblast_root + "' does not point to the root of the CLBlast library")
    if len(glob.glob(json_files)) < 1:
        print("[database] The path '" + cl_args.source_folder + "' does not contain any JSON files")

    # Downloads the database if a local copy is not present
    if not os.path.isfile(database_filename):
        io.download_database(database_filename, DATABASE_SERVER_URL)

    # Loads the database from disk
    database = io.load_database(database_filename)

    # Loops over all JSON files in the supplied folder
    for file_json in glob.glob(json_files):

        # Loads the newly imported data
        sys.stdout.write("[database] Processing '" + file_json + "' ")  # No newline printed
        imported_data = io.load_tuning_results(file_json)

        # Fixes the problem that some vendors use multiple different names
        for target in VENDOR_TRANSLATION_TABLE:
            if imported_data["device_vendor"] == target:
                imported_data["device_vendor"] = VENDOR_TRANSLATION_TABLE[target]

        # Adds the new data to the database
        old_size = db.length(database)
        database = db.add_section(database, imported_data)
        new_size = db.length(database)
        print("with " + str(new_size - old_size) + " new items")  # Newline printed here

    # Stores the modified database back to disk
    if len(glob.glob(json_files)) >= 1:
        io.save_database(database, database_filename)

    # Retrieves the best performing results
    print("[database] Calculating the best results per device/kernel...")
    database_best_results = bests.get_best_results(database)

    # Determines the defaults for other vendors and per vendor
    print("[database] Calculating the default values...")
    database_defaults = defaults.calculate_defaults(database, cl_args.verbose)
    database_best_results["sections"].extend(database_defaults["sections"])

    # Optionally outputs the database to disk
    if cl_args.verbose:
        io.save_database(database_best_results, database_best_filename)

    # Outputs the database as a C++ database
    print("[database] Producing a C++ database in '" + cpp_database_path + "'...")
    clblast.print_cpp_database(database_best_results, cpp_database_path)
    print("[database] All done")
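# Illustrative note (not in the original snippet): the vendor-translation loop
# in main() above is equivalent to a single dictionary lookup with a fallback,
# shown here as a small helper.
def normalize_vendor(vendor_name):
    # Returns the canonical vendor name, or the input unchanged if unknown
    return VENDOR_TRANSLATION_TABLE.get(vendor_name, vendor_name)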
# A variant of main() that additionally supports --remove_device and the
# --add_tuning_parameter* options, and that skips invalid JSON files instead of aborting.
def main(argv):

    # Parses the command-line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("source_folder", help="The folder with JSON files to parse to add to the database")
    parser.add_argument("clblast_root", help="Root of the CLBlast sources")
    parser.add_argument("-r", "--remove_device", type=str, default=None,
                        help="Removes all entries for a specific device")
    parser.add_argument("--add_tuning_parameter", type=str, default=None,
                        help="Adds this parameter to existing entries")
    parser.add_argument("--add_tuning_parameter_for_kernel", type=str, default=None,
                        help="Adds the above parameter for this kernel")
    parser.add_argument("--add_tuning_parameter_value", type=int, default=0,
                        help="Set this value as the default for the above parameter")
    parser.add_argument("-v", "--verbose", action="store_true", help="Increase verbosity of the script")
    cl_args = parser.parse_args(argv)

    # Parses the path arguments
    database_filename = os.path.join(cl_args.clblast_root, "scripts", "database", "database.json")
    database_best_filename = os.path.join(cl_args.clblast_root, "scripts", "database", "database_best.json")
    json_files = os.path.join(cl_args.source_folder, "*.json")
    cpp_database_path = os.path.join(cl_args.clblast_root, "src", "database", "kernels")

    # Checks whether the command-line arguments are valid
    clblast_header = os.path.join(cl_args.clblast_root, "include", "clblast.h")  # Not used but just for validation
    if not os.path.isfile(clblast_header):
        raise RuntimeError("The path '" + cl_args.clblast_root + "' does not point to the root of the CLBlast library")
    if len(glob.glob(json_files)) < 1:
        print("[database] The path '" + cl_args.source_folder + "' does not contain any JSON files")

    # Downloads the database if a local copy is not present
    if not os.path.isfile(database_filename):
        io.download_database(database_filename, DATABASE_SERVER_URL)

    # Loads the database from disk
    database = io.load_database(database_filename)

    # Loops over all JSON files in the supplied folder
    for file_json in glob.glob(json_files):
        sys.stdout.write("[database] Processing '" + file_json + "' ")  # No newline printed
        try:
            # Loads the newly imported data
            imported_data = io.load_tuning_results(file_json)

            # Adds the new data to the database
            old_size = db.length(database)
            database = db.add_section(database, imported_data)
            new_size = db.length(database)
            print("with " + str(new_size - old_size) + " new items")  # Newline printed here
        except ValueError:
            print("--- WARNING: invalid file, skipping")

    # Checks for tuning results with mis-matched entries
    remove_mismatched_arguments(database)

    # Stores the modified database back to disk
    if len(glob.glob(json_files)) >= 1:
        io.save_database(database, database_filename)

    # Removes database entries before continuing
    if cl_args.remove_device is not None:
        print("[database] Removing all results for device '%s'" % cl_args.remove_device)
        remove_database_entries(database, {"clblast_device_name": cl_args.remove_device})  # , "kernel_family": "xgemm"})
        io.save_database(database, database_filename)

    # Adds new tuning parameters to existing database entries
    if cl_args.add_tuning_parameter is not None and \
       cl_args.add_tuning_parameter_for_kernel is not None:
        print("[database] Adding tuning parameter: '%s' for kernel '%s' with default %d" %
              (cl_args.add_tuning_parameter, cl_args.add_tuning_parameter_for_kernel,
               cl_args.add_tuning_parameter_value))
        add_tuning_parameter(database, cl_args.add_tuning_parameter,
                             cl_args.add_tuning_parameter_for_kernel,
                             cl_args.add_tuning_parameter_value)
        io.save_database(database, database_filename)

    # Retrieves the best performing results
    print("[database] Calculating the best results per device/kernel...")
    database_best_results = bests.get_best_results(database)

    # Determines the defaults for other vendors and per vendor
    print("[database] Calculating the default values...")
    database_defaults = defaults.calculate_defaults(database, cl_args.verbose)
    database_best_results["sections"].extend(database_defaults["sections"])

    # Optionally outputs the database to disk
    if cl_args.verbose:
        io.save_database(database_best_results, database_best_filename)

    # Outputs the database as a C++ database
    print("[database] Producing a C++ database in '" + cpp_database_path + "'...")
    clblast.print_cpp_database(database_best_results, cpp_database_path)
    print("[database] All done")
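# remove_mismatched_arguments(), remove_database_entries() and add_tuning_parameter()
# are helpers defined elsewhere in the original script and are not shown here.
# Below are minimal sketches of the latter two, assuming the database is a dict with
# a "sections" list whose entries carry the matched fields (e.g. "clblast_device_name",
# "kernel") and a "results" list of entries with a "parameters" dict; the real
# implementations may differ.
def remove_database_entries(database, remove_if_matches_fields):
    # Drops every section that matches all of the given field/value pairs
    assert len(remove_if_matches_fields) > 0

    def matches(section):
        return all(section.get(key) == value
                   for key, value in remove_if_matches_fields.items())

    database["sections"] = [section for section in database["sections"]
                            if not matches(section)]


def add_tuning_parameter(database, parameter_name, kernel, default_value):
    # Adds a new tuning parameter with a default value to every result of the
    # sections belonging to the given kernel (field name assumed)
    for section in database["sections"]:
        if section.get("kernel") == kernel:
            for result in section["results"]:
                result["parameters"][parameter_name] = default_value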
def ck_preprocess(i):
    ck = i['ck_kernel']
    del i['ck_kernel']
    rt = i['run_time']
    deps = i['deps']
    env = i.get('env', {})
    pass_to_make = i
    pli = i['misc']
    rr = {}
    # print(json.dumps(deps['lib-clblast'], indent=2))
    # print(deps['lib-clblast']['uoa'])
    # for target in VENDOR_TRANSLATION_TABLE:
    #     if d["device_vendor"] == target:
    #         d["device_vendor"] = VENDOR_TRANSLATION_TABLE[target]

    tos = pli['target_os_uoa']
    tdid = pli['device_id']
    adf = pli['add_to_features']
    # compiler = adf['gpgpu'][0]['gpgpu_deps']['compiler']['uoa']
    # print(json.dumps(i['env'], indent=2))

    docompile = int(env['CK_FORCE_RECOMPILE'])
    if docompile == 0:
        print("[CK_FORCE_RECOMPILE] Exit %s" % (env['CK_FORCE_RECOMPILE']))
        rr["return"] = 0
        return rr
    # print(tos, tdid)
    print("[CK_FORCE_RECOMPILE] %s" % (env['CK_FORCE_RECOMPILE']))

    # Get the default values from CLBlast
    deps_cb = deps['lib-clblast']
    uoa = deps_cb['uoa']
    b = deps_cb['cus']
    pl = b['path_lib']
    # bench = "xgemm"
    # bench += ".hpp"
    pl = pl.split("install")[0]  #### VERIFY WITH INSTALL SCRIPT
    pl_suff = "src/scripts/database/"
    pk = pl
    pk_suff = "src/src/database/kernels/"
    pl += pl_suff
    pl1 = pl + '/database'
    pk += pk_suff
    # print(pl, pk)
    sys.path.append(pl)
    sys.path.append(pl1)
    ####
    import database.io as io
    import database.db as db
    import database.clblast as clblast
    import database.bests as bests
    import database.defaults as defaults

    best_filename = "database_best.json"
    if not os.path.isfile(best_filename):
        print("[database] database_best.json not found")
        database_filename = pl + "database.json"
        if not os.path.isfile(database_filename):
            io.download_database(database_filename, DATABASE_SERVER_URL)
        else:
            print("[database] DB found")
        if os.path.isfile(database_filename):
            database = io.load_database(database_filename)

            # Retrieves the best performing results
            print("[database] Calculating the best results per device/kernel...")
            database_best_results = bests.get_best_results(database)

            # Determines the defaults for other vendors and per vendor
            print("[database] Calculating the default values...")
            database_defaults = defaults.calculate_defaults(database, 0)  # the second argument denotes verbosity
            database_best_results["sections"].extend(database_defaults["sections"])
            database_best_filename = 'database_best.json'
            io.save_database(database_best_results, database_best_filename)  # Optionally outputs the database to disk
            #### TEST to get best and default param
    else:
        print("[database] database_best.json found")
        print("[database] Loading %s" % (best_filename))
        database_best_results = json.loads(open(best_filename).read())

    best = database_best_results['sections']
    print("[Tuning] Checking new best configuration")

    # Load the file with the new best configuration (to be generalized)
    mybestf = env['CK_CLBLAST_BEST_CONF_FILE']
    mybestd = {}
    if os.path.isfile(mybestf):
        mybestd = json.loads(open(mybestf).read())
        del mybestd['data']
        #####
        MYFIND, k = ck2clblast(best, mybestd)
    else:
        MYFIND = 0

    if MYFIND:
        print("[Tuning] Modify database_best entries")
        print("[Tuning] Creating new kernels directory")
        cp = os.getcwd()
        src_new_kernels = cp + "/kernels_tmp"
        if not os.path.exists(src_new_kernels):
            os.makedirs(src_new_kernels)
        else:
            print("[Tuning] %s already exists" % (src_new_kernels))
        print("[Tuning] Writing new kernel: %s " % (src_new_kernels))
        clblast.print_cpp_database(database_best_results, src_new_kernels)
        rr = make(src_new_kernels, pk, tos, tdid, uoa, k)
    else:
        print("[Tuning] Nothing to do")
        print("[Tuning] Exit")

    rr['return'] = 0
    #### Do not add anything below this point
    return rr
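# ck2clblast() and make() are CK-specific helpers defined elsewhere in the
# original preprocessing script and are not shown here; make() presumably
# rebuilds CLBlast against the regenerated kernel headers. Purely as a
# hypothetical illustration of the kind of work ck2clblast() performs (the real
# signature, field names and return values may differ), a sketch:
def ck2clblast_sketch(best_sections, ck_best):
    # Looks for a best-results section matching the CK-tuned device and kernel
    # family, overwrites its parameters with the CK-tuned ones, and reports
    # whether anything was changed together with the affected kernel family.
    found = 0
    kernel_family = None
    for section in best_sections:
        if (section.get("clblast_device_name") == ck_best.get("device_name") and
                section.get("kernel_family") == ck_best.get("kernel_family")):
            section["results"][0]["parameters"] = ck_best.get("parameters", {})
            kernel_family = section.get("kernel_family")
            found = 1
    return found, kernel_family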
# A variant of main() that supports removing all entries for a given device via --remove_device.
def main(argv):

    # Parses the command-line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("source_folder", help="The folder with JSON files to parse to add to the database")
    parser.add_argument("clblast_root", help="Root of the CLBlast sources")
    parser.add_argument("-r", "--remove_device", type=str, default=None,
                        help="Removes all entries for a specific device")
    parser.add_argument("-v", "--verbose", action="store_true", help="Increase verbosity of the script")
    cl_args = parser.parse_args(argv)

    # Parses the path arguments
    database_filename = os.path.join(cl_args.clblast_root, "scripts", "database", "database.json")
    database_best_filename = os.path.join(cl_args.clblast_root, "scripts", "database", "database_best.json")
    json_files = os.path.join(cl_args.source_folder, "*.json")
    cpp_database_path = os.path.join(cl_args.clblast_root, "src", "database", "kernels")

    # Checks whether the command-line arguments are valid
    clblast_header = os.path.join(cl_args.clblast_root, "include", "clblast.h")  # Not used but just for validation
    if not os.path.isfile(clblast_header):
        raise RuntimeError("The path '" + cl_args.clblast_root + "' does not point to the root of the CLBlast library")
    if len(glob.glob(json_files)) < 1:
        print("[database] The path '" + cl_args.source_folder + "' does not contain any JSON files")

    # Downloads the database if a local copy is not present
    if not os.path.isfile(database_filename):
        io.download_database(database_filename, DATABASE_SERVER_URL)

    # Loads the database from disk
    database = io.load_database(database_filename)

    # Loops over all JSON files in the supplied folder
    for file_json in glob.glob(json_files):

        # Loads the newly imported data
        sys.stdout.write("[database] Processing '" + file_json + "' ")  # No newline printed
        imported_data = io.load_tuning_results(file_json)

        # Fixes the problem that some vendors use multiple different names
        for target in VENDOR_TRANSLATION_TABLE:
            if imported_data["device_vendor"] == target:
                imported_data["device_vendor"] = VENDOR_TRANSLATION_TABLE[target]

        # Adds the new data to the database
        old_size = db.length(database)
        database = db.add_section(database, imported_data)
        new_size = db.length(database)
        print("with " + str(new_size - old_size) + " new items")  # Newline printed here

    # Checks for tuning results with mis-matched entries
    remove_mismatched_arguments(database)

    # Stores the modified database back to disk
    if len(glob.glob(json_files)) >= 1:
        io.save_database(database, database_filename)

    # Removes database entries before continuing
    if cl_args.remove_device is not None:
        print("[database] Removing all results for device '%s'" % cl_args.remove_device)
        remove_database_entries(database, {"device": cl_args.remove_device})
        io.save_database(database, database_filename)

    # Retrieves the best performing results
    print("[database] Calculating the best results per device/kernel...")
    database_best_results = bests.get_best_results(database)

    # Determines the defaults for other vendors and per vendor
    print("[database] Calculating the default values...")
    database_defaults = defaults.calculate_defaults(database, cl_args.verbose)
    database_best_results["sections"].extend(database_defaults["sections"])

    # Optionally outputs the database to disk
    if cl_args.verbose:
        io.save_database(database_best_results, database_best_filename)

    # Outputs the database as a C++ database
    print("[database] Producing a C++ database in '" + cpp_database_path + "'...")
    clblast.print_cpp_database(database_best_results, cpp_database_path)
    print("[database] All done")
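# Entry-point sketch (not part of the original snippets): the database script is
# assumed to be invoked from the command line with the tuning-results folder and
# the CLBlast root as arguments, forwarding everything after the program name to
# main(), e.g.:
#     python database.py /path/to/tuning_results /path/to/CLBlast
if __name__ == "__main__":
    main(sys.argv[1:])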