def _getBenchmarkRuns(info, meta, outdir):
    """Collect the latest benchmark-run directory for each regression commit.

    Builds the benchmark output root from the metadata fields
    (platform/framework/net/metric/identifier, each sanitized through
    getFilename), then for every commit listed in info["regression_commits"]
    resolves its per-commit directory and keeps the most recent run inside it.

    :param info: dict; must contain "regression_commits", a list of entries
        with "commit" and "commit_time" keys.
    :param meta: dict with "platform", "framework", "net_name", "metric",
        and "identifier" keys used to build the directory path.
    :param outdir: root output directory.
    :return: list of latest-run directory paths (commits with no directory
        or no completed run are skipped).
    :raises AssertionError: if "regression_commits" is missing from info.
    """
    dir_name = os.path.join(
        outdir,
        getFilename(meta["platform"]),
        getFilename(meta["framework"]),
        getFilename(meta["net_name"]),
        getFilename(meta["metric"]),
        getFilename(meta["identifier"]),
    )
    assert "regression_commits" in info, \
        "regression_commits field is missing from info"
    dirs = []
    for entry in info["regression_commits"]:
        # Fixed: the original called os.path.jon (a typo), which raised
        # AttributeError on every invocation.
        one_dir = os.path.join(
            dir_name, getDirectory(entry["commit"], entry["commit_time"])
        )
        if not os.path.isdir(one_dir):
            continue
        last_run = _getLatestRun(one_dir)
        if last_run is None:
            continue
        dirs.append(last_run)
    return dirs
def _downloadBinaries(self, info_dict):
    """Download each benchmark program and rewrite its location to a local path.

    For every entry in info_dict["programs"]: download the file, translate its
    declared location ("//"-prefixed repo path, http(s) URL, or absolute path)
    into a path under self.args.root_model_dir, optionally append ".ipa" for
    the iOS main program, mark the file user read/write/execute, and write the
    resolved local path back into the entry.

    :param info_dict: dict whose "programs" maps binary names to dicts with a
        "location" key; mutated in place.
    """
    programs = info_dict["programs"]
    for bin_name in programs:
        program_location = programs[bin_name]["location"]
        # Download first using the original location string; the local path
        # is derived afterwards. The downloader presumably mirrors the same
        # mapping below — TODO confirm against benchmark_downloader.
        self.benchmark_downloader.downloadFile(program_location, None)
        if program_location.startswith("//"):
            # Repo-style "//path" -> "<root_model_dir>/path".
            program_location = self.args.root_model_dir + program_location[
                1:]
        elif program_location.startswith("http"):
            # URLs become a sanitized filename under root_model_dir.
            replace_pattern = {
                " ": '-',
                "\\": '-',
                ":": '/',
            }
            program_location = self.args.root_model_dir + '/' +\
                getFilename(program_location, replace_pattern=replace_pattern)
        elif program_location.startswith("/"):
            # Absolute paths are re-rooted under root_model_dir.
            program_location = self.args.root_model_dir + program_location
        if self.args.platform.startswith("ios") and \
                bin_name == "program" and \
                not program_location.endswith(".ipa"):
            # iOS app bundles must carry the .ipa extension; rename in place.
            new_location = program_location + ".ipa"
            os.rename(program_location, new_location)
            program_location = new_location
        # Make the downloaded binary executable by the owner.
        os.chmod(program_location, stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR)
        programs[bin_name]["location"] = program_location
def downloadFile(self, location, md5):
    """Download a file to a local path under self.root_model_dir.

    Supports http(s) URLs (sanitized into a local filename) and "//"-prefixed
    repo paths. If the file already exists locally and its md5 matches the
    expected value (or no md5 was given), the download is skipped.

    :param location: source location; http(s) URL or "//"-prefixed path.
        Anything else is ignored.
    :param md5: expected md5 hex digest, or None to trust any cached copy.
    """
    if location.startswith("http"):
        dirs = location.split(":/")
        replace_pattern = {
            ' ': '-',
            '\\': '-',
            ':': '/',
        }
        path = self.root_model_dir + '/' +\
            getFilename(location, replace_pattern=replace_pattern)
    elif location[0:2] != "//":
        # Unsupported location scheme; nothing to do.
        return
    else:
        dirs = location[2:].split("/")
        if len(dirs) <= 2:
            return
        path = self.root_model_dir + location[1:]
    if os.path.isfile(path):
        if md5:
            m = hashlib.md5()
            # Fixed: stream the file in chunks inside a context manager.
            # The original open(path, 'rb').read() leaked the file handle
            # and loaded the entire file into memory at once.
            with open(path, 'rb') as f:
                for chunk in iter(lambda: f.read(8192), b""):
                    m.update(chunk)
            new_md5 = m.hexdigest()
            if md5 == new_md5:
                getLogger().info("File {}".format(os.path.basename(path)) +
                                 " is cached, skip downloading")
                return
            # md5 mismatch: fall through and re-download.
        else:
            # assume the file is the same
            return
    downloader_controller = DownloadFile(dirs=dirs,
                                         logger=self.logger,
                                         args=self.args)
    downloader_controller.download_file(location, path)
def _downloadBinaries(self, info_dict):
    """Download benchmark binaries and return their local locations.

    Each program in info_dict["programs"] is downloaded, its location is
    rewritten to a path under self.args.root_model_dir, the iOS main program
    is renamed to carry a ".ipa" suffix, and the file is made owner
    read/write/executable. The entries are updated in place.

    :param info_dict: dict whose "programs" maps binary names to dicts with
        a "location" key; mutated in place.
    :return: list of the resolved local program locations.
    """
    programs = info_dict["programs"]
    locations = []
    for name, entry in programs.items():
        loc = entry["location"]
        self.benchmark_downloader.downloadFile(loc, None)
        if loc.startswith("//"):
            # "//path" style repo location -> rooted under root_model_dir.
            loc = self.args.root_model_dir + loc[1:]
        elif loc.startswith("http"):
            # URLs become a sanitized filename under root_model_dir.
            sanitize = {
                " ": "-",
                "\\": "-",
                ":": "/",
            }
            loc = os.path.join(
                self.args.root_model_dir,
                getFilename(loc, replace_pattern=sanitize),
            )
        elif loc.startswith("/"):
            loc = self.args.root_model_dir + loc
        needs_ipa = (
            self.args.platform.startswith("ios")
            and name == "program"
            and not loc.endswith(".ipa")
        )
        if needs_ipa:
            # iOS app bundles must end in .ipa; rename on disk.
            renamed = loc + ".ipa"
            os.rename(loc, renamed)
            loc = renamed
        os.chmod(loc, stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR)
        entry["location"] = loc
        locations.append(loc)
    return locations
def report(self, content):
    """Write benchmark data as one JSON file in a flat directory.

    The output directory is <simple_local_reporter>/<identifier> when the
    metadata carries an identifier, otherwise a fresh temp directory with an
    "aibench" prefix. Any pre-existing directory of the same name is wiped
    before data.txt is written.

    :param content: dict with self.DATA (payload) and self.META (metadata).
    """
    data = content[self.DATA]
    if data is None or len(data) == 0:
        getLogger().info("No data to write")
        return
    meta = content[self.META]
    if "identifier" in meta:
        out_dir = os.path.join(
            self.simple_local_reporter, getFilename(meta["identifier"])
        )
    else:
        out_dir = tempfile.mkdtemp(
            dir=self.simple_local_reporter, prefix="aibench"
        )
    # Recreate the directory from scratch (mkdtemp already created one;
    # an identifier directory may survive from an earlier run).
    if os.path.exists(out_dir):
        shutil.rmtree(out_dir, True)
    os.makedirs(out_dir)
    with open(os.path.join(out_dir, "data.txt"), 'w') as out_file:
        out_file.write(json.dumps(data))
    pname = meta[self.PLATFORM]
    if "platform_hash" in meta:
        pname += " ({})".format(meta["platform_hash"])
    getLogger().info("Writing file for {}: {}".format(pname, out_dir))
def downloadFile(self, location, md5):
    """Download a file to a local path under self.root_model_dir and return it.

    Supports http(s) URLs (sanitized into a local filename) and "//"-prefixed
    repo paths. A cached local copy is reused when its md5 matches the
    expected digest, or unconditionally when no digest was provided.

    :param location: source location; http(s) URL or "//"-prefixed path.
        Anything else returns None without downloading.
    :param md5: expected md5 hex digest, or None to trust any cached copy.
    :return: local file path, or None for unsupported/short locations.
    """
    if location.startswith("http"):
        dirs = location.split(":/")
        replace_pattern = {
            " ": "-",
            "\\": "-",
            ":": "/",
        }
        path = os.path.join(
            self.root_model_dir,
            getFilename(location, replace_pattern=replace_pattern),
        )
    elif not location.startswith("//"):
        # Unsupported location scheme; nothing to do.
        return
    else:
        dirs = location[2:].split("/")
        if len(dirs) <= 2:
            return
        path = self.root_model_dir + location[1:]
    if os.path.isfile(path):
        if md5:
            getLogger().info("Calculate md5 of {}".format(path))
            file_hash = None
            # Hash in 8 KiB chunks so large model files are never fully
            # resident in memory.
            with open(path, "rb") as f:
                file_hash = hashlib.md5()
                for chunk in iter(lambda: f.read(8192), b""):
                    file_hash.update(chunk)
            new_md5 = file_hash.hexdigest()
            # Drop the hash object and force a collection — presumably to
            # bound peak memory when many large files are checked in a row.
            del file_hash
            gc.collect()
            if md5 == new_md5:
                getLogger().info(
                    "File {}".format(os.path.basename(path))
                    + " is cached, skip downloading"
                )
                return path
            # md5 mismatch: fall through and re-download.
        else:
            # assume the file is the same
            return path
    downloader_controller = DownloadFile(
        dirs=dirs, logger=self.logger, args=self.args
    )
    downloader_controller.download_file(location, path)
    return path
def report(self, content):
    """Write benchmark data and metadata as pretty-printed JSON files.

    Files are written under
    <local_reporter>/<platform>/<framework>/<net>/<metric>/<identifier>/
    <commit-date>/<run-index>/, where <run-index> is the first unused
    integer so repeat runs never overwrite earlier results.

    :param content: dict with self.DATA (mapping of names to JSON-serializable
        payloads) and self.META (metadata dict).
    """
    data = content[self.DATA]
    if data is None or len(data) == 0:
        getLogger().info("No data to write")
        return
    meta = content[self.META]
    platform_name = meta[self.PLATFORM]
    # Sanitize every path component so arbitrary names are filesystem-safe.
    components = [
        getFilename(platform_name),
        getFilename(meta["framework"]),
        getFilename(meta["net_name"]),
        getFilename(meta["metric"]),
        getFilename(meta["identifier"]),
        getDirectory(meta["commit"], float(meta["commit_time"])),
    ]
    base = os.path.join(self.local_reporter, *components)
    # First unused numeric suffix becomes this run's directory.
    run_index = 0
    while os.path.exists(os.path.join(base, str(run_index))):
        run_index += 1
    dirname = os.path.join(base, str(run_index))
    os.makedirs(dirname)
    for key in data:
        out_path = os.path.join(dirname, getFilename(key) + ".txt")
        with open(out_path, "w") as out_file:
            out_file.write(json.dumps(data[key], indent=2, sort_keys=True))
    meta_path = os.path.join(dirname, getFilename(self.META) + ".txt")
    with open(meta_path, "w") as out_file:
        out_file.write(json.dumps(meta, indent=2, sort_keys=True))
    pname = platform_name
    if "platform_hash" in meta:
        pname = "{} ({})".format(pname, meta["platform_hash"])
    getLogger().info("Writing file for {}: {}".format(pname, dirname))
def report(self, content):
    """Write benchmark data once into <simple_local_reporter>/<identifier>/.

    The target directory must not already exist (the simple reporter keeps
    exactly one entry per identifier); data is serialized to data.txt.

    :param content: dict with self.DATA (payload) and self.META (metadata).
    :raises AssertionError: if the identifier directory already exists.
    """
    data = content[self.DATA]
    if data is None or len(data) == 0:
        getLogger().info("No data to write")
        return
    meta = content[self.META]
    # Trailing "/" kept on purpose: the historical layout built paths by
    # plain concatenation, and the logged dirname string must not change.
    dirname = (
        getArgs().simple_local_reporter
        + "/"
        + getFilename(meta["identifier"])
        + "/"
    )
    assert not os.path.exists(dirname), \
        "Simple local reporter should not have multiple entries"
    os.makedirs(dirname)
    with open(dirname + "/data.txt", 'w') as out_file:
        out_file.write(json.dumps(data))
    pname = meta[self.PLATFORM]
    if "platform_hash" in meta:
        pname += " ({})".format(meta["platform_hash"])
    getLogger().info("Writing file for {}: {}".format(pname, dirname))
def report(self, content):
    """Write benchmark data and metadata as pretty-printed JSON files.

    Output lands under
    <local_reporter>/<platform>/<framework>/<net>/<metric>/<identifier>/
    <commit-date>/<run-index>/, with the run index chosen as the first
    unused integer so repeat runs never overwrite earlier results.

    :param content: dict with self.DATA (mapping of names to payloads)
        and self.META (metadata dict).
    """
    data = content[self.DATA]
    if data is None or len(data) == 0:
        getLogger().info("No data to write")
        return
    meta = content[self.META]
    platform_name = meta[self.PLATFORM]
    # Each component keeps its trailing "/" so plain string concatenation
    # reproduces the historical directory layout byte-for-byte.
    subpath = "".join([
        getFilename(platform_name) + "/",
        getFilename(meta["framework"]) + "/",
        getFilename(meta['net_name']) + "/",
        getFilename(meta['metric']) + "/",
        getFilename(meta["identifier"]) + "/",
        getDirectory(meta['commit'], float(meta['commit_time'])),
    ])
    dirname = getArgs().local_reporter + "/" + subpath
    # First unused numeric suffix becomes this run's directory.
    run = 0
    while os.path.exists(dirname + str(run)):
        run += 1
    dirname = dirname + str(run) + "/"
    os.makedirs(dirname)
    for key in data:
        with open(dirname + getFilename(key) + ".txt", 'w') as out_file:
            out_file.write(json.dumps(data[key], indent=2, sort_keys=True))
    with open(dirname + getFilename(self.META) + ".txt", 'w') as out_file:
        out_file.write(json.dumps(meta, indent=2, sort_keys=True))
    pname = platform_name
    if "platform_hash" in meta:
        pname += " ({})".format(meta["platform_hash"])
    getLogger().info("Writing file for {}: {}".format(pname, dirname))
def setPlatform(self, platform):
    """Store the platform name on self, sanitized through getFilename.

    :param platform: raw platform name; getFilename presumably replaces
        characters unsafe for filenames — confirm against its definition.
    """
    self.platform = getFilename(platform)
def setPlatform(self, platform):
    """Store the sanitized platform name, preferring a hash-mapped override.

    The raw name is passed through getFilename; when a hash-to-platform
    mapping is configured and contains this device's platform hash, the
    mapped name wins.

    :param platform: raw platform name string.
    """
    self.platform = getFilename(platform)
    mapping = self.hash_platform_mapping
    if mapping and self.platform_hash in mapping:
        self.platform = mapping[self.platform_hash]