def get_mon_hosts(self):
    """Build the monitor map: {hostname: {mon_id: "addr:port"}}.

    The 'mons' cluster setting accepts three shapes:
      * str  - a single host; mon id 'a' and default port 6789 are assumed.
      * list - hosts in order; mon ids 'a'..'z' are assigned sequentially,
               default port 6789 is assumed.
      * dict - explicit {hostname: {mon_id: "ip:port"}} mapping.

    Raises:
        ValueError: if more than 26 monitors are listed, or the 'mons'
            setting is none of the supported shapes.
    """
    mon_hosts = {}
    mons_config = settings.cluster.get('mons')
    # For mons specified using the string/list representation, we'll
    # assume mon ids range from a-z in order and default ports are used.
    if isinstance(mons_config, str):
        host = mons_config
        info = settings.host_info(host)
        mon_str = '%s:6789' % info['addr']
        mon_hosts[host] = {'a': mon_str}
    elif isinstance(mons_config, list):
        mon_id = 'a'
        for host in mons_config:
            info = settings.host_info(host)
            mon_str = '%s:6789' % info['addr']
            mon_hosts[host] = {mon_id: mon_str}
            if ord(mon_id) < ord('z'):
                mon_id = chr(ord(mon_id) + 1)
            else:
                raise ValueError("CBT does not support 27+ monitors")
    # dict representation contains hostnames with mon_id / ip:port pair:
    #
    # localhost:
    #   a: "127.0.0.1:6789"
    #
    # BUG FIX: this previously tested isinstance(mon_hosts, dict), which is
    # always True (mon_hosts is the accumulator), so the else branch below
    # was unreachable and an unsupported config type crashed with
    # AttributeError instead of the intended ValueError.
    elif isinstance(mons_config, dict):
        for host, mon_config in mons_config.items():
            mon_hosts[host] = {}
            for mon_id, addr in mon_config.items():
                mon_hosts[host][mon_id] = addr
    else:
        # Report the offending config value, not the (empty) result dict.
        raise ValueError("Failed to parse monitor syntax: %r" % mons_config)
    return mon_hosts
def evaluate(self, baseline):
    """Compare this benchmark's per-client JSON results against *baseline*.

    Determines which runs were performed (prefill/write/read mode) from the
    instance's configuration flags, then for each run, client, and
    concurrent process compares the json_output files of this run and the
    baseline run.

    Args:
        baseline: another benchmark instance whose out_dir holds the
            reference results.

    Returns:
        A flat list of comparison result objects from
        _compare_client_results().
    """
    runs = []
    if self.prefill_time or self.prefill_objects:
        runs.append('prefill')
    if not self.read_only:
        runs.append('write')
    if not self.write_only:
        runs.append(self.readmode)

    results = []
    for run in runs:
        out_dirs = [os.path.join(self.out_dir, run),
                    os.path.join(baseline.out_dir, run)]
        for client in settings.getnodes('clients').split(','):
            host = settings.host_info(client)["host"]
            for proc in range(self.concurrent_procs):
                fname = 'json_output.{proc}.{host}'.format(proc=proc,
                                                           host=host)
                client_run = '{run}/{client}/{proc}'.format(run=run,
                                                            client=client,
                                                            proc=proc)
                fpaths = [os.path.join(d, fname) for d in out_dirs]
                # NOTE: a 'rejected' count used to be computed here but was
                # never used; removed as dead code.
                results.extend(
                    self._compare_client_results(client_run, fpaths))
    # TODO: check results from monitors
    return results
def evaluate(self, baseline):
    """Compare this benchmark's per-client results against *baseline*.

    Determines which runs were performed (prefill/write/read mode) from the
    instance's configuration flags, then for each run, client, and
    concurrent process builds a data analyzer for both this run and the
    baseline run and compares them.

    Args:
        baseline: another benchmark instance providing
            create_data_analyzer() for the reference results.

    Returns:
        A flat list of comparison result objects from
        _compare_client_results().
    """
    runs = []
    if self.prefill_time or self.prefill_objects:
        runs.append('prefill')
    if not self.read_only:
        runs.append('write')
    if not self.write_only:
        runs.append(self.readmode)

    results = []
    for run in runs:
        # out_dirs kept for parity with the path-based evaluate() variant;
        # the analyzers below locate their own files.
        out_dirs = [os.path.join(self.out_dir, run),
                    os.path.join(baseline.out_dir, run)]
        for client in settings.getnodes('clients').split(','):
            host = settings.host_info(client)["host"]
            for proc in range(self.concurrent_procs):
                self_analyzer = self.create_data_analyzer(run, host, proc)
                baseline_analyzer = baseline.create_data_analyzer(
                    run, host, proc)
                client_run = '{run}/{client}/{proc}'.format(run=run,
                                                            client=client,
                                                            proc=proc)
                # NOTE: a 'rejected' count used to be computed here but was
                # never used; removed as dead code.
                results.extend(
                    self._compare_client_results(client_run,
                                                 self_analyzer,
                                                 baseline_analyzer))
    # TODO: check results from monitors
    return results
def parse(self, out_dir):
    """Convert each client process's text output into a JSON summary.

    Scans output.<proc>.<host> for the summary section starting at the
    "Total time run" line, parses the "key: value" lines that follow
    (keys containing 'size' become ints, everything else floats), and
    dumps the resulting dict to json_output.<proc>.<host>.

    Args:
        out_dir: directory containing the per-process output files.

    Raises:
        Exception: if a summary value cannot be converted to int/float.
    """
    for client in settings.getnodes('clients').split(','):
        host = settings.host_info(client)["host"]
        for i in range(self.concurrent_procs):
            result = {}
            found = 0
            out_file = '%s/output.%s.%s' % (out_dir, i, host)
            json_out_file = '%s/json_output.%s.%s' % (out_dir, i, host)
            with open(out_file) as fd:
                for line in fd:
                    if found == 0:
                        if "Total time run" in line:
                            found = 1
                    if found == 1:
                        # Split on the first ':' only, so values that
                        # themselves contain ':' don't break unpacking.
                        key, val = (_.strip() for _ in line.split(":", 1))
                        try:
                            if 'size' in key:
                                result[key] = int(val)
                            else:
                                result[key] = float(val)
                        # Catch only the conversion failure (was a bare
                        # except) and chain the original cause.
                        except ValueError as err:
                            raise Exception(
                                "Conversion error for line: %s"
                                % line) from err
            with open(json_out_file, 'w') as json_fd:
                json.dump(result, json_fd)
def get_localnode(nodes):
    """Return the host from *nodes* that refers to this machine, or None.

    Similarly to `expanded_node_list(nodes)` we assume the passed nodes
    param is always a string. This is justified as the callers use `nodes`
    to supply the `-w ...` parameter of ssh during CheckedPopen() call.
    """
    hostname = socket.gethostname()
    # All names this machine may be known by: FQDN, full hostname,
    # and the short (first-label) hostname.
    local_names = {get_fqdn_local(), hostname, hostname.split('.')[0]}
    for node in expanded_node_list(nodes):
        candidate = settings.host_info(node)['host']
        if candidate in local_names:
            return candidate
    return None
def parse(self, out_dir):
    """Extract the result section from each client volume's output file.

    For every client/volume pair, copies the lines that follow a line
    containing "Starting" from output.<i>.<host> into
    json_output.<i>.<host>, stopping at the first blank line.
    """
    for client in settings.getnodes('clients').split(','):
        host = settings.host_info(client)["host"]
        for i in range(self.volumes_per_client):
            src = '%s/output.%d.%s' % (out_dir, i, host)
            dst = '%s/json_output.%d.%s' % (out_dir, i, host)
            with open(src) as fd, open(dst, 'w') as json_fd:
                copying = False
                for line in fd:
                    # A blank line ends the file's useful content.
                    if not line.strip():
                        break
                    if copying:
                        json_fd.write(line)
                    elif "Starting" in line:
                        # The marker line itself is not copied.
                        copying = True
def parse(self, out_dir):
    """Convert each client process's text summary into a JSON file.

    Scans output.<proc>.<host> for the summary section starting at the
    "Total time run" line and stores every subsequent "key: value" line
    (both sides stripped, values kept as strings) into a dict, which is
    dumped to json_output.<proc>.<host>.

    Args:
        out_dir: directory containing the per-process output files.
    """
    for client in settings.getnodes('clients').split(','):
        host = settings.host_info(client)["host"]
        for i in range(self.concurrent_procs):
            result = {}
            found = 0
            out_file = '%s/output.%s.%s' % (out_dir, i, host)
            json_out_file = '%s/json_output.%s.%s' % (out_dir, i, host)
            with open(out_file) as fd:
                for line in fd:
                    if found == 0:
                        if "Total time run" in line:
                            found = 1
                    if found == 1:
                        # Split on the first ':' only — values containing
                        # ':' previously broke the two-way unpacking.
                        key, val = line.strip().split(":", 1)
                        result[key.strip()] = val.strip()
            with open(json_out_file, 'w') as json_fd:
                json.dump(result, json_fd)
def analyze(self, out_dir):
    """Extract the result section of each endpoint's output into JSON files.

    For every client/endpoint pair, copies the lines that follow a line
    containing "Starting" from output.<i>.<host> into
    json_output.<i>.<host>, stopping at the first blank line.

    Args:
        out_dir: directory containing the per-endpoint output files.
    """
    logger.info('Convert results to json format.')
    for client in settings.getnodes('clients').split(','):
        host = settings.host_info(client)["host"]
        # BUG FIX: was xrange(), which does not exist on Python 3 and
        # raised NameError; the rest of this file uses range().
        for i in range(self.endpoints_per_client):
            found = 0
            out_file = '%s/output.%d.%s' % (out_dir, i, host)
            json_out_file = '%s/json_output.%d.%s' % (out_dir, i, host)
            with open(out_file) as fd:
                with open(json_out_file, 'w') as json_fd:
                    for line in fd.readlines():
                        # A blank line ends the useful content.
                        if len(line.strip()) == 0:
                            found = 0
                            break
                        if found == 1:
                            json_fd.write(line)
                        if found == 0:
                            # The "Starting" marker line itself is skipped.
                            if "Starting" in line:
                                found = 1