def run(self, testName, server, javaExeClass, cmdArgs, logFile):
    """Run a compiled java bench, parse its log, and return metrics.

    Returns a tuple (bytesIndexed, indexTimeSec, docsIndexed, times,
    garbage, peak) where times/garbage/peak are dicts keyed by the
    collector/pool name captured by the module-level regexes.
    """
    cmd = self.get_run_command(server, javaExeClass, cmdArgs)
    utils.info('Running %s bench. Logging at %s' % (testName, logFile))
    utils.info('Executing: %s' % ' '.join(cmd))
    t0 = time.time()
    utils.runComand(testName, cmd, logFile)
    t1 = time.time() - t0

    # Read the log once inside a with-block. The original opened the file
    # twice and never closed the first handle (open(logFile).read()).
    with open(logFile) as f:
        s = f.read()
    bytesIndexed = int(reBytesIndexed.search(s).group(1))
    indexTimeSec = int(reIndexingTime.search(s).group(1)) / 1000.0

    # Extract GC stats line by line from the already-read log text.
    times = {}    # collector name -> seconds spent in GC
    garbage = {}  # collector name -> amount of garbage collected
    peak = {}     # pool name -> peak usage
    for line in s.splitlines():
        m = reTimeIn.search(line)
        if m is not None:
            times[m.group(1)] = float(m.group(2)) / 1000.
        m = reGarbageIn.search(line)
        if m is not None:
            garbage[m.group(1)] = float(m.group(2))
        m = rePeakUsage.search(line)
        if m is not None:
            peak[m.group(1)] = float(m.group(2))

    utils.info(' took %.1f sec by client' % indexTimeSec)
    utils.info(' took %.1f sec total' % t1)
    docsIndexed = server.get_num_found(constants.SOLR_COLLECTION_NAME)
    return bytesIndexed, indexTimeSec, docsIndexed, times, garbage, peak
def compile(self, server, runLogDir):
    """Compile this bench's java sources against the server's jars."""
    buildDir = self.build_dir()
    if not os.path.exists(buildDir):
        os.makedirs(buildDir)
    # The solr/lucene jars must be on the classpath to resolve imports.
    cmd = ['javac', '-d', buildDir, '-classpath', ':'.join(server.get_jars())]
    cmd.extend(self.src_files())
    utils.info('Running: %s' % ' '.join(cmd))
    utils.runComand('javac', cmd,
                    os.path.join(runLogDir, 'java-bench-compile.log.txt'))
def compile(self, server, runLogDir):
    """Build the java bench classes into build_dir() using the server jars."""
    outDir = self.build_dir()
    if not os.path.exists(outDir):
        os.makedirs(outDir)
    classpath = ':'.join(server.get_jars())
    # Sources are appended after the javac options.
    cmd = ['javac', '-d', outDir, '-classpath', classpath] + self.src_files()
    utils.info('Running: %s' % ' '.join(cmd))
    logPath = os.path.join(runLogDir, 'java-bench-compile.log.txt')
    utils.runComand('javac', cmd, logPath)
def create_collection(self, runLogDir, collection, num_shards='1',
                      replication_factor='1',
                      config='data_driven_schema_configs'):
    """Create a Solr collection via bin/solr, restoring the cwd afterwards."""
    prevDir = os.getcwd()
    try:
        # bin/solr is run from inside the extracted solr directory.
        os.chdir(self.extract_dir)
        cmd = ['%s/bin/solr' % self.extract_dir, 'create_collection',
               '-p', self.port,
               '-c', collection,
               '-shards', num_shards,
               '-replicationFactor', replication_factor,
               '-d', config]
        utils.info('Creating collection with command: %s' % ' '.join(cmd))
        utils.runComand('solr create_collection', cmd,
                        '%s/create-collection%s.log.txt' % (runLogDir, self.name))
    finally:
        os.chdir(prevDir)
def run_simple_bench(start, tgz, runLogDir, perfFile): server = SolrServer(tgz, '%s/simple' % constants.BENCH_DIR, example='schemaless', memory='2g') server.extract(runLogDir) try: server.start(runLogDir) time.sleep(5) solrMajorVersion, solrImplVersion = server.get_version() cmd = [ '%s/bin/post' % server.extract_dir, '-c', constants.SOLR_COLLECTION_NAME, constants.IMDB_DATA_FILE ] logFile = '%s/simpleIndexer.log.txt' % runLogDir utils.info('Running simple bench. Logging at: %s' % logFile) utils.info('Executing: %s' % ' '.join(cmd)) t0 = time.time() utils.runComand('binpost', cmd, logFile) t1 = time.time() - t0 bytesIndexed = os.stat(constants.IMDB_DATA_FILE).st_size docsIndexed = utils.get_num_found(constants.SOLR_COLLECTION_NAME) if docsIndexed != constants.IMDB_NUM_DOCS: raise RuntimeError( 'Indexed num_docs do not match expected %d != found %d' % (constants.IMDB_NUM_DOCS, docsIndexed)) print ' %.1f s' % (t1) if not NOREPORT: with open(perfFile, 'a+') as f: timeStampLoggable = '%04d-%02d-%02d %02d:%02d:%02d' % ( start.year, start.month, start.day, start.hour, start.minute, start.second) f.write('%s,%d,%d,%.1f,%s,%s\n' % (timeStampLoggable, bytesIndexed, docsIndexed, t1, solrMajorVersion, solrImplVersion)) return bytesIndexed, docsIndexed, t1 finally: server.stop() time.sleep(5)
def create_collection(self, runLogDir, collection, num_shards='1',
                      replication_factor='1',
                      config='data_driven_schema_configs'):
    """Run `bin/solr create_collection` for *collection* on this server."""
    savedCwd = os.getcwd()
    try:
        os.chdir(self.extract_dir)
        cmd = ['%s/bin/solr' % self.extract_dir, 'create_collection']
        cmd += ['-p', self.port]
        cmd += ['-c', collection]
        cmd += ['-shards', num_shards]
        cmd += ['-replicationFactor', replication_factor]
        cmd += ['-d', config]
        utils.info('Creating collection with command: %s' % ' '.join(cmd))
        logPath = '%s/create-collection%s.log.txt' % (runLogDir, self.name)
        utils.runComand('solr create_collection', cmd, logPath)
    finally:
        # Restore whatever directory the caller was in.
        os.chdir(savedCwd)
def start(self, runLogDir):
    """Start the solr server via bin/solr, restoring the cwd afterwards."""
    savedCwd = os.getcwd()
    try:
        os.chdir(self.extract_dir)
        cmd = ['%s/bin/solr' % self.extract_dir, 'start', '-p', self.port]
        # Optional flags, emitted in the same order as before.
        for flag, value in (('-h', self.host), ('-m', self.memory)):
            if value is not None:
                cmd.extend([flag, value])
        if self.zk_host is not None:
            # cloud mode: -c plus the zookeeper connect string
            cmd.extend(['-c', '-z', self.zk_host])
        for flag, value in (('-d', self.server_dir),
                            ('-s', self.solr_home),
                            ('-e', self.example)):
            if value is not None:
                cmd.extend([flag, value])
        if self.jvm_args is not None:
            # NOTE(review): appended as a single argv token — assumes
            # jvm_args holds one argument; confirm with callers.
            cmd.append(self.jvm_args)
        utils.info('Running solr with command: %s' % ' '.join(cmd))
        utils.runComand('solr server', cmd,
                        '%s/server%s.log.txt' % (runLogDir, self.name))
    finally:
        os.chdir(savedCwd)
def run_simple_bench(start, tgz, runLogDir, perfFile): server = SolrServer(tgz, '%s/simple' % constants.BENCH_DIR, example='schemaless', memory='2g') server.extract(runLogDir) try: server.start(runLogDir) time.sleep(5) solrMajorVersion, solrImplVersion = server.get_version() cmd = ['%s/bin/post' % server.extract_dir, '-c', constants.SOLR_COLLECTION_NAME, constants.IMDB_DATA_FILE] logFile = '%s/simpleIndexer.log.txt' % runLogDir utils.info('Running simple bench. Logging at: %s' % logFile) utils.info('Executing: %s' % ' '.join(cmd)) t0 = time.time() utils.runComand('binpost', cmd, logFile) t1 = time.time() - t0 bytesIndexed = os.stat(constants.IMDB_DATA_FILE).st_size docsIndexed = utils.get_num_found(constants.SOLR_COLLECTION_NAME) if docsIndexed != constants.IMDB_NUM_DOCS: raise RuntimeError( 'Indexed num_docs do not match expected %d != found %d' % (constants.IMDB_NUM_DOCS, docsIndexed)) print ' %.1f s' % (t1) if not NOREPORT: with open(perfFile, 'a+') as f: timeStampLoggable = '%04d-%02d-%02d %02d:%02d:%02d' % ( start.year, start.month, start.day, start.hour, start.minute, start.second) f.write('%s,%d,%d,%.1f,%s,%s\n' % ( timeStampLoggable, bytesIndexed, docsIndexed, t1, solrMajorVersion, solrImplVersion)) return bytesIndexed, docsIndexed, t1 finally: server.stop() time.sleep(5)