def _ReadTestDataStoreFiles() -> datastore_pb2.DataStoreTestSet:
  """Read the config protos for testing.

  The datastore names are derived from the file names.

  Returns:
    A DataStoreTestSet instance.

  Raises:
    AssertionError: In case of error reading datastore configs.
  """
  config_dir = pathlib.Path('deeplearning/deepsmith/tests/data/datastores')
  config_paths = list(config_dir.iterdir())
  assert config_paths
  stems = [path.stem for path in config_paths]
  configs = [
      pbutil.FromFile(path, datastore_pb2.DataStore()) for path in config_paths
  ]
  test_set = datastore_pb2.DataStoreTestSet()
  for stem, config in zip(stems, configs):
    # There's no graceful error handling here, but it's important that we don't
    # run tests on a datastore unless it's specifically marked as testonly.
    assert config.testonly
    test_set.values[stem].MergeFrom(config)
  assert len(test_set.values) == len(configs) == len(stems) == len(config_paths)
  return test_set
def main(argv):
  """Generate testcases until the datastore holds the target total.

  Args:
    argv: Command-line arguments.

  Raises:
    app.UsageError: If unrecognized arguments are passed, or if
      --generator_batch_size is not positive.
  """
  if len(argv) > 1:
    raise app.UsageError('Unrecognized arguments')
  if FLAGS.generator_batch_size <= 0:
    raise app.UsageError('--generator_batch_size must be positive')
  datastore_config = services.ServiceConfigFromFlag(
      'datastore_config', datastore_pb2.DataStore())
  generator_config = services.ServiceConfigFromFlag(
      'generator_config', generator_pb2.ClgenGenerator())
  datastore_stub = services.GetServiceStub(
      datastore_config, datastore_pb2_grpc.DataStoreServiceStub)
  generator_stub = services.GetServiceStub(
      generator_config, generator_pb2_grpc.GeneratorServiceStub)
  target_total_testcases = FLAGS.target_total_testcases
  generator_batch_size = FLAGS.generator_batch_size
  capabilities = GetGeneratorCapabilities(generator_stub)
  while True:
    num_testcases = GetNumberOfTestcasesInDataStore(
        datastore_stub, capabilities)
    # BUG FIX: this was an f-string carrying a %d placeholder. The f-prefix
    # did nothing (no braces) and contradicted the lazy %-arg style used by
    # every other logging call here; use a plain template string.
    logging.info('Number of testcases in datastore: %d', num_testcases)
    # A negative target means "generate forever"; otherwise stop once the
    # datastore has reached the target count.
    if 0 <= target_total_testcases <= num_testcases:
      logging.info('Stopping generation with %d testcases in the DataStore.',
                   num_testcases)
      break
    num_to_generate = generator_batch_size
    if target_total_testcases >= 0:
      # Shrink the final batch so we do not overshoot the target.
      num_to_generate = min(
          generator_batch_size, target_total_testcases - num_testcases)
    testcases = GenerateTestcases(generator_stub, num_to_generate)
    SubmitTestcases(datastore_stub, testcases)
def ToProto(self) -> datastore_pb2.DataStore:
  """Get the Protocol Buffer representation of the datastore.

  Returns:
    A DataStore message instance.
  """
  # Hand back a defensive copy so callers cannot mutate the internal config.
  snapshot = datastore_pb2.DataStore()
  snapshot.CopyFrom(self._config)
  return snapshot
def FromFile(cls, path: pathlib.Path) -> "DataStore":
  """Instantiate a DataStore from a config file.

  Args:
    path: Path to the datastore config proto file.

  Returns:
    A DataStore instance.
  """
  config = pbutil.FromFile(path, datastore_pb2.DataStore())
  # BUG FIX: construct via `cls` rather than the hard-coded class name so
  # that subclasses calling FromFile() receive instances of the subclass.
  return cls(config)
def main(argv):
  """Run datastore testcases on every testbed the harness reports.

  Args:
    argv: Command-line arguments.

  Raises:
    app.UsageError: If unrecognized arguments are passed, or if
      --harness_batch_size is not positive.
  """
  if len(argv) > 1:
    raise app.UsageError("Unrecognized arguments")
  if FLAGS.harness_batch_size <= 0:
    raise app.UsageError("--harness_batch_size must be positive")

  datastore_config = services.ServiceConfigFromFlag(
      "datastore_config", datastore_pb2.DataStore())
  harness_config = services.ServiceConfigFromFlag(
      "harness_config", harness_pb2.CldriveHarness())
  datastore_stub = services.GetServiceStub(
      datastore_config, datastore_pb2_grpc.DataStoreServiceStub)
  harness_stub = services.GetServiceStub(
      harness_config, harness_pb2_grpc.HarnessServiceStub)

  target_total_results = FLAGS.target_total_results
  harness_batch_size = FLAGS.harness_batch_size
  capabilities = GetHarnessCapabilities(harness_stub)
  pending = collections.deque(capabilities.testbeds)
  # Guard clause: with no testbeds there is no work at all.
  if not pending:
    app.Warning("No testbeds, nothing to do!")
    return
  app.Log(
      1,
      "%d testbeds: %s",
      len(capabilities.testbeds),
      ", ".join(x.name for x in capabilities.testbeds),
  )
  while pending:
    testbed = pending.popleft()
    testcases = GetTestcasesToRun(
        datastore_stub,
        capabilities.harness,
        testbed,
        target_total_results,
        harness_batch_size,
    )
    app.Log(
        1,
        "Received %d testcases to execute on %s",
        len(testcases),
        testbed.name,
    )
    if testcases:
      results = RunTestcases(harness_stub, testbed, testcases)
      SubmitResults(datastore_stub, results)
      # If there are testcases to run, then we add it back to the testbeds
      # queue, as there may be more.
      pending.append(testbed)
  app.Log(1, "done")
def main(argv):
  """Serve the DataStore gRPC service until interrupted.

  Args:
    argv: Command-line arguments.

  Raises:
    app.UsageError: If unrecognized arguments are passed.
  """
  if len(argv) > 1:
    raise app.UsageError('Unrecognized arguments')
  datastore_config = services.ServiceConfigFromFlag(
      'datastore_config', datastore_pb2.DataStore())
  server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
  # Refuse to serve unless the configured hostname resolves to this machine.
  services.AssertLocalServiceHostname(datastore_config.service)
  servicer = DataStore(datastore_config)
  datastore_pb2_grpc.add_DataStoreServiceServicer_to_server(servicer, server)
  server.add_insecure_port(f'[::]:{datastore_config.service.port}')
  logging.info('%s listening on %s:%s',
               type(servicer).__name__,
               datastore_config.service.hostname,
               datastore_config.service.port)
  server.start()
  try:
    # Sleep forever in day-long increments; Ctrl-C shuts the server down.
    while True:
      time.sleep(3600 * 24)
  except KeyboardInterrupt:
    server.stop(0)
def main(argv):
  """Run datastore testcases on every testbed the harness reports.

  Args:
    argv: Command-line arguments.

  Raises:
    app.UsageError: If unrecognized arguments are passed, or if
      --harness_batch_size is not positive.
  """
  if len(argv) > 1:
    raise app.UsageError('Unrecognized arguments')
  if FLAGS.harness_batch_size <= 0:
    raise app.UsageError('--harness_batch_size must be positive')

  datastore_config = services.ServiceConfigFromFlag(
      'datastore_config', datastore_pb2.DataStore())
  harness_config = services.ServiceConfigFromFlag(
      'harness_config', harness_pb2.CldriveHarness())
  datastore_stub = services.GetServiceStub(
      datastore_config, datastore_pb2_grpc.DataStoreServiceStub)
  harness_stub = services.GetServiceStub(
      harness_config, harness_pb2_grpc.HarnessServiceStub)

  target_total_results = FLAGS.target_total_results
  harness_batch_size = FLAGS.harness_batch_size
  capabilities = GetHarnessCapabilities(harness_stub)
  work_queue = collections.deque(capabilities.testbeds)
  # Guard clause: nothing to do without at least one testbed.
  if not work_queue:
    logging.warning('No testbeds, nothing to do!')
    return
  logging.info('%d testbeds: %s', len(capabilities.testbeds),
               ', '.join(x.name for x in capabilities.testbeds))
  while work_queue:
    testbed = work_queue.popleft()
    testcases = GetTestcasesToRun(datastore_stub, capabilities.harness,
                                  testbed, target_total_results,
                                  harness_batch_size)
    logging.info('Received %d testcases to execute on %s', len(testcases),
                 testbed.name)
    if testcases:
      results = RunTestcases(harness_stub, testbed, testcases)
      SubmitResults(datastore_stub, results)
      # If there are testcases to run, then we add it back to the testbeds
      # queue, as there may be more.
      work_queue.append(testbed)
  logging.info('done')