def setUpClass(cls):
    cls.numReferences = 25
    cls.backend = backend.Backend(datarepo.SimulatedDataRepository(
        randomSeed=100, numDatasets=0, numReferenceSets=1,
        numReferencesPerReferenceSet=cls.numReferences))
    cls.dataRepo = cls.backend.getDataRepository()
def setUp(self):
    self._maxDiff = None
    self._dataDir = paths.testDataDir
    self._dataUrl = "file://{}".format(self._dataDir)
    dataRepository = datarepo.FileSystemDataRepository(self._dataDir)
    self._backend = backend.Backend(dataRepository)
    self._client = client.LocalClient(self._backend)
def setUpClass(cls):
    cls.backend = backend.Backend(datarepo.SimulatedDataRepository(
        randomSeed=100, numDatasets=3, numVariantSets=3, numCalls=3,
        variantDensity=0.5, numReferenceSets=3,
        numReferencesPerReferenceSet=3, numReadGroupSets=3,
        numReadGroupsPerReadGroupSet=3, numAlignments=3))
    cls.dataRepo = cls.backend.getDataRepository()
def setUp(self):
    self._maxDiff = None
    repoPath = paths.testDataRepo
    self._dataUrl = "file://{}".format(repoPath)
    dataRepository = datarepo.SqlDataRepository(repoPath)
    dataRepository.open(datarepo.MODE_READ)
    self._backend = backend.Backend(dataRepository)
    self._client = client.LocalClient(self._backend)
def setUp(self):
    self.request = protocol.SearchReadsRequest()
    self.backend = backend.Backend(
        datarepo.SimulatedDataRepository(numAlignments=0))
    dataRepo = self.backend.getDataRepository()
    referenceSet = dataRepo.getReferenceSetByIndex(0)
    reference = referenceSet.getReferenceByIndex(0)
    self.request.reference_id = reference.getId()
    self.dataset = dataRepo.getDatasets()[0]
    self.readGroupSet = self.dataset.getReadGroupSets()[0]
def __init__(self, localId, dataPath):
    self._backend = backend.Backend(datarepo.AbstractDataRepository())
    self._referenceSet = None
    self._dataset = datasets.AbstractDataset("ds")
    self._readGroupInfos = {}
    self._readGroupSetInfo = None
    self._samFile = pysam.AlignmentFile(dataPath)
    self._readReferences()
    super(ReadGroupSetTest, self).__init__(localId, dataPath)
    self._readAlignmentInfo()
def setUp(self):
    global _wormtableTestFixture
    self._dataDir = _wormtableTestFixture.dataDir
    self._tables = {}
    self._chromIndexes = {}
    self._chromPosIndexes = {}
    for relativePath in os.listdir(self._dataDir):
        table = wt.open_table(os.path.join(self._dataDir, relativePath))
        self._tables[relativePath] = table
        self._chromIndexes[relativePath] = table.open_index("CHROM")
        self._chromPosIndexes[relativePath] = table.open_index("CHROM+POS")
    self._backend = backend.Backend(
        self._dataDir, variants.WormtableVariantSet)
def setUp(self):
    class FakeRequest(object):
        pass

    class FakeTopLevelObject(object):
        def toProtocolElement(self):
            return self

    self.request = FakeRequest()
    self.request.pageToken = None
    self.numObjects = 3
    self.objects = [FakeTopLevelObject() for j in range(self.numObjects)]
    self.backend = backend.Backend(datarepo.AbstractDataRepository())
def server_main(parser=None):
    if parser is None:
        parser = argparse.ArgumentParser(
            description="GA4GH reference server")
    # Add global options
    parser.add_argument(
        "--port", "-P", default=8000, type=int,
        help="The port to listen on")
    parser.add_argument(
        "--config", "-C", default='DefaultConfig', type=str,
        help="The configuration to use")
    parser.add_argument(
        "--config-file", "-F", type=str,
        help="The configuration file to use")
    subparsers = parser.add_subparsers(title='subcommands')
    # help
    helpParser = subparsers.add_parser(
        "help", description="ga4gh_server help",
        help="show this help message and exit")
    # Wormtable backend
    wtbParser = subparsers.add_parser(
        "wormtable",
        description="Serve the API using a wormtable based backend.",
        help="Serve data from tables.")
    wtbParser.add_argument(
        "dataDir",
        help="The directory containing the wormtables to be served.")
    wtbParser.set_defaults(variantSetClass=variants.WormtableVariantSet)
    # Tabix
    tabixParser = subparsers.add_parser(
        "tabix",
        description="Serve the API using a tabix based backend.",
        help="Serve data from Tabix indexed VCFs")
    tabixParser.add_argument(
        "dataDir", help="The directory containing VCFs")
    tabixParser.set_defaults(variantSetClass=variants.TabixVariantSet)
    args = parser.parse_args()
    if "variantSetClass" not in args:
        parser.print_help()
    else:
        frontend.configure(args.config, args.config_file)
        frontend.app.backend = backend.Backend(
            args.dataDir, args.variantSetClass)
        frontend.app.run(host="0.0.0.0", port=args.port, debug=True)
def __init__(self, args):
    self._key = args.key
    # TODO this is an experimental addition which is useful for
    # testing. We should think about this and document it if we
    # think it's a useful feature. There is an argument for pushing
    # the backend instantiation into the client, and letting the
    # client be a factory, instantiating the correct Client class
    # depending on the prefix.
    filePrefix = "file://"
    if args.baseUrl.startswith(filePrefix):
        dataDir = args.baseUrl[len(filePrefix):]
        theBackend = backend.Backend(
            datarepo.FileSystemDataRepository(dataDir))
        self._client = client.LocalClient(theBackend)
    else:
        self._client = client.HttpClient(
            args.baseUrl, verbosityToLogLevel(args.verbose), self._key)
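# A hedged usage sketch, not part of the original module: the enclosing
# class is not named in this fragment, so "QueryRunner" below is an assumed
# placeholder. It only illustrates how the baseUrl prefix above selects a
# LocalClient (over a FileSystemDataRepository) versus an HttpClient.
import argparse

localArgs = argparse.Namespace(
    key=None, verbose=0,
    baseUrl="file://ga4gh-example-data")   # would yield client.LocalClient
remoteArgs = argparse.Namespace(
    key=None, verbose=0,
    baseUrl="http://localhost:8000")       # would yield client.HttpClient
# runner = QueryRunner(localArgs)    # hypothetical subclass of the fragment
# runner = QueryRunner(remoteArgs)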
def setUp(self):
    self.request = protocol.SearchVariantsRequest()
    self.backend = backend.Backend(datarepo.SimulatedDataRepository())
    self.dataset = self.backend.getDataRepository().getDatasets()[0]
        metavar='N', help='how many pages (max) to load '
        'from each test case (default: %(default)s)')
    parser.add_argument(
        "--callSetIds", "-c", default=[],
        help="""Return variant calls which belong to call sets
            with these IDs. Pass in IDs as a comma separated list (no
            spaces), or '*' (with the single quotes!) to indicate 'all
            call sets'. Omit this option to indicate 'no call sets'.
            """)
    args = parser.parse_args()
    dataDir = "ga4gh-example-data"
    backend = backend.Backend(datarepo.FileSystemDataRepository(dataDir))
    if args.profile == 'heap':
        backendClass = HeapProfilerBackend
        backend = backendClass(dataDir)
        args.repeatLimit = 1
        args.pageLimit = 1
    elif args.profile == 'cpu':
        backendClass = CpuProfilerBackend
        backend = backendClass(dataDir)
    # Get our list of callSetids
    callSetIds = args.callSetIds
    if callSetIds != []:
        callSetIds = None
        if args.callSetIds != "*":
            callSetIds = args.callSetIds.split(",")
def setUp(self):
    self._wtTestFixture = WormtableTestFixture()
    self._wtTestFixture.setUp()
    self.setUpServer(backend.Backend(
        self._wtTestFixture.dataDir, variants.WormtableVariantSet))
def setUp(self):
    self._variantSetName = "testVariantSet"
    self._backend = backend.Backend(datarepo.AbstractDataRepository())
    self._dataset = datasets.AbstractDataset(self._backend)
    self._variantSet = variants.AbstractVariantSet(
        self._dataset, self._variantSetName)
def setUp(self):
    self._featureSetName = "testFeatureSet"
    self._backend = backend.Backend(datarepo.AbstractDataRepository())
    self._dataset = datasets.AbstractDataset(self._backend)
    self._featureSet = features.AbstractFeatureSet(
        self._dataset, self._featureSetName)
def configure(configFile=None, baseConfig="ProductionConfig",
              port=8000, extraConfig={}):
    """
    TODO Document this critical function! What does it do? What does
    it assume?
    """
    configStr = 'ga4gh.serverconfig:{0}'.format(baseConfig)
    app.config.from_object(configStr)
    if os.environ.get('GA4GH_CONFIGURATION') is not None:
        app.config.from_envvar('GA4GH_CONFIGURATION')
    if configFile is not None:
        app.config.from_pyfile(configFile)
    app.config.update(extraConfig.items())
    # Setup file handle cache max size
    datamodel.fileHandleCache.setMaxCacheSize(
        app.config["FILE_HANDLE_CACHE_MAX_SIZE"])
    # Setup CORS
    cors.CORS(app, allow_headers='Content-Type')
    app.serverStatus = ServerStatus()
    # Allocate the backend
    # We use URLs to specify the backend. Currently we have file:// URLs (or
    # URLs with no scheme) for the FileSystemBackend, and special empty:// and
    # simulated:// URLs for empty or simulated data sources.
    dataSource = urlparse.urlparse(app.config["DATA_SOURCE"], "file")
    if dataSource.scheme == "simulated":
        # Ignore the query string
        randomSeed = app.config["SIMULATED_BACKEND_RANDOM_SEED"]
        numCalls = app.config["SIMULATED_BACKEND_NUM_CALLS"]
        variantDensity = app.config["SIMULATED_BACKEND_VARIANT_DENSITY"]
        numVariantSets = app.config["SIMULATED_BACKEND_NUM_VARIANT_SETS"]
        numReferenceSets = app.config[
            "SIMULATED_BACKEND_NUM_REFERENCE_SETS"]
        numReferencesPerReferenceSet = app.config[
            "SIMULATED_BACKEND_NUM_REFERENCES_PER_REFERENCE_SET"]
        numAlignmentsPerReadGroup = app.config[
            "SIMULATED_BACKEND_NUM_ALIGNMENTS_PER_READ_GROUP"]
        dataRepository = datarepo.SimulatedDataRepository(
            randomSeed=randomSeed, numCalls=numCalls,
            variantDensity=variantDensity, numVariantSets=numVariantSets,
            numReferenceSets=numReferenceSets,
            numReferencesPerReferenceSet=numReferencesPerReferenceSet,
            numAlignments=numAlignmentsPerReadGroup)
    elif dataSource.scheme == "empty":
        dataRepository = datarepo.EmptyDataRepository()
    elif dataSource.scheme == "file":
        dataRepository = datarepo.FileSystemDataRepository(os.path.join(
            dataSource.netloc, dataSource.path))
        dataRepository.checkConsistency()
    else:
        raise exceptions.ConfigurationException(
            "Unsupported data source scheme: " + dataSource.scheme)
    theBackend = backend.Backend(dataRepository)
    theBackend.setRequestValidation(app.config["REQUEST_VALIDATION"])
    theBackend.setResponseValidation(app.config["RESPONSE_VALIDATION"])
    theBackend.setDefaultPageSize(app.config["DEFAULT_PAGE_SIZE"])
    theBackend.setMaxResponseLength(app.config["MAX_RESPONSE_LENGTH"])
    app.backend = theBackend
    app.secret_key = os.urandom(SECRET_KEY_LENGTH)
    app.oidcClient = None
    app.tokenMap = None
    app.myPort = port
    if "OIDC_PROVIDER" in app.config:
        # The oic client. If we're testing, we don't want to verify
        # SSL certificates
        app.oidcClient = oic.oic.Client(
            verify_ssl=('TESTING' not in app.config))
        app.tokenMap = {}
        try:
            app.oidcClient.provider_config(app.config['OIDC_PROVIDER'])
        except requests.exceptions.ConnectionError:
            configResponse = message.ProviderConfigurationResponse(
                issuer=app.config['OIDC_PROVIDER'],
                authorization_endpoint=app.config['OIDC_AUTHZ_ENDPOINT'],
                token_endpoint=app.config['OIDC_TOKEN_ENDPOINT'],
                revocation_endpoint=app.config['OIDC_TOKEN_REV_ENDPOINT'])
            app.oidcClient.handle_provider_config(
                configResponse, app.config['OIDC_PROVIDER'])
        # The redirect URI comes from the configuration.
        # If we are testing, then we allow the automatic creation of a
        # redirect uri if none is configured
        redirectUri = app.config.get('OIDC_REDIRECT_URI')
        if redirectUri is None and 'TESTING' in app.config:
            redirectUri = 'https://{0}:{1}/oauth2callback'.format(
                socket.gethostname(), app.myPort)
        if redirectUri is None:
            raise exceptions.ConfigurationException(
                'OIDC configuration requires a redirect uri')
        app.oidcClient.redirect_uris = [redirectUri]
        # We only support dynamic registration while testing.
        if ('registration_endpoint' in app.oidcClient.provider_info and
                'TESTING' in app.config):
            app.oidcClient.register(
                app.oidcClient.provider_info["registration_endpoint"],
                redirect_uris=[redirectUri])
        else:
            response = message.RegistrationResponse(
                client_id=app.config['OIDC_CLIENT_ID'],
                client_secret=app.config['OIDC_CLIENT_SECRET'],
                redirect_uris=[redirectUri],
                verify_ssl=False)
            app.oidcClient.store_registration_info(response)
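# A minimal configuration sketch (assumed values, not from the original
# code base) showing the keys that configure() reads above. The DATA_SOURCE
# URL scheme selects the repository type: "simulated://" builds a
# SimulatedDataRepository, "empty://" an EmptyDataRepository, and a
# file:// URL (or a bare path) a FileSystemDataRepository.
DATA_SOURCE = "file:///srv/ga4gh-example-data"  # or "simulated://", "empty://"
REQUEST_VALIDATION = False
RESPONSE_VALIDATION = False
DEFAULT_PAGE_SIZE = 100
MAX_RESPONSE_LENGTH = 2 ** 20
FILE_HANDLE_CACHE_MAX_SIZE = 50
# With such a module saved as, say, myconfig.py (hypothetical name), the app
# could then be configured via:
#     configure(configFile="myconfig.py", baseConfig="ProductionConfig")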
def setUp(self):
    dataRepository = datarepo.FileSystemDataRepository(paths.testDataDir)
    self._backend = backend.Backend(dataRepository)
    self._client = client.LocalClient(self._backend)
args = parser.parse_args()
registryDb = "ga4gh-example-data/registry.db"
if args.profile == 'heap':
    backendClass = HeapProfilerBackend
    backend = backendClass(registryDb)
    args.repeatLimit = 1
    args.pageLimit = 1
elif args.profile == 'cpu':
    backendClass = CpuProfilerBackend
    backend = backendClass(registryDb)
else:
    repo = datarepo.SqlDataRepository(registryDb)
    repo.open(datarepo.MODE_READ)
    backend = backend.Backend(repo)
# Get our list of callSetids
callSetIds = args.callSetIds
if callSetIds != []:
    callSetIds = None
    if args.callSetIds != "*":
        callSetIds = args.callSetIds.split(",")
minTime = benchmarkOneQuery(
    _heavyQuery(args.variantSetId, callSetIds),
    args.repeatLimit, args.pageLimit)
print(minTime)
if args.profile == 'cpu':
    stats = pstats.Stats(backend.profiler)
    stats.sort_stats('time')
    stats.print_stats(.25)
def setUp(self):
    self._dataDir = os.path.join("tests", "data")
    self._backend = backend.Backend(
        datarepo.FileSystemDataRepository(self._dataDir))
    self._dataRepo = self._backend.getDataRepository()
def testBadReferenceDatasetMetadata(self):
    localId = "bad_metadata"
    path = self.getFullPath(localId)
    localBackend = backend.Backend(datarepo.EmptyDataRepository())
    with self.assertRaises(exceptions.MissingDatasetMetadataException):
        datasets.FileSystemDataset(localId, path, localBackend)
def setUp(self):
    self._repo = datarepo.SqlDataRepository(paths.testDataRepo)
    self._repo.open(datarepo.MODE_READ)
    self._backend = backend.Backend(self._repo)
    self._client = client.LocalClient(self._backend)
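# A small illustrative helper (not from the original tests), using only the
# accessors already seen elsewhere in this section: once the repository has
# been opened read-only and wrapped in a Backend, it can be walked directly.
def walkRepository(theBackend):
    dataRepo = theBackend.getDataRepository()
    referenceSet = dataRepo.getReferenceSetByIndex(0)
    reference = referenceSet.getReferenceByIndex(0)
    print("first reference id:", reference.getId())
    for dataset in dataRepo.getDatasets():
        for readGroupSet in dataset.getReadGroupSets():
            print(dataset, readGroupSet)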
def setUp(self):
    self._backend = backend.Backend(datarepo.AbstractDataRepository())
    self._referenceSet = references.AbstractReferenceSet(self._backend)
    self._reference = references.AbstractReference(
        self._referenceSet, "ref")
def setUp(self):
    self._backend = backend.Backend(datarepo.AbstractDataRepository())
    self._dataRepo = self._backend.getDataRepository()