def list(self):
    """
    List the contents of the repo
    """
    self._check()
    self._repoEmit("Listing")
    repo = datarepo.FileSystemDataRepository(self._repoPath)
    # Reference sets and their references.
    self._emit(self.referenceSetsDirName)
    for refSet in repo.getReferenceSets():
        self._emitIndent(refSet.getLocalId())
        for ref in refSet.getReferences():
            self._emitIndent(ref.getLocalId(), 2)
    # Ontology maps.
    self._emit(self.ontologiesDirName)
    for ontologyMap in repo.getOntologyMaps():
        self._emitIndent(ontologyMap.getLocalId(), 1)
    # Datasets, each with its reads, variants and features.
    self._emit(self.datasetsDirName)
    for ds in repo.getDatasets():
        self._emitIndent(ds.getLocalId())
        self._emitIndent(self.readsDirName, 2)
        for rgSet in ds.getReadGroupSets():
            self._emitIndent(rgSet.getLocalId(), 3)
        self._emitIndent(self.variantsDirName, 2)
        for vSet in ds.getVariantSets():
            self._emitIndent(vSet.getLocalId(), 3)
            # NOTE(review): reaches into the private _chromFileMap —
            # presumably one VCF per chromosome; confirm with the
            # variant set implementation.
            for chromFile in sorted(vSet._chromFileMap.keys()):
                self._emitIndent(chromFile, 4)
        self._emitIndent(self.featuresDirName, 2)
        for fSet in ds.getFeatureSets():
            self._emitIndent(fSet.getLocalId(), 3)
def list(self):
    """
    List the contents of the repo
    """
    self._check()
    self._repoEmit("Listing")
    # Skip the consistency check here: listing should work even on a
    # repo that would fail validation.
    repo = datarepo.FileSystemDataRepository(
        self._repoPath, doConsistencyCheck=False)
    # Reference sets and their references.
    self._emit(self.referenceSetsDirName)
    for refSet in repo.getReferenceSets():
        self._emitIndent(refSet.getLocalId())
        for ref in refSet.getReferences():
            self._emitIndent(ref.getLocalId(), 2)
    # Datasets, each with its reads and variants.
    self._emit(self.datasetsDirName)
    for ds in repo.getDatasets():
        self._emitIndent(ds.getLocalId())
        self._emitIndent(self.readsDirName, 2)
        for rgSet in ds.getReadGroupSets():
            self._emitIndent(rgSet.getLocalId(), 3)
        self._emitIndent(self.variantsDirName, 2)
        for vSet in ds.getVariantSets():
            self._emitIndent(vSet.getLocalId(), 3)
            # NOTE(review): reaches into the private _chromFileMap —
            # presumably one VCF per chromosome; confirm.
            for chromFile in sorted(vSet._chromFileMap.keys()):
                self._emitIndent(chromFile, 4)
def setUp(self):
    """
    Build a local client backed by the test data repository.
    """
    self._maxDiff = None
    self._dataDir = paths.testDataDir
    self._dataUrl = "file://" + self._dataDir
    repo = datarepo.FileSystemDataRepository(self._dataDir)
    self._backend = backend.Backend(repo)
    self._client = client.LocalClient(self._backend)
def check(self, doConsistencyCheck=False):
    """
    Check the repository for well-formedness.

    :param doConsistencyCheck: when True, also load the repository and
        run its (more expensive) internal consistency check.
    """
    self._check()
    if doConsistencyCheck:
        dataRepo = datarepo.FileSystemDataRepository(self._repoPath)
        dataRepo.checkConsistency()
    # BUG FIX: the original called "Well-formed".format(self._repoPath);
    # the template has no placeholder, so the argument was silently
    # discarded and the call was a no-op. The emitted message is
    # unchanged.
    self._repoEmit("Well-formed")
def __init__(self, featureSetLocalName, dataPath):
    """
    :param localId: Name of the GFF3 resource corresponding to a pair
        of files, .db and .gff3
    :param dataPath: string representing full path to the .db file
    :return:
    """
    self._dataset = datasets.AbstractDataset(_datasetName)
    self._datarepo = datarepo.FileSystemDataRepository("tests/data")
    # Strip the trailing '.db' suffix to recover the resource name.
    localName = featureSetLocalName[:-3]
    self._testData = _testDataForFeatureSetName[localName]
    super(FeatureSetTests, self).__init__(localName, dataPath)
def setUp(self):
    # Fixture for variant-annotation tests: builds a variant set plus an
    # annotation set over the WASH7P test data.
    self._variantSetName = "testVariantSet"
    # NOTE(review): despite its name, _backend holds a data repository,
    # not a backend object — confirm the downstream constructors really
    # expect a repository here.
    self._backend = datarepo.FileSystemDataRepository("tests/data")
    # NOTE(review): AbstractDataset is given the repository where other
    # fixtures in this file pass a dataset *name* — verify against the
    # AbstractDataset constructor.
    self._dataset = datasets.AbstractDataset(self._backend)
    self._variantSet = variants.AbstractVariantSet(
        self._dataset, self._variantSetName)
    self._variantAnnotationSet = \
        variants.HtslibVariantAnnotationSet(
            self._dataset, "vas",
            "tests/data/datasets/dataset1/variants/WASH7P_annotation",
            self._backend, self._variantSet)
def __init__(self, variantAnnotationSetId, baseDir):
    # Test fixture: loads annotated VCFs from baseDir and records the
    # variants and reference names they contain.
    self._dataset = datasets.AbstractDataset("ds")
    self._datarepo = datarepo.FileSystemDataRepository("tests/data")
    super(VariantAnnotationSetTest, self).__init__(variantAnnotationSetId,
                                                   baseDir)
    self._variantSet = variants.HtslibVariantSet(self._dataset, "vs",
                                                 self._dataPath, None)
    self._variantRecords = []
    self._referenceNames = set()
    # Only read in VCF files with a JSON sidecar saying they're annotated.
    for vcfFile in glob.glob(os.path.join(self._dataPath, "*.vcf.gz")):
        # NOTE(review): _isAnnotated() is called without vcfFile, so the
        # condition is the same for every file in the loop, which
        # contradicts the per-file intent stated above — confirm whether
        # it should be self._isAnnotated(vcfFile).
        if self._isAnnotated():
            self._readVcf(vcfFile)
    self._isCsq = self._hasConsequenceField()
def __init__(self, args):
    """
    Build either a local or an HTTP client depending on the base URL.
    """
    self._key = args.key
    # TODO this is an experimental addition which is useful for
    # testing. We should think about this and document it if we
    # think it's a useful feature. There is an argument for pushing
    # the backend instantiation into the client, and letting the
    # client be a factory, instantiating the correct Client class
    # depending on the prefix.
    prefix = "file://"
    if not args.baseUrl.startswith(prefix):
        self._client = client.HttpClient(
            args.baseUrl, verbosityToLogLevel(args.verbose), self._key)
    else:
        # A file:// URL means we run against a local repository.
        dataDir = args.baseUrl[len(prefix):]
        repo = datarepo.FileSystemDataRepository(dataDir)
        localBackend = backend.Backend(repo)
        self._client = client.LocalClient(localBackend)
def __init__(self, dataDir):
    """
    Backend wrapper that profiles request handling with cProfile.
    """
    repository = datarepo.FileSystemDataRepository(dataDir)
    super(CpuProfilerBackend, self).__init__(repository)
    self.profiler = cProfile.Profile()
def __init__(self, dataDir):
    """
    Backend wrapper that profiles heap usage with guppy.
    """
    repository = datarepo.FileSystemDataRepository(dataDir)
    super(HeapProfilerBackend, self).__init__(repository)
    self.profiler = guppy.hpy()
    metavar='N', help='how many pages (max) to load '
    'from each test case (default: %(default)s)')
parser.add_argument(
    "--callSetIds", "-c", default=[],
    help="""Return variant calls which belong to call sets with these IDs. Pass in IDs as a comma separated list (no spaces), or '*' (with the single quotes!) to indicate 'all call sets'. Omit this option to indicate 'no call sets'. """)
args = parser.parse_args()
dataDir = "ga4gh-example-data"
# NOTE(review): this rebinds the name 'backend', shadowing the imported
# backend module — later module access in this scope would fail; confirm
# nothing below still needs the module.
backend = backend.Backend(datarepo.FileSystemDataRepository(dataDir))
if args.profile == 'heap':
    # Heap profiling is slow: restrict the workload to a single pass.
    backendClass = HeapProfilerBackend
    backend = backendClass(dataDir)
    args.repeatLimit = 1
    args.pageLimit = 1
elif args.profile == 'cpu':
    backendClass = CpuProfilerBackend
    backend = backendClass(dataDir)
# Get our list of callSetids
# Semantics: default [] -> no call sets; '*' -> None (all call sets);
# otherwise a comma-separated list of IDs.
callSetIds = args.callSetIds
if callSetIds != []:
    callSetIds = None
    if args.callSetIds != "*":
        callSetIds = args.callSetIds.split(",")
def configure(configFile=None, baseConfig="ProductionConfig",
              port=8000, extraConfig=None):
    """
    Configure the Flask application.

    Loads configuration (base config object, optional environment
    override, optional config file, then extraConfig), sets up the file
    handle cache and CORS, allocates the data repository/backend named
    by the DATA_SOURCE URL, and initialises the OIDC client when an
    OIDC_PROVIDER is configured.

    :param configFile: optional path to a pyfile with extra settings.
    :param baseConfig: name of the config class in ga4gh.serverconfig.
    :param port: port the server will listen on (used for the OIDC
        redirect URI when testing).
    :param extraConfig: optional mapping of settings applied last.
    """
    # BUG FIX: the original used a mutable default argument
    # (extraConfig={}); use None as the sentinel instead.
    if extraConfig is None:
        extraConfig = {}
    configStr = 'ga4gh.serverconfig:{0}'.format(baseConfig)
    app.config.from_object(configStr)
    if os.environ.get('GA4GH_CONFIGURATION') is not None:
        app.config.from_envvar('GA4GH_CONFIGURATION')
    if configFile is not None:
        app.config.from_pyfile(configFile)
    app.config.update(extraConfig.items())
    # Setup file handle cache max size
    datamodel.fileHandleCache.setMaxCacheSize(
        app.config["FILE_HANDLE_CACHE_MAX_SIZE"])
    # Setup CORS
    cors.CORS(app, allow_headers='Content-Type')
    app.serverStatus = ServerStatus()
    # Allocate the backend.
    # We use URLs to specify the backend. Currently we have file:// URLs
    # (or URLs with no scheme) for the FileSystemBackend, and special
    # empty:// and simulated:// URLs for empty or simulated data sources.
    dataSource = urlparse.urlparse(app.config["DATA_SOURCE"], "file")
    if dataSource.scheme == "simulated":
        # Ignore the query string
        randomSeed = app.config["SIMULATED_BACKEND_RANDOM_SEED"]
        numCalls = app.config["SIMULATED_BACKEND_NUM_CALLS"]
        variantDensity = app.config["SIMULATED_BACKEND_VARIANT_DENSITY"]
        numVariantSets = app.config["SIMULATED_BACKEND_NUM_VARIANT_SETS"]
        numReferenceSets = app.config[
            "SIMULATED_BACKEND_NUM_REFERENCE_SETS"]
        numReferencesPerReferenceSet = app.config[
            "SIMULATED_BACKEND_NUM_REFERENCES_PER_REFERENCE_SET"]
        numAlignmentsPerReadGroup = app.config[
            "SIMULATED_BACKEND_NUM_ALIGNMENTS_PER_READ_GROUP"]
        dataRepository = datarepo.SimulatedDataRepository(
            randomSeed=randomSeed, numCalls=numCalls,
            variantDensity=variantDensity,
            numVariantSets=numVariantSets,
            numReferenceSets=numReferenceSets,
            numReferencesPerReferenceSet=numReferencesPerReferenceSet,
            numAlignments=numAlignmentsPerReadGroup)
    elif dataSource.scheme == "empty":
        dataRepository = datarepo.EmptyDataRepository()
    elif dataSource.scheme == "file":
        dataRepository = datarepo.FileSystemDataRepository(os.path.join(
            dataSource.netloc, dataSource.path))
        dataRepository.checkConsistency()
    else:
        raise exceptions.ConfigurationException(
            "Unsupported data source scheme: " + dataSource.scheme)
    theBackend = backend.Backend(dataRepository)
    theBackend.setRequestValidation(app.config["REQUEST_VALIDATION"])
    theBackend.setResponseValidation(app.config["RESPONSE_VALIDATION"])
    theBackend.setDefaultPageSize(app.config["DEFAULT_PAGE_SIZE"])
    theBackend.setMaxResponseLength(app.config["MAX_RESPONSE_LENGTH"])
    app.backend = theBackend
    app.secret_key = os.urandom(SECRET_KEY_LENGTH)
    app.oidcClient = None
    app.tokenMap = None
    app.myPort = port
    if "OIDC_PROVIDER" in app.config:
        # The oic client. If we're testing, we don't want to verify
        # SSL certificates
        app.oidcClient = oic.oic.Client(
            verify_ssl=('TESTING' not in app.config))
        app.tokenMap = {}
        try:
            app.oidcClient.provider_config(app.config['OIDC_PROVIDER'])
        except requests.exceptions.ConnectionError:
            # Provider discovery failed; fall back to the explicitly
            # configured endpoints.
            configResponse = message.ProviderConfigurationResponse(
                issuer=app.config['OIDC_PROVIDER'],
                authorization_endpoint=app.config['OIDC_AUTHZ_ENDPOINT'],
                token_endpoint=app.config['OIDC_TOKEN_ENDPOINT'],
                revocation_endpoint=app.config['OIDC_TOKEN_REV_ENDPOINT'])
            app.oidcClient.handle_provider_config(
                configResponse, app.config['OIDC_PROVIDER'])
        # The redirect URI comes from the configuration.
        # If we are testing, then we allow the automatic creation of a
        # redirect uri if none is configured
        redirectUri = app.config.get('OIDC_REDIRECT_URI')
        if redirectUri is None and 'TESTING' in app.config:
            redirectUri = 'https://{0}:{1}/oauth2callback'.format(
                socket.gethostname(), app.myPort)
        # BUG FIX: the original tested "redirectUri is []", which is an
        # identity comparison with a fresh list and therefore always
        # False, so a missing redirect URI was never reported. Check for
        # None before the URI is used.
        if redirectUri is None:
            raise exceptions.ConfigurationException(
                'OIDC configuration requires a redirect uri')
        app.oidcClient.redirect_uris = [redirectUri]
        # We only support dynamic registration while testing.
        if ('registration_endpoint' in app.oidcClient.provider_info and
                'TESTING' in app.config):
            app.oidcClient.register(
                app.oidcClient.provider_info["registration_endpoint"],
                redirect_uris=[redirectUri])
        else:
            response = message.RegistrationResponse(
                client_id=app.config['OIDC_CLIENT_ID'],
                client_secret=app.config['OIDC_CLIENT_SECRET'],
                redirect_uris=[redirectUri],
                verify_ssl=False)
            app.oidcClient.store_registration_info(response)
def setUp(self):
    """
    Build a backend over tests/data and keep a handle on its repository.
    """
    self._dataDir = os.path.join("tests", "data")
    repo = datarepo.FileSystemDataRepository(self._dataDir)
    self._backend = backend.Backend(repo)
    self._dataRepo = self._backend.getDataRepository()
def setUp(self):
    """
    Build a local client over the shared test data directory.
    """
    repo = datarepo.FileSystemDataRepository(paths.testDataDir)
    self._backend = backend.Backend(repo)
    self._client = client.LocalClient(self._backend)