def setUp(self):
    """Open the test SQL repository and expose its first dataset."""
    repo = datarepo.SqlDataRepository(paths.testDataRepo)
    repo.open(datarepo.MODE_READ)
    self.backend = backend.Backend(repo)
    # Cache the first dataset plus the identifiers the tests use.
    allDatasets = self.backend.getDataRepository().getDatasets()
    self.dataset = allDatasets[0]
    self.dataset_id = self.dataset.getId()
    self.access_map = {self.dataset.getLocalId(): 4}
def setUp(self):
    """Create a backend over an abstract repo plus stub reference objects."""
    emptyRepo = datarepo.AbstractDataRepository()
    self._backend = backend.Backend(emptyRepo)
    # One abstract reference set containing a single abstract reference.
    self._referenceSet = references.AbstractReferenceSet('refSetId')
    self._reference = references.AbstractReference(self._referenceSet, "ref")
def setUp(self):
    """Open the test data repository and build a local client over it."""
    self._maxDiff = None
    repository = datarepo.SqlDataRepository(paths.testDataRepo)
    self._dataUrl = moduleTestServer.getUrl()
    repository.open(datarepo.MODE_READ)
    self._backend = backend.Backend(repository)
    self._client = client.LocalClient(self._backend)
def setUpClass(cls):
    """Create a class-level backend with a single simulated reference set."""
    cls.numReferences = 25
    # Build the simulated repository first, then wrap it in a backend.
    simulatedRepo = datarepo.SimulatedDataRepository(
        randomSeed=100,
        numDatasets=0,
        numReferenceSets=1,
        numReferencesPerReferenceSet=cls.numReferences)
    cls.backend = backend.Backend(simulatedRepo)
    cls.dataRepo = cls.backend.getDataRepository()
def _configure_backend(app):
    """A helper function used just to help modularize the code a bit.

    Reads app.config["DATA_SOURCE"] as a URL, builds the matching data
    repository (simulated://, empty://, or file://), wraps it in a Backend
    configured from the remaining app.config values, and returns it.
    Raises exceptions.ConfigurationException for an unknown URL scheme.
    """
    # Allocate the backend
    # We use URLs to specify the backend. Currently we have file:// URLs (or
    # URLs with no scheme) for the SqlDataRepository, and special empty:// and
    # simulated:// URLs for empty or simulated data sources.
    dataSource = urlparse.urlparse(app.config["DATA_SOURCE"], "file")
    if dataSource.scheme == "simulated":
        # Ignore the query string
        randomSeed = app.config["SIMULATED_BACKEND_RANDOM_SEED"]
        numCalls = app.config["SIMULATED_BACKEND_NUM_CALLS"]
        variantDensity = app.config["SIMULATED_BACKEND_VARIANT_DENSITY"]
        numVariantSets = app.config["SIMULATED_BACKEND_NUM_VARIANT_SETS"]
        numReferenceSets = app.config["SIMULATED_BACKEND_NUM_REFERENCE_SETS"]
        numReferencesPerReferenceSet = app.config[
            "SIMULATED_BACKEND_NUM_REFERENCES_PER_REFERENCE_SET"]
        numAlignmentsPerReadGroup = app.config[
            "SIMULATED_BACKEND_NUM_ALIGNMENTS_PER_READ_GROUP"]
        numReadGroupsPerReadGroupSet = app.config[
            "SIMULATED_BACKEND_NUM_READ_GROUPS_PER_READ_GROUP_SET"]
        numPhenotypeAssociations = app.config[
            "SIMULATED_BACKEND_NUM_PHENOTYPE_ASSOCIATIONS"]
        numPhenotypeAssociationSets = app.config[
            "SIMULATED_BACKEND_NUM_PHENOTYPE_ASSOCIATION_SETS"]
        numRnaQuantSets = app.config[
            "SIMULATED_BACKEND_NUM_RNA_QUANTIFICATION_SETS"]
        numExpressionLevels = app.config[
            "SIMULATED_BACKEND_NUM_EXPRESSION_LEVELS_PER_RNA_QUANT_SET"]
        dataRepository = datarepo.SimulatedDataRepository(
            randomSeed=randomSeed, numCalls=numCalls,
            variantDensity=variantDensity, numVariantSets=numVariantSets,
            numReferenceSets=numReferenceSets,
            numReferencesPerReferenceSet=numReferencesPerReferenceSet,
            numReadGroupsPerReadGroupSet=numReadGroupsPerReadGroupSet,
            numAlignments=numAlignmentsPerReadGroup,
            numPhenotypeAssociations=numPhenotypeAssociations,
            numPhenotypeAssociationSets=numPhenotypeAssociationSets,
            numRnaQuantSets=numRnaQuantSets,
            numExpressionLevels=numExpressionLevels)
    elif dataSource.scheme == "empty":
        dataRepository = datarepo.EmptyDataRepository()
    elif dataSource.scheme == "file":
        # Join netloc and path so URLs with and without a host part both
        # resolve to a usable filesystem path.
        path = os.path.join(dataSource.netloc, dataSource.path)
        dataRepository = datarepo.SqlDataRepository(path)
        dataRepository.open(datarepo.MODE_READ)
    else:
        raise exceptions.ConfigurationException(
            "Unsupported data source scheme: " + dataSource.scheme)
    # Wrap the chosen repository and apply the request/response limits.
    theBackend = backend.Backend(dataRepository)
    theBackend.setRequestValidation(app.config["REQUEST_VALIDATION"])
    theBackend.setDefaultPageSize(app.config["DEFAULT_PAGE_SIZE"])
    theBackend.setMaxResponseLength(app.config["MAX_RESPONSE_LENGTH"])
    return theBackend
def __init__(self, localId, dataPath):
    # Backend over an abstract (empty) repository; this test builds its
    # own dataset and read-group bookkeeping below instead of loading one.
    self._backend = backend.Backend(datarepo.AbstractDataRepository())
    self._referenceSet = None
    self._dataset = datasets.Dataset("ds")
    # Maps/holders filled in by the _read* helpers.
    self._readGroupInfos = {}
    self._readGroupSetInfo = None
    # Open the alignment file and read reference info BEFORE calling the
    # superclass constructor — NOTE(review): the base __init__ presumably
    # depends on this parsed state; confirm before reordering.
    self._samFile = pysam.AlignmentFile(dataPath)
    self._readReferences()
    super(ReadGroupSetTest, self).__init__(localId, dataPath)
    self._readAlignmentInfo()
def setUp(self):
    """Prepare a SearchReadsRequest against a simulated repository."""
    self.request = protocol.SearchReadsRequest()
    self.backend = backend.Backend(
        datarepo.SimulatedDataRepository(numAlignments=0))
    repo = self.backend.getDataRepository()
    # Point the request at the first reference of the first reference set.
    firstReference = repo.getReferenceSetByIndex(0).getReferenceByIndex(0)
    self.request.reference_id = firstReference.getId()
    self.dataset = repo.getDatasets()[0]
    self.readGroupSet = self.dataset.getReadGroupSets()[0]
def setUp(self):
    """Create a fake request plus fake top-level objects for paging tests."""

    class FakeRequest(object):
        pass

    class FakeTopLevelObject(object):
        # Serialization is a no-op: the fake "protocol element" is itself.
        def toProtocolElement(self, tier=0):
            return self

    self.request = FakeRequest()
    self.request.page_token = ""
    self.num_objects = 3
    self.objects = [FakeTopLevelObject() for _ in range(self.num_objects)]
    self.backend = backend.Backend(datarepo.AbstractDataRepository())
def setUpClass(cls):
    """Build a richly populated simulated backend shared by the class."""
    # Collect the simulation parameters once, then expand them.
    simulationArgs = dict(
        randomSeed=100, numDatasets=3, numVariantSets=3, numCalls=3,
        variantDensity=0.5, numReferenceSets=3,
        numReferencesPerReferenceSet=3, numReadGroupSets=3,
        numReadGroupsPerReadGroupSet=3, numAlignments=3,
        numRnaQuantSets=3)
    cls.backend = backend.Backend(
        datarepo.SimulatedDataRepository(**simulationArgs))
    cls.dataRepo = cls.backend.getDataRepository()
def add_local_client(self, local_client="simulated", datasets=None,
                     featuresets=None, phenotypeassociationsets=None):
    """Add a g2p local client to manager.

    :param local_client: either the string "simulated" (a LocalClient is
        built over a fresh SimulatedDataRepository) or an existing
        LocalClient instance to register as-is.
    :param datasets: optional dataset names forwarded to the new client.
    :param featuresets: optional feature set names for the new client.
    :param phenotypeassociationsets: optional phenotype association set
        names for the new client.
    :raises TypeError: if local_client is neither "simulated" nor a
        LocalClient instance.
    """
    # Equality with the string "simulated" already implies the value is a
    # str, so the original isinstance(local_client, str) guard was
    # redundant and has been dropped.
    if local_client == 'simulated':
        repository = datarepo.SimulatedDataRepository()
        b = backend.Backend(repository)
        c = LocalClient(b, datasets, featuresets, phenotypeassociationsets)
    elif isinstance(local_client, LocalClient):
        c = local_client
    else:
        raise TypeError(
            'Expected local_client to be "simulated" or LocalClient object'
        )
    self.client_list.append(c)
import unittest
from g2pf.clientManager import *
from ga4gh.server import backend, datarepo
from ga4gh.client import client

# Single shared backend over a simulated repository; reused by every
# LocalClient constructed in the tests below.
b = backend.Backend(datarepo.SimulatedDataRepository())


class TestClientManager(unittest.TestCase):
    """Tests for ClientManager's handling of HTTP and local clients."""

    def setUp(self):
        # Two HTTP clients: one bare, one with explicit dataset /
        # featureset / phenotype-association-set filters.
        self.http_client1 = HttpClient("http://1kgenomes.ga4gh.org")
        self.http_client2 = HttpClient("http://1kgenomes.ga4gh.org",
                                       datasets=["1kgenomes", "anotherOne"],
                                       featuresets=["f1", "f2"],
                                       phenotypeassociationsets=["p1", "p2"])
        # Two local clients over the shared module-level backend ``b``.
        self.local_client1 = LocalClient(b)
        self.local_client2 = LocalClient(b,
                                         datasets=['1kgenomes', 'anotherOne'],
                                         featuresets=['f1', 'f2'],
                                         phenotypeassociationsets=['p1',
                                                                   'p2'])
        self.manager = ClientManager()

    def test_add_clients(self):
        # Snapshot len(manager) after each addition.
        # NOTE(review): this method appears truncated here — ``counts`` is
        # built but never asserted against expected values.
        counts = list()
        counts.append(len(self.manager))
        self.manager.add_http_client(self.http_client1)
        counts.append(len(self.manager))
        self.manager.add_http_client(self.http_client2)
        counts.append(len(self.manager))
        self.manager.add_local_client(self.local_client1)
args = parser.parse_args()
# Example registry database used by every profiling mode.
registryDb = "ga4gh-example-data/registry.db"
if args.profile == 'heap':
    backendClass = HeapProfilerBackend
    backend = backendClass(registryDb)
    # Heap profiling is expensive, so run the query only once.
    args.repeatLimit = 1
    args.pageLimit = 1
elif args.profile == 'cpu':
    backendClass = CpuProfilerBackend
    backend = backendClass(registryDb)
else:
    # No profiling: plain backend over the SQL repository.
    # NOTE(review): this rebinding (in all three branches) shadows the
    # imported ``backend`` module for the rest of the script.
    repo = datarepo.SqlDataRepository(registryDb)
    repo.open(datarepo.MODE_READ)
    backend = backend.Backend(repo)
# Get our list of callSetids
callSetIds = args.callSetIds
if callSetIds != []:
    # A value was supplied: None means "all call sets" ("*"), otherwise
    # split the comma-separated id list.
    callSetIds = None
    if args.callSetIds != "*":
        callSetIds = args.callSetIds.split(",")
minTime = benchmarkOneQuery(
    _heavyQuery(args.variantSetId, callSetIds), args.repeatLimit,
    args.pageLimit)
print(minTime)
if args.profile == 'cpu':
    # CpuProfilerBackend exposes its profiler for post-run reporting.
    stats = pstats.Stats(backend.profiler)
    stats.sort_stats('time')
    stats.print_stats(.25)
def setUp(self):
    """Create a backend over an abstract repository and keep both handy."""
    abstractRepo = datarepo.AbstractDataRepository()
    self._backend = backend.Backend(abstractRepo)
    self._dataRepo = self._backend.getDataRepository()
def setUp(self):
    """Open the registry database and build a local client over it."""
    repo = datarepo.SqlDataRepository('registry.db')
    repo.open(datarepo.MODE_READ)
    self._backend = backend.Backend(repo)
    self._client = client.LocalClient(self._backend)
def setUp(self):
    """Open the test SQL repository read-only and build a local client."""
    repository = datarepo.SqlDataRepository(paths.testDataRepo)
    repository.open(datarepo.MODE_READ)
    # Publish the repo on the instance before wrapping it.
    self._repo = repository
    self._backend = backend.Backend(repository)
    self._client = client.LocalClient(self._backend)
def setUp(self):
    """Create a variant search request over a simulated backend."""
    self.request = protocol.SearchVariantsRequest()
    simulatedRepo = datarepo.SimulatedDataRepository()
    self.backend = backend.Backend(simulatedRepo)
    # The tests operate on the repository's first dataset.
    self.dataset = self.backend.getDataRepository().getDatasets()[0]