def test_saveWork_single_timeout(self, testInstance, mocker):
    testInstance.work = mocker.MagicMock()
    # first save attempt times out, the retry succeeds
    testInstance.work.save.side_effect = [ConnectionTimeout('test'), None]

    testInstance.saveWork()

    assert testInstance.work.save.call_count == 2
def perform_request(self, method, url, params=None, body=None, timeout=None, ignore=()):
    url_path = url
    if params:
        url_path = '%s?%s' % (url, urlencode(params))
    url = self.base_url + url_path

    start = self.loop.time()
    response = None
    try:
        with aiohttp.Timeout(timeout or self.timeout):
            response = yield from self.session.request(method, url, data=body)
            raw_data = yield from response.text()
        duration = self.loop.time() - start
    except Exception as e:
        self.log_request_fail(method, url, url_path, body, self.loop.time() - start, exception=e)
        if isinstance(e, asyncio.TimeoutError):
            raise ConnectionTimeout('TIMEOUT', str(e), e)
        raise ConnectionError('N/A', str(e), e)
    finally:
        if response is not None:
            yield from response.release()

    # raise errors based on http status codes, let the client handle those if needed
    if not (200 <= response.status < 300) and response.status not in ignore:
        self.log_request_fail(method, url, url_path, body, duration,
                              status_code=response.status, response=raw_data)
        self._raise_error(response.status, raw_data)

    self.log_request_success(method, url, url_path, body, response.status, raw_data, duration)

    return response.status, response.headers, raw_data
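# A hedged usage sketch for the coroutine above. It assumes the method is
# wrapped with @asyncio.coroutine (as old-style `yield from` transports were)
# and that `connection` is an instance of the surrounding class; the endpoint
# and parameter values are illustrative only.
import asyncio

loop = asyncio.get_event_loop()
status, headers, raw_data = loop.run_until_complete(
    connection.perform_request('GET', '/_cluster/health',
                               params={'level': 'indices'}, timeout=5))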
def perform(self, timeout=0.1):
    """
    Loop over the waiting handles and process them until none are left
    and all sends have finished. The loop only ends when the client is
    closed, so don't wait for it to return.
    :param timeout: the timeout for each select iteration
    :return: Nothing
    """
    while self.running:
        if len(self.handles) == 0:
            # no activity, just sleep while waiting for new queries
            yield from self._try_load_queries(True, timeout)
        else:
            yield from self._try_load_queries(False)
        if len(self.handles) == 0:
            continue
        # wait for something to happen
        selected = self.multi.select(timeout)
        if selected < 0:
            continue
        # it was not a select timeout, something to do
        ret, num_handles = self._perform_loop()
        if ret > 0:
            raise ConnectionError("pycurl failed", ret)
        if len(self.handles) == 0:
            continue
        else:
            # some handles to process
            (waiting, succeeded, failed) = self.multi.info_read()
            for handle in succeeded:
                self.handles.remove(handle)
                status = handle.getinfo(pycurl.RESPONSE_CODE)
                self.multi.remove_handle(handle)
                content_type, decoded = decode_body(handle)
                if not self.running:
                    # stopped, just swallow the content
                    continue
                elif 200 <= status < 300:
                    handle.cb(status, handle.headers, decoded)
                elif status >= 300:
                    handle.f_cb(return_error(
                        status, decoded, content_type,
                        http_message=handle.headers.pop('__STATUS__'),
                        url=handle.getinfo(pycurl.EFFECTIVE_URL)))
            for handle, code, message in failed:
                self.handles.remove(handle)
                self.multi.remove_handle(handle)
                if code == pycurl.E_OPERATION_TIMEDOUT:
                    ex = ConnectionTimeout(
                        code, message,
                        handle.getinfo(pycurl.EFFECTIVE_URL),
                        handle.getinfo(pycurl.TOTAL_TIME))
                else:
                    ex = PyCurlException(
                        code, handle.errstr(),
                        handle.getinfo(pycurl.EFFECTIVE_URL))
                handle.f_cb(ex)
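# A hedged sketch of what the decode_body helper used above could look like
# (the real helper is not shown here): read the Content-Type from the headers
# the handle collected and decode the buffered body with the advertised
# charset. `handle.buffer` and the header layout are assumptions.
def decode_body(handle):
    content_type = handle.headers.get('Content-Type', 'application/json')
    charset = 'utf-8'
    if ';' in content_type:
        content_type, _, param = content_type.partition(';')
        if 'charset=' in param:
            charset = param.split('charset=', 1)[1].strip()
    return content_type.strip(), handle.buffer.getvalue().decode(charset)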
def perform_request(self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None):
    url = self.base_url + url
    if params:
        url = '%s?%s' % (url, urlencode(params))

    start = time.time()
    request = requests.Request(method=method, headers=headers, url=url, data=body)
    prepared_request = self.session.prepare_request(request)
    settings = self.session.merge_environment_settings(prepared_request.url, {}, None, None, None)
    send_kwargs = {'timeout': timeout or self.timeout}
    send_kwargs.update(settings)
    try:
        response = self.session.request(prepared_request.method, prepared_request.url,
                                        data=prepared_request.body,
                                        headers=prepared_request.headers, **send_kwargs)
        duration = time.time() - start
        raw_data = response.text
    except Exception as e:
        self.log_request_fail(method, url, prepared_request.path_url, body,
                              time.time() - start, exception=e)
        if isinstance(e, requests.exceptions.SSLError):
            raise SSLError('N/A', str(e), e)
        if isinstance(e, requests.Timeout):
            raise ConnectionTimeout('TIMEOUT', str(e), e)
        raise ConnectionError('N/A', str(e), e)

    # raise errors based on http status codes, let the client handle those if needed
    if not (200 <= response.status_code < 300) and response.status_code not in ignore:
        self.log_request_fail(method, url, response.request.path_url, body, duration,
                              response.status_code, raw_data)
        self._raise_error(response.status_code, raw_data)

    self.log_request_success(method, url, response.request.path_url, body,
                             response.status_code, raw_data, duration)

    return response.status_code, response.headers, raw_data
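# A hedged call against the requests-backed transport above; `connection`,
# the endpoint, and the body are illustrative only. A requests.Timeout inside
# would surface as ConnectionTimeout, per the except branch.
status, headers, raw_data = connection.perform_request(
    'POST', '/my-index/_search',
    params={'routing': 'user_1'},
    body='{"query": {"match_all": {}}}',
    timeout=10,
    headers={'content-type': 'application/json'})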
def perform_request(self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None):
    url_path = url
    if params:
        query_string = urlencode(params)
    else:
        query_string = ""

    # Provide correct URL object to avoid string parsing in low-level code
    url = yarl.URL.build(scheme=self.scheme, host=self.hostname, port=self.port,
                         path=url, query_string=query_string, encoded=True)

    start = self.loop.time()
    response = None
    try:
        request_timeout = timeout or self.timeout.total
        with async_timeout.timeout(request_timeout, loop=self.loop):
            # override the default session timeout explicitly
            response = yield from self.session.request(
                method, url, data=body, headers=headers, timeout=request_timeout)
            raw_data = yield from response.text()
        duration = self.loop.time() - start
    except asyncio.CancelledError:
        raise
    except Exception as e:
        self.log_request_fail(method, url, url_path, body, self.loop.time() - start, exception=e)
        if isinstance(e, ServerFingerprintMismatch):
            raise SSLError('N/A', str(e), e)
        if isinstance(e, asyncio.TimeoutError):
            raise ConnectionTimeout('TIMEOUT', str(e), e)
        raise ConnectionError('N/A', str(e), e)
    finally:
        if response is not None:
            yield from response.release()

    # raise errors based on http status codes, let the client handle those if needed
    if not (200 <= response.status < 300) and response.status not in ignore:
        self.log_request_fail(method, url, url_path, body, duration,
                              status_code=response.status, response=raw_data)
        self._raise_error(response.status, raw_data)

    self.log_request_success(method, url, url_path, body, response.status, raw_data, duration)

    return response.status, response.headers, raw_data
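# Worked example of the yarl.URL.build call above, with assumed values for
# scheme/host/port; encoded=True tells yarl the path and query string are
# already percent-encoded and should not be re-quoted.
import yarl

url = yarl.URL.build(scheme='http', host='localhost', port=9200,
                     path='/_cluster/health',
                     query_string='level=indices', encoded=True)
assert str(url) == 'http://localhost:9200/_cluster/health?level=indices'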
def perform_request(self, method, url, params=None, body=None, timeout=None, ignore=()):
    url_path = url
    url = (self.base_url / url.lstrip('/')).with_query(params)

    start = self.loop.time()
    response = None
    try:
        with aiohttp.Timeout(timeout or self.timeout, loop=self.loop):
            response = yield from self.session.request(
                method, url, data=body, headers=self.headers, timeout=None)
            raw_data = yield from response.text()
        duration = self.loop.time() - start
    except asyncio.TimeoutError as exc:
        self.log_request_fail(method, url, url_path, body, self.loop.time() - start, exception=exc)
        raise ConnectionTimeout('TIMEOUT', str(exc), exc)
    except FingerprintMismatch as exc:
        self.log_request_fail(method, url, url_path, body, self.loop.time() - start, exception=exc)
        raise SSLError('N/A', str(exc), exc)
    except ClientError as exc:
        self.log_request_fail(method, url, url_path, body, self.loop.time() - start, exception=exc)
        raise ConnectionError('N/A', str(exc), exc)
    finally:
        if response is not None:
            yield from response.release()

    # raise errors based on http status codes, let the client handle those if needed
    if not (200 <= response.status < 300) and response.status not in ignore:
        self.log_request_fail(method, url, url_path, body, duration, response.status, raw_data)
        self._raise_error(response.status, raw_data)

    self.log_request_success(method, url, url_path, body, response.status, raw_data, duration)

    return response.status, response.headers, raw_data
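# Worked example of the URL composition used above, assuming base_url is a
# yarl.URL such as URL('http://localhost:9200'). with_query(None) simply
# leaves the query string empty, matching the params=None default.
import yarl

base_url = yarl.URL('http://localhost:9200')
url = (base_url / '/_search'.lstrip('/')).with_query({'size': '10'})
assert str(url) == 'http://localhost:9200/_search?size=10'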
def perform_request(self, method, url, params=None, body=None, timeout=None, ignore=()):
    url = self.base_url + url
    if params:
        url = '%s?%s' % (url, urlencode(params))

    start = time.time()
    headers = self.headers.copy()
    try:
        response = urlfetch.Fetch(url, payload=body, method=method, headers=headers,
                                  allow_truncated=False, follow_redirects=True,
                                  deadline=timeout, validate_certificate=self.verify_certs)
        duration = time.time() - start
        raw_data = response.content
    except Exception as e:
        self.log_request_fail(method, url, url, body, time.time() - start, exception=e)
        if isinstance(e, urlfetch_errors.SSLCertificateError):
            raise SSLError('N/A', str(e), e)
        if isinstance(e, urlfetch_errors.DeadlineExceededError):
            raise ConnectionTimeout('TIMEOUT', str(e), e)
        raise ConnectionError('N/A', str(e), e)

    # raise errors based on http status codes, let the client handle those if needed
    if not (200 <= response.status_code < 300) and response.status_code not in ignore:
        self.log_request_fail(method, url, url, body, duration)
        self._raise_error(response.status_code, raw_data)

    self.log_request_success(method, url, url, body, response.status_code, raw_data, duration)

    return response.status_code, response.headers, raw_data
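# Across all of the transports above, the `ignore` tuple suppresses
# _raise_error for expected statuses. A hedged example: treating a 404 as a
# normal answer when probing whether an index exists (`connection` and the
# index name are illustrative only).
status, headers, raw_data = connection.perform_request(
    'HEAD', '/my-index', ignore=(404,))
index_exists = (status == 200)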
def test_query_variants(self, mock_get_variants, mock_get_gene_counts,
                        mock_error_logger, mock_analyst_group):
    url = reverse(query_variants_handler, args=['abc'])
    self.check_collaborator_login(url, request_data={'projectFamilies': PROJECT_FAMILIES})
    url = reverse(query_variants_handler, args=[SEARCH_HASH])

    # add a locus list
    LocusList.objects.get(guid=LOCUS_LIST_GUID).projects.add(
        Project.objects.get(guid=PROJECT_GUID))

    # Test invalid inputs
    response = self.client.get(url)
    self.assertEqual(response.status_code, 400)
    self.assertEqual(response.reason_phrase, 'Invalid search hash: {}'.format(SEARCH_HASH))
    mock_error_logger.assert_not_called()

    response = self.client.post(url, content_type='application/json',
                                data=json.dumps({'search': SEARCH}))
    self.assertEqual(response.status_code, 400)
    self.assertEqual(response.reason_phrase, 'Invalid search: no projects/ families specified')
    mock_error_logger.assert_not_called()

    mock_get_variants.side_effect = InvalidIndexException('Invalid index')
    response = self.client.post(url, content_type='application/json', data=json.dumps({
        'projectFamilies': PROJECT_FAMILIES, 'search': SEARCH}))
    self.assertEqual(response.status_code, 400)
    self.assertEqual(response.json()['error'], 'Invalid index')
    mock_error_logger.assert_called_with('Invalid index', extra=mock.ANY)

    mock_get_variants.side_effect = InvalidSearchException('Invalid search')
    mock_error_logger.reset_mock()
    response = self.client.post(url, content_type='application/json', data=json.dumps({
        'projectFamilies': PROJECT_FAMILIES, 'search': SEARCH}))
    self.assertEqual(response.status_code, 400)
    self.assertEqual(response.json()['error'], 'Invalid search')
    mock_error_logger.assert_not_called()

    mock_get_variants.side_effect = ConnectionTimeout('', '', ValueError('Timeout'))
    response = self.client.post(url, content_type='application/json', data=json.dumps({
        'projectFamilies': PROJECT_FAMILIES, 'search': SEARCH}))
    self.assertEqual(response.status_code, 504)
    self.assertEqual(response.json()['error'], 'ConnectionTimeout caused by - ValueError(Timeout)')
    mock_error_logger.assert_not_called()

    mock_get_variants.side_effect = TransportError(
        'N/A', 'search_phase_execution_exception', {'error': 'Invalid'})
    response = self.client.post(url, content_type='application/json', data=json.dumps({
        'projectFamilies': PROJECT_FAMILIES, 'search': SEARCH}))
    self.assertEqual(response.status_code, 400)
    self.assertEqual(
        response.json()['error'],
        "TransportError: N/A - 'search_phase_execution_exception' - 'Invalid'")
    self.assertEqual(response.json()['detail'], {'error': 'Invalid'})
    mock_error_logger.assert_not_called()

    error_info_json = {'error': {'root_cause': [
        {'type': 'response_handler_failure_transport_exception'}]}}
    mock_get_variants.side_effect = TransportError(
        '401', 'search_phase_execution_exception', error_info_json)
    response = self.client.post(url, content_type='application/json', data=json.dumps({
        'projectFamilies': PROJECT_FAMILIES, 'search': SEARCH}))
    self.assertEqual(response.status_code, 401)
    self.assertEqual(
        response.json()['error'],
        "TransportError: 401 - 'search_phase_execution_exception' - response_handler_failure_transport_exception")
    self.assertEqual(response.json()['detail'], error_info_json)
    mock_error_logger.assert_not_called()

    mock_get_variants.side_effect = _get_es_variants

    # Test new search
    response = self.client.post(url, content_type='application/json', data=json.dumps({
        'projectFamilies': PROJECT_FAMILIES, 'search': SEARCH}))
    self.assertEqual(response.status_code, 200)
    response_json = response.json()
    self.assertSetEqual(set(response_json.keys()), {
        'searchedVariants', 'savedVariantsByGuid', 'genesById', 'search',
        'variantTagsByGuid', 'variantNotesByGuid', 'variantFunctionalDataByGuid',
        'locusListsByGuid'})
    self.assertListEqual(response_json['searchedVariants'], VARIANTS)
    self.assertDictEqual(response_json['search'], {
        'search': SEARCH,
        'projectFamilies': [{'projectGuid': PROJECT_GUID, 'familyGuids': mock.ANY}],
        'totalResults': 3,
    })
    self.assertSetEqual(
        set(response_json['search']['projectFamilies'][0]['familyGuids']),
        {'F000001_1', 'F000002_2'})
    self.assertSetEqual(set(response_json['savedVariantsByGuid'].keys()), {
        'SV0000001_2103343353_r0390_100', 'SV0000002_1248367227_r0390_100'})
    self.assertSetEqual(
        set(response_json['genesById'].keys()),
        {'ENSG00000227232', 'ENSG00000268903', 'ENSG00000233653'})
    gene_fields = {'locusListGuids'}
    gene_fields.update(GENE_VARIANT_FIELDS)
    self.assertSetEqual(set(response_json['genesById']['ENSG00000227232'].keys()), gene_fields)
    self.assertListEqual(
        response_json['genesById']['ENSG00000227232']['locusListGuids'], [LOCUS_LIST_GUID])
    self.assertSetEqual(set(response_json['locusListsByGuid'].keys()), {LOCUS_LIST_GUID})
    intervals = response_json['locusListsByGuid'][LOCUS_LIST_GUID]['intervals']
    self.assertEqual(len(intervals), 2)
    self.assertSetEqual(set(intervals[0].keys()), {
        'locusListGuid', 'locusListIntervalGuid', 'genomeVersion', 'chrom', 'start', 'end'})

    results_model = VariantSearchResults.objects.get(search_hash=SEARCH_HASH)
    mock_get_variants.assert_called_with(results_model, sort='xpos', page=1,
                                         num_results=100, skip_genotype_filter=False)
    mock_error_logger.assert_not_called()

    # Test pagination
    response = self.client.get('{}?page=3'.format(url))
    self.assertEqual(response.status_code, 200)
    mock_get_variants.assert_called_with(results_model, sort='xpos', page=3,
                                         num_results=100, skip_genotype_filter=False)
    mock_error_logger.assert_not_called()

    # Test sort
    response = self.client.get('{}?sort=pathogenicity'.format(url))
    self.assertEqual(response.status_code, 200)
    mock_get_variants.assert_called_with(results_model, sort='pathogenicity', page=1,
                                         num_results=100, skip_genotype_filter=False)
    mock_error_logger.assert_not_called()

    # Test export
    export_url = reverse(export_variants_handler, args=[SEARCH_HASH])
    response = self.client.get(export_url)
    self.assertEqual(response.status_code, 200)
    expected_content = [
        ['chrom', 'pos', 'ref', 'alt', 'gene', 'worst_consequence', '1kg_freq',
         'exac_freq', 'gnomad_genomes_freq', 'gnomad_exomes_freq', 'topmed_freq',
         'cadd', 'revel', 'eigen', 'polyphen', 'sift', 'muttaster', 'fathmm',
         'rsid', 'hgvsc', 'hgvsp', 'clinvar_clinical_significance', 'clinvar_gold_stars',
         'filter', 'family_id_1', 'tags_1', 'notes_1', 'family_id_2', 'tags_2', 'notes_2',
         'sample_1', 'num_alt_alleles_1', 'gq_1', 'ab_1',
         'sample_2', 'num_alt_alleles_2', 'gq_2', 'ab_2'],
        ['21', '3343400', 'GAGA', 'G', 'WASH7P', 'missense_variant',
         '', '', '', '', '', '', '', '', '', '', '', '', '',
         'ENST00000623083.3:c.1075G>A', 'ENSP00000485442.1:p.Gly359Ser', '', '', '',
         '1', 'Tier 1 - Novel gene and phenotype (None)|Review (None)', '', '2', '', '',
         'NA19675', '1', '46.0', '0.702127659574', 'NA19679', '0', '99.0', '0.0'],
        ['3', '835', 'AAAG', 'A',
         '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '',
         '1', '', '', '', '', '', 'NA19679', '0', '99.0', '0.0', '', '', '', ''],
        ['12', '48367227', 'TC', 'T',
         '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '',
         '2', 'Known gene for phenotype (None)|Excluded (None)', 'test n\xf8te (None)',
         '', '', '', '', '', '', '', '', '', '', ''],
    ]
    self.assertEqual(
        response.content,
        ('\n'.join(['\t'.join(line) for line in expected_content]) + '\n').encode('utf-8'))
    mock_get_variants.assert_called_with(results_model, page=1, load_all=True)
    mock_error_logger.assert_not_called()

    # Test gene breakdown
    gene_counts = {
        'ENSG00000227232': {'total': 2, 'families': {'F000001_1': 2, 'F000002_2': 1}},
        'ENSG00000268903': {'total': 1, 'families': {'F000002_2': 1}},
    }
    mock_get_gene_counts.return_value = gene_counts
    gene_breakdown_url = reverse(get_variant_gene_breakdown, args=[SEARCH_HASH])
    response = self.client.get(gene_breakdown_url)
    self.assertEqual(response.status_code, 200)
    response_json = response.json()
    self.assertSetEqual(set(response_json.keys()), {'searchGeneBreakdown', 'genesById'})
    self.assertDictEqual(response_json['searchGeneBreakdown'], {SEARCH_HASH: gene_counts})
    self.assertSetEqual(set(response_json['genesById'].keys()),
                        {'ENSG00000227232', 'ENSG00000268903'})
    gene_fields = {'constraints', 'omimPhenotypes', 'mimNumber', 'cnSensitivity'}
    gene_fields.update(GENE_FIELDS)
    self.assertSetEqual(set(response_json['genesById']['ENSG00000227232'].keys()), gene_fields)

    # Test compound hets
    mock_get_variants.side_effect = _get_compound_het_es_variants
    response = self.client.post(url, content_type='application/json', data=json.dumps({
        'projectFamilies': PROJECT_FAMILIES, 'search': SEARCH}))
    self.assertEqual(response.status_code, 200)
    response_json = response.json()
    self.assertSetEqual(set(response_json.keys()), {
        'searchedVariants', 'savedVariantsByGuid', 'genesById', 'search',
        'variantTagsByGuid', 'variantNotesByGuid', 'variantFunctionalDataByGuid',
        'locusListsByGuid'})
    self.assertListEqual(response_json['searchedVariants'], COMP_HET_VARAINTS)
    self.assertSetEqual(set(response_json['savedVariantsByGuid'].keys()),
                        {'SV0000002_1248367227_r0390_100'})
    self.assertSetEqual(set(response_json['genesById'].keys()), {'ENSG00000233653'})
    mock_error_logger.assert_not_called()

    # Test cross-project discovery for analyst users
    self.login_analyst_user()
    mock_get_variants.side_effect = _get_es_variants
    response = self.client.get('{}?sort=pathogenicity'.format(url))
    self.assertEqual(response.status_code, 403)

    mock_analyst_group.__bool__.return_value = True
    mock_analyst_group.resolve_expression.return_value = 'analysts'
    response = self.client.get('{}?sort=pathogenicity'.format(url))
    self.assertEqual(response.status_code, 200)
    response_json = response.json()
    self.assertSetEqual(set(response_json.keys()), {
        'searchedVariants', 'savedVariantsByGuid', 'genesById', 'search',
        'variantTagsByGuid', 'variantNotesByGuid', 'variantFunctionalDataByGuid',
        'familiesByGuid', 'locusListsByGuid'})
    self.assertListEqual(response_json['searchedVariants'], VARIANTS_WITH_DISCOVERY_TAGS)
    self.assertSetEqual(set(response_json['familiesByGuid'].keys()), {'F000011_11'})
    mock_get_variants.assert_called_with(results_model, sort='pathogenicity_hgmd', page=1,
                                         num_results=100, skip_genotype_filter=False)
    mock_error_logger.assert_not_called()

    # Test no results
    mock_get_variants.side_effect = _get_empty_es_variants
    response = self.client.post(url, content_type='application/json', data=json.dumps({
        'projectFamilies': PROJECT_FAMILIES, 'search': SEARCH}))
    self.assertEqual(response.status_code, 200)
    response_json = response.json()
    self.assertDictEqual(response_json, {
        'searchedVariants': [],
        'search': {'search': SEARCH, 'projectFamilies': PROJECT_FAMILIES, 'totalResults': 0},
    })
    mock_error_logger.assert_not_called()
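# A hedged sketch (not seqr's actual handler code) of the exception-to-status
# mapping the assertions above imply: InvalidIndexException and
# InvalidSearchException map to 400, ConnectionTimeout to 504, and a
# TransportError reuses its own status code when it carries a numeric one.
def _error_status(exc):
    if isinstance(exc, ConnectionTimeout):
        return 504
    if isinstance(exc, (InvalidIndexException, InvalidSearchException)):
        return 400
    if isinstance(exc, TransportError):
        return 400 if exc.status_code == 'N/A' else int(exc.status_code)
    return 500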
def test_saveWork_multi_timeout_raise(self, testInstance, mocker):
    testInstance.work = mocker.MagicMock()
    # every save attempt times out, so the retry also fails
    testInstance.work.save.side_effect = ConnectionTimeout('test')

    with pytest.raises(ConnectionTimeout):
        testInstance.saveWork()
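# A minimal sketch of the saveWork behaviour both tests above assert: retry
# once on ConnectionTimeout, and let a second consecutive timeout propagate
# to the caller. The class name and `self.work` attribute mirror the tests
# but are otherwise assumptions.
from elasticsearch.exceptions import ConnectionTimeout

class WorkRecord(object):
    def saveWork(self):
        try:
            self.work.save()
        except ConnectionTimeout:
            # a second timeout here is not caught and reaches the caller
            self.work.save()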