@defer.inlineCallbacks
def test_fetcher_service_only(self):
    """
    Use the fetcher service to get a complete dataset, then try to decode
    the message fields once we get it.
    """
    raise unittest.SkipTest('Timing out on EC2')
    services = [
        {'name': 'fetcher',
         'module': 'ion.services.sa.fetcher',
         'class': 'FetcherService'},
    ]
    sup = yield self._spawn_processes(services)
    fc = FetcherClient(proc=sup)
    dset = yield fc.get_dap_dataset(TEST_DSET)
    # Decode the fields as an integrity test
    dset['das'] = json.loads(dset['das'])
    dset['dds'] = json.loads(dset['dds'])
    dset['dods'] = base64.b64decode(dset['dods'])
    self.failUnlessEqual(dset['source_url'], TEST_DSET)
    self.failUnlessSubstring('COADSX', dset['das'])
    self.failUnlessSubstring('COADSY', dset['das'])
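# The decode block above implies how a fetched DAP dataset is packed for
# transport: 'das' and 'dds' travel as JSON-wrapped text, 'dods' as
# base64-encoded binary, alongside the original 'source_url'. Below is a
# standalone sketch of that round trip, inferred from the test; the helper
# names are illustrative, not the fetcher's actual API.

import base64
import json


def pack_dap_dataset(source_url, das_text, dds_text, dods_bytes):
    """Pack DAP components the way the test above expects to unpack them."""
    return {'source_url': source_url,
            'das': json.dumps(das_text),
            'dds': json.dumps(dds_text),
            'dods': base64.b64encode(dods_bytes)}


def unpack_dap_dataset(dset):
    """Mirror of the decode block in the test: JSON for DAS/DDS, base64 for DODS."""
    dset['das'] = json.loads(dset['das'])
    dset['dds'] = json.loads(dset['dds'])
    dset['dods'] = base64.b64decode(dset['dods'])
    return dset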
class CoordinatorService(BaseService):
    """
    Brains behind preservation, and also the primary interface.

    @todo Refactor this into a BaseService that provides DAP data on a
    looping call; make the URL a parameter of the process - one per url...
    """
    # Define ourselves for the CC
    declare = BaseService.service_declare(name='coordinator',
                                          version='0.1.0',
                                          dependencies=['fetcher'])

    def slc_init(self):
        """
        Service life cycle state. Initialize service here. Can use yields.
        @todo Create instances of clients here for later - fetcher, attr store, etc.
        """
        logging.debug('Preservation coordinator SLC init')
        self.fc = FetcherClient(proc=self)

    @defer.inlineCallbacks
    def op_get_url(self, content, headers, msg):
        """
        @brief Method for proxy - request a (DAP) URL
        @param content URL to fetch
        @param headers conv-id and reply-to should point to proxy/requester
        @param msg Not used
        @todo Cache logic - right now just trapdoors all requests to the fetcher
        """
        logging.debug('Coordinator forwarding URL request to fetcher')
        yield self.fc.forward_get_url(content, headers)

    @defer.inlineCallbacks
    def op_get_dap_dataset(self, content, headers, msg):
        """
        @brief Similar to op_get_url. Fetches an entire DAP dataset.
        @param content URL to fetch
        @param headers conv-id and reply-to should point to proxy/requester
        @param msg Not used
        @todo Cache logic - right now just trapdoors all requests to the fetcher
        """
        yield self.fc.forward_get_dap_dataset(content, headers)
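# The @todo notes above flag the missing cache logic: every request is
# currently trapdoored straight to the fetcher. Below is a minimal sketch of
# what a URL-keyed cache in front of op_get_dap_dataset could look like,
# written as a hypothetical subclass so the service above stays untouched.
# The reply_ok call is an assumption about the process base class, and unlike
# the forwarding path it makes the coordinator (not the fetcher) answer the
# requester; treat this as an illustration, not the project's caching design.

from twisted.internet import defer


class CachingCoordinatorService(CoordinatorService):
    """Hypothetical CoordinatorService variant with a naive in-memory cache."""

    def slc_init(self):
        CoordinatorService.slc_init(self)
        self._url_cache = {}  # hypothetical map: source_url -> fetched dataset message

    @defer.inlineCallbacks
    def op_get_dap_dataset(self, content, headers, msg):
        if content not in self._url_cache:
            # Fetch through the client and keep a copy, instead of pointing
            # reply-to at the fetcher as the base class does.
            dset = yield self.fc.get_dap_dataset(content)
            self._url_cache[content] = dset
        # reply_ok is assumed here for the sake of the sketch.
        yield self.reply_ok(msg, self._url_cache[content])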
class FetcherTest(IonTestCase):
    @defer.inlineCallbacks
    def setUp(self):
        yield self._start_container()
        services = [
            {'name': 'fetcher',
             'module': 'ion.services.sa.fetcher',
             'class': 'FetcherService'},
        ]
        sup = yield self._spawn_processes(services)
        self.fc = FetcherClient(proc=sup)

    @defer.inlineCallbacks
    def tearDown(self):
        yield self._stop_container()

    @defer.inlineCallbacks
    def _get_page(self, src_url):
        logging.debug('sending GET request for "%s"...' % src_url)
        res = yield self.fc.get_url(src_url)
        if res['status'] == 'ERROR':
            raise ValueError('Error on fetch')
        msg = base64.b64decode(res['value'])
        defer.returnValue(msg)

    @defer.inlineCallbacks
    def _get_phead(self, src_url):
        logging.debug('sending HEAD request for "%s"...' % src_url)
        res = yield self.fc.get_head(src_url)
        if res['status'] == 'ERROR':
            raise ValueError('Error on fetch')
        msg = base64.b64decode(res['value'])
        defer.returnValue(msg)

    ###############################################

    def test_instantiation_only(self):
        pass

    @defer.inlineCallbacks
    def test_single_get(self):
        """
        Simplest test, fetch a fixed local page.
        @note Contents of same in /var/www/tmp on amoeba.ucsd.edu
        """
        res = yield self._get_page('http://amoeba.ucsd.edu/tmp/test1.txt')
        self.failUnlessSubstring('Now is the time for all good men', res)
        self.failUnlessSubstring('content-length', res)

    @defer.inlineCallbacks
    def test_page_head(self):
        """
        Similar to get, but using the HEAD verb to pull just the headers.
        """
        res = yield self._get_phead('http://amoeba.ucsd.edu/tmp/test1.txt')
        self.failUnlessSubstring('content-length', res)

    @defer.inlineCallbacks
    def test_404(self):
        try:
            yield self._get_page('http://ooici.net/404-fer-sure')
            self.fail('Should have gotten an exception for 404 error!')
        except ValueError:
            pass

    @defer.inlineCallbacks
    def test_header_404(self):
        try:
            yield self._get_phead('http://ooici.net/404-fer-sure')
            self.fail('Should have gotten an exception for 404 error!')
        except ValueError:
            pass
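# The test helpers above imply the fetcher's reply envelope: a dict whose
# 'status' field signals failure with 'ERROR' and whose 'value' field carries
# the fetched page (headers plus body) base64-encoded. Below is a standalone
# sketch of that envelope, inferred from the tests; the success status 'OK'
# and the helper names are assumptions, since the tests only check for 'ERROR'.

import base64


def make_fetch_reply(page_bytes, ok=True):
    """Wrap a fetched page the way _get_page/_get_phead expect to unwrap it."""
    return {'status': 'OK' if ok else 'ERROR',
            'value': base64.b64encode(page_bytes)}


def unwrap_fetch_reply(res):
    """Mirror of the test helpers: raise on error, otherwise decode the payload."""
    if res['status'] == 'ERROR':
        raise ValueError('Error on fetch')
    return base64.b64decode(res['value'])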