def test_hypnos_cache_with_ingest(self):
    # A cached result produced by an API with an ingest definition must
    # remain msgpack serializable, and caching must not mutate the job.
    self.thisHostMustNot(platform='windows')
    self.skipIfNoInternet()
    # testserver = Foo()
    gconf = get_fake_ipify_ingest_global_config(port=self.port)
    opts = {
        s_remcycle.MIN_WORKER_THREADS: 1,
        s_remcycle.CACHE_ENABLED: True,
    }
    with s_remcycle.Hypnos(opts=opts, ioloop=self.io_loop) as hypo_obj:  # type: s_remcycle.Hypnos
        hypo_obj.addWebConfig(config=gconf)
        job_id = hypo_obj.fireWebApi(name='fakeipify:jsonip')
        job_tufo = hypo_obj.web_boss.job(jid=job_id)
        hypo_obj.web_boss.wait(job_id)
        cached = hypo_obj.webCacheGet(jid=job_id)
        self.nn(cached)
        # The cached payload must survive msgpack encoding.
        packed = msgenpack(cached)
        self.true(isinstance(packed, bytes))
        # Caching must leave the ingest data on the live job tufo intact.
        self.true('ingdata' in job_tufo[1].get('task')[2].get('resp'))
def test_hypnos_content_type_skips(self):
    # Adding/removing a content-type skip toggles response decoding:
    # skipped types come back as raw bytes instead of parsed JSON.
    self.thisHostMustNot(platform='windows')
    self.skipIfNoInternet()
    gconf = get_fake_ipify_global_config(port=self.port)
    # testserver = Foo()
    with s_remcycle.Hypnos(opts={s_remcycle.MIN_WORKER_THREADS: 1},
                           ioloop=self.io_loop) as hypo_obj:
        hypo_obj.addWebConfig(config=gconf)
        self.true('fakeipify:jsonip' in hypo_obj._web_apis)

        seen_types = {}

        def record_type(job_tufo):
            # Remember the python type of the decoded response body.
            jid, jobd = job_tufo
            body = jobd.get('task')[2].get('resp').get('data')
            seen_types[jid] = type(body)

        jid1 = hypo_obj.fireWebApi('fakeipify:jsonip', ondone=record_type)
        hypo_obj.web_boss.wait(jid1)
        # While the skip is registered, the body should stay raw bytes.
        hypo_obj.webContentTypeSkipAdd('application/json')
        jid2 = hypo_obj.fireWebApi('fakeipify:jsonip', ondone=record_type)
        hypo_obj.web_boss.wait(jid2)
        hypo_obj.webContentTypeSkipDel('application/json')
        jid3 = hypo_obj.fireWebApi('fakeipify:jsonip', ondone=record_type)
        hypo_obj.web_boss.wait(jid3)

        self.true(jid1 in seen_types)
        self.eq(seen_types[jid1], dict)
        self.true(jid2 in seen_types)
        self.eq(seen_types[jid2], bytes)
        self.true(jid3 in seen_types)
        self.eq(seen_types[jid3], dict)
def test_hypnos_automatic_ingest(self):
    # A configuration object containing an ingest definition should be
    # parsed automatically and feed tufos into the web core.
    self.thisHostMustNot(platform='windows')
    self.skipIfNoInternet()
    gconf = get_fake_ipify_ingest_global_config(port=self.port)
    # testserver = Foo()
    with s_remcycle.Hypnos(opts={s_remcycle.MIN_WORKER_THREADS: 1},
                           ioloop=self.io_loop) as hypo_obj:
        hypo_obj.addWebConfig(config=gconf)
        self.true('fakeipify:jsonip' in hypo_obj._syn_funcs)
        self.true('fakeipify:jsonip:ipv4' in hypo_obj.web_core._syn_funcs)

        results = {}

        def capture_ip(job_tufo):
            # Decode the raw response body and stash the reported IP.
            jid, jobd = job_tufo
            body = jobd.get('task')[2].get('resp', {}).get('data')
            payload = json.loads(body.read().decode())
            results[jid] = payload.get('ret').get('ip')

        jid = hypo_obj.fireWebApi(name='fakeipify:jsonip', ondone=capture_ip)
        hypo_obj.web_boss.wait(jid)

        tufos = hypo_obj.web_core.getTufosByProp('inet:ipv4')
        self.eq(len(tufos), 1)
        # Validate the IP of the tufo is the same we got from ipify
        self.nn(results[jid])
        self.eq(s_inet.ipv4str(tufos[0][1].get('inet:ipv4')), results[jid])
def test_hypnos_simple_fail(self):
    # Hitting a bogus endpoint must surface full error details on the job.
    self.thisHostMustNot(platform='windows')
    self.skipIfNoInternet()
    gconf = get_fake_ipify_global_config(port=self.port)
    # testserver = Foo()
    with s_remcycle.Hypnos(opts={s_remcycle.MIN_WORKER_THREADS: 1},
                           ioloop=self.io_loop) as hypo_obj:
        hypo_obj.addWebConfig(config=gconf)

        jobs = {}

        def capture(job_tufo):
            # Stash the finished job dict keyed by its job id.
            jid, jobd = job_tufo
            jobs[jid] = jobd

        jid = hypo_obj.fireWebApi(name='fakeipify:fake_endpoint', ondone=capture)
        hypo_obj.web_boss.wait(jid)

        job = jobs.get(jid)
        # Error information must be propagated up to the job itself.
        self.true('err' in job)
        self.eq(job.get('err'), 'HTTPError')
        self.true('errfile' in job)
        self.true('errline' in job)
        self.true('errmsg' in job)
        # An HTTP-level failure still yields response metadata.
        resp = job.get('task')[2].get('resp')
        self.true('code' in resp)
        self.true('request' in resp)
        self.true('headers' in resp)
def test_hypnos_default_callback(self):
    # With no explicit callback, completion fires an event under the API
    # name; a registered handler should see the response.
    self.thisHostMustNot(platform='windows')
    self.skipIfNoInternet()
    # testserver = Foo()
    gconf = get_fake_ipify_global_config(port=self.port)
    with s_remcycle.Hypnos(opts={s_remcycle.MIN_WORKER_THREADS: 1},
                           ioloop=self.io_loop) as hypo_obj:
        hypo_obj.addWebConfig(config=gconf)
        self.true('fakeipify:jsonip' in hypo_obj._web_apis)

        def check_resp(event_tufo):
            # The default event carries the response in its kwargs.
            name, argdata = event_tufo
            resp = argdata.get('kwargs').get('resp')
            self.eq(resp.get('code'), 200)

        hypo_obj.on('fakeipify:jsonip', func=check_resp)
        jid = hypo_obj.fireWebApi('fakeipify:jsonip')
        job = hypo_obj.web_boss.job(jid)
        hypo_obj.web_boss.wait(jid)
        self.true(job[1].get('done'))
def test_hypnos_fire_api_callback(self):
    # Positional args and kwargs given to fireWebApi are forwarded to
    # the provided callback along with the response.
    self.thisHostMustNot(platform='windows')
    # testserver = Foo()
    gconf = get_fake_ipify_global_config(port=self.port)
    with s_remcycle.Hypnos(opts={s_remcycle.MIN_WORKER_THREADS: 2},
                           ioloop=self.io_loop) as hypo_obj:
        hypo_obj.addWebConfig(config=gconf)

        state = {'set': False, 'keys': set([])}

        def callback(*args, **kwargs):
            # Caller-supplied args arrive alongside the resp kwarg.
            self.true('foo' in args)
            self.true('bar' in args)
            self.true('resp' in kwargs)
            self.true('key' in kwargs)
            self.eq(kwargs.get('resp').get('code'), 200)
            state['set'] = True
            state['keys'].add(kwargs.get('key'))

        jid1 = hypo_obj.fireWebApi('fakeipify:rawip', 'foo', 'bar',
                                   key='12345', callback=callback)
        jid2 = hypo_obj.fireWebApi('fakeipify:rawip', 'foo', 'bar',
                                   key='67890', callback=callback)
        hypo_obj.web_boss.wait(jid=jid1)
        hypo_obj.web_boss.wait(jid=jid2)

        self.eq(state['set'], True)
        self.eq(state.get('keys'), {'12345', '67890'})
def test_hypnos_callback_ondone(self):
    # Both the per-call callback and the ondone handler must fire for a
    # single job, and the completed job tufo must be marked done.
    self.thisHostMustNot(platform='windows')
    # testserver = Foo()
    gconf = get_fake_ipify_global_config(port=self.port)
    with s_remcycle.Hypnos(opts={s_remcycle.MIN_WORKER_THREADS: 1},
                           ioloop=self.io_loop) as hypo_obj:
        hypo_obj.addWebConfig(config=gconf)
        self.true('fakeipify:jsonip' in hypo_obj._web_apis)

        data = {}

        def ondone(job_tufo):
            # Record the completed job dict keyed by job id.
            _jid, jobd = job_tufo
            data[_jid] = jobd

        def cb(*args, **kwargs):
            # Validate the decoded response payload.
            resp = kwargs.get('resp')
            self.true(resp.get('code') == 200)
            # Use a distinct local name: the previous implementation
            # shadowed the enclosing ``data`` results dict here.
            resp_data = resp.get('data')
            self.true('ret' in resp_data)
            ret = resp_data.get('ret')
            self.true('ip' in ret)

        jid = hypo_obj.fireWebApi('fakeipify:jsonip', callback=cb, ondone=ondone)
        job = hypo_obj.web_boss.job(jid)
        hypo_obj.web_boss.wait(jid)
        # ondone must have populated the results dict for this job.
        self.true(jid in data)
        self.true(job[1].get('done'))
def test_hypnos_config_bounds(self):
    # The constructor must reject out-of-bounds worker/client settings.
    # (The dead ``hypo_obj = ...`` bindings are removed: the constructor
    # raises, so the assignment target was never used.)
    self.thisHostMustNot(platform='windows')

    # Minimum worker thread count must be at least 1.
    with self.raises(s_common.BadConfValu) as cm:
        s_remcycle.Hypnos(opts={s_remcycle.MIN_WORKER_THREADS: 0},
                          ioloop=self.io_loop)
    self.isin('web:worker:threads:min must be greater than 1', str(cm.exception))

    # Max worker thread count must exceed the configured minimum.
    with self.raises(s_common.BadConfValu) as cm:
        s_remcycle.Hypnos(opts={s_remcycle.MAX_WORKER_THREADS: 1,
                                s_remcycle.MIN_WORKER_THREADS: 2},
                          ioloop=self.io_loop)
    self.isin('web:worker:threads:max must be greater than the web:worker:threads:min',
              str(cm.exception))

    # Tornado client count must be at least 1.
    with self.raises(s_common.BadConfValu) as cm:
        s_remcycle.Hypnos(opts={s_remcycle.MAX_CLIENTS: 0, },
                          ioloop=self.io_loop)
    self.isin('web:tornado:max_clients must be greater than 1', str(cm.exception))
def test_hypnos_manual_ingest_via_eventbus(self):
    # Manually wire a Cortex ingest to Hypnos events - the by-hand
    # version of what the automatic ingest support does.
    self.thisHostMustNot(platform='windows')
    self.skipIfNoInternet()
    core = s_cortex.openurl('ram://')
    try:
        gconf = get_fake_ipify_global_config(port=self.port)
        # testserver = Foo()
        with s_remcycle.Hypnos(opts={s_remcycle.MIN_WORKER_THREADS: 1},
                               ioloop=self.io_loop) as hypo_obj:
            hypo_obj.addWebConfig(config=gconf)

            data = {}
            ingest_def = {
                "ingest": {
                    "forms": [["inet:ipv4", {"var": "ip"}]],
                    "vars": [["ip", {"path": "ret/ip"}]],
                }
            }
            name = 'fakeipify:jsonip'
            core_name = ':'.join([name, 'ingest'])
            gest = s_ingest.Ingest(info=ingest_def)
            s_ingest.register_ingest(core=core, gest=gest, evtname=core_name)

            def glue(event):
                # Re-fire the web response payload as the ingest event.
                # (Distinct local name - previously shadowed ``data``.)
                evtname, event_args = event
                resp = event_args.get('kwargs').get('resp')
                core.fire(core_name, data=resp.get('data'))

            def ondone(job_tufo):
                # Capture the IP reported by the API for later comparison.
                _jid, jobd = job_tufo
                _data = jobd.get('task')[2].get('resp', {}).get('data', {})
                data[_jid] = _data.get('ret', {}).get('ip', '')

            hypo_obj.on(name=name, func=glue)
            jid = hypo_obj.fireWebApi(name=name, ondone=ondone)
            hypo_obj.web_boss.wait(jid)

            tufos = core.getTufosByProp('inet:ipv4')
            self.eq(len(tufos), 1)
            # Validate the IP of the tufo is the same we got from ipify
            self.eq(s_inet.ipv4str(tufos[0][1].get('inet:ipv4')), data[jid])
    finally:
        # Tear down the Cortex even when an assertion above fails;
        # previously a failure leaked the ram Cortex.
        core.fini()
def test_hypnos_fini(self):
    # fini() must tear down every object Hypnos created for itself:
    # the boss, the io thread and the internally-created core.
    self.thisHostMustNot(platform='windows')
    hypnos = s_remcycle.Hypnos(opts={s_remcycle.MIN_WORKER_THREADS: 1},
                               ioloop=self.io_loop)
    hypnos.fini()
    self.true(hypnos.isfini)
    self.true(hypnos.web_boss.isfini)
    self.false(hypnos.web_iothr.is_alive())
    self.true(hypnos.web_core.isfini)
def test_hypnos_fini_core(self):
    # A Cortex handed to the constructor is NOT owned by Hypnos:
    # hypnos.fini() must leave it running; only core.fini() closes it.
    self.thisHostMustNot(platform='windows')
    core = s_cortex.openurl('ram:///')
    hypnos = s_remcycle.Hypnos(opts={s_remcycle.MIN_WORKER_THREADS: 1},
                               ioloop=self.io_loop,
                               core=core)
    hypnos.fini()
    self.true(hypnos.isfini)
    self.true(hypnos.web_boss.isfini)
    self.false(hypnos.web_iothr.is_alive())
    # The externally provided core is still alive after hypnos.fini().
    self.false(hypnos.web_core.isfini)
    core.fini()
    self.true(hypnos.web_core.isfini)
def test_hypnos_cache_with_failure(self):
    # Failed jobs must have their error details cached as well.
    self.thisHostMustNot(platform='windows')
    gconf = get_fake_ipify_global_config(port=self.port)
    opts = {
        s_remcycle.MIN_WORKER_THREADS: 1,
        s_remcycle.CACHE_ENABLED: True,
    }
    with s_remcycle.Hypnos(opts=opts, ioloop=self.io_loop) as hypo_obj:  # type: s_remcycle.Hypnos
        hypo_obj.addWebConfig(config=gconf)
        jid = hypo_obj.fireWebApi(name='fakeipify:fake_endpoint')
        hypo_obj.web_boss.wait(jid)
        # The cache entry should carry the full error description.
        cached = hypo_obj.webCacheGet(jid=jid)
        self.true('err' in cached)
        self.eq(cached.get('err'), 'HTTPError')
        self.true('errfile' in cached)
        self.true('errline' in cached)
        self.true('errmsg' in cached)
def test_hypnos_default_callback_null(self):
    # A job must complete even with no explicit callback and no
    # listening event handlers registered.
    self.thisHostMustNot(platform='windows')
    gconf = get_fake_ipify_global_config(port=self.port)
    # testserver = Foo()
    with s_remcycle.Hypnos(opts={s_remcycle.MIN_WORKER_THREADS: 1},
                           ioloop=self.io_loop) as hypo_obj:
        hypo_obj.addWebConfig(config=gconf)
        self.true('fakeipify:jsonip' in hypo_obj._web_apis)
        jid = hypo_obj.fireWebApi('fakeipify:jsonip')
        job_tufo = hypo_obj.web_boss.job(jid)
        hypo_obj.web_boss.wait(jid)
        self.true(job_tufo[1].get('done'))
        self.eq(job_tufo[1].get('task')[2].get('resp').get('code'), 200)
def test_hypnos_post_byts(self):
    # A bytes request body passed via api_args must actually be POSTed
    # to (and consumed by) the standalone test server.
    self.thisHostMustNot(platform='windows')
    testserver = self.env.testserver  # type: StandaloneTestServer
    self.false(testserver.nommer.nommed)
    body = json.dumps({'foo': 'bar', 'baz': [1, 2, 3]}).encode()
    gconf = get_fake_ipify_global_config(port=self.port)
    with s_remcycle.Hypnos(opts={s_remcycle.MIN_WORKER_THREADS: 1, },
                           ioloop=self.io_loop) as hypo_obj:  # type: s_remcycle.Hypnos
        hypo_obj.addWebConfig(config=gconf)
        jid = hypo_obj.fireWebApi(name='fakeipify:byts',
                                  api_args={'req_body': body})
        jobd = hypo_obj.web_boss.job(jid=jid)[1]  # type: dict
        hypo_obj.web_boss.wait(jid)
        # Did the server actually nom a POST body?
        self.true(testserver.nommer.nommed)
        resp = jobd.get('task')[2].get('resp')  # type: dict
        self.eq(resp.get('code'), 200)
        self.true(resp.get('data').get('ret'))
def test_hypnos_cache_job(self):
    # Job results are retrievable from the cache while caching is
    # enabled, and stop being cached once it is disabled.
    self.thisHostMustNot(platform='windows')
    self.skipIfNoInternet()
    gconf = get_fake_ipify_global_config(port=self.port)
    # testserver = Foo()
    with s_remcycle.Hypnos(
            opts={
                s_remcycle.MIN_WORKER_THREADS: 2,
                s_remcycle.CACHE_ENABLED: True,
            },
            ioloop=self.io_loop) as hypo_obj:  # type: s_remcycle.Hypnos
        hypo_obj.addWebConfig(config=gconf)

        jid1 = hypo_obj.fireWebApi('fakeipify:jsonip')
        self.false(jid1 in hypo_obj.web_cache)
        hypo_obj.web_boss.wait(jid=jid1)
        time.sleep(0.01)  # give the ioloop thread time to populate the cache
        self.true(jid1 in hypo_obj.web_cache)

        cached_data = hypo_obj.webCacheGet(jid=jid1)
        self.true(isinstance(cached_data, dict))
        resp = cached_data.get('resp')
        self.true('data' in resp)
        # Cached response data is a bytes object
        data = json.loads(resp.get('data').decode())
        # This is expected data from the API endpoint.
        self.true('ret' in data)
        self.true('ip' in data.get('ret'))

        # Popping removes the entry from the cache.
        cached_data2 = hypo_obj.webCachePop(jid=jid1)
        self.eq(cached_data, cached_data2)
        self.false(jid1 in hypo_obj.web_cache)

        # Disable the cache and ensure the responses are cleared and no longer cached.
        hypo_obj.setConfOpt(s_remcycle.CACHE_ENABLED, False)
        self.false(jid1 in hypo_obj.web_cache)
        jid2 = hypo_obj.fireWebApi('fakeipify:jsonip')
        # BUGFIX: wait on the job we just fired; the original waited on
        # jid1 (already complete), so jid2 was never actually awaited.
        hypo_obj.web_boss.wait(jid=jid2)
        time.sleep(0.01)
        self.false(jid2 in hypo_obj.web_cache)
        cached_data3 = hypo_obj.webCachePop(jid=jid2)
        self.none(cached_data3)
def test_hypnos_config_register_deregister(self):
    # Exercise the full namespace lifecycle: register two configs,
    # deregister one, re-register it, then force a reload with a changed
    # API definition - checking the internal registries at each step and
    # that the hypnos:register:* events fire along the way.
    self.thisHostMustNot(platform='windows')
    vertex_conf = get_vertex_global_config()
    ipify_conf = get_fake_ipify_ingest_global_config()
    # Set of event names observed via the func callback below.
    data = set([])

    def func(eventdata):
        # Record every register/deregister event name we see.
        evtname, _ = eventdata
        data.add(evtname)

    with s_remcycle.Hypnos(opts={s_remcycle.MIN_WORKER_THREADS: 1},
                           ioloop=self.io_loop) as hypo_obj:
        # Register callbacks
        hypo_obj.on('hypnos:register:namespace:add', func)
        hypo_obj.on('hypnos:register:namespace:del', func)
        hypo_obj.on('hypnos:register:api:del', func)
        hypo_obj.on('hypnos:register:api:add', func)

        # First namespace: vertexproject (no ingest definitions).
        hypo_obj.addWebConfig(config=vertex_conf)
        self.true('vertexproject' in hypo_obj._web_namespaces)
        self.true('vertexproject' in hypo_obj._web_docs)
        self.true(len(hypo_obj._web_apis) == 3)
        self.true('vertexproject:http' in hypo_obj._web_apis)
        self.true('vertexproject:https' in hypo_obj._web_apis)
        self.eq(dict(hypo_obj._web_api_ingests), {})

        # Test description data
        d = hypo_obj.getWebDescription()
        self.true(isinstance(d, dict))
        self.true('vertexproject' in d)
        self.true('doc' in d['vertexproject'])
        self.true('vertexproject:http' in d['vertexproject'])
        self.true('doc' in d['vertexproject']['vertexproject:http'])

        # Second namespace: fakeipify (includes an ingest definition).
        hypo_obj.addWebConfig(config=ipify_conf)
        self.true('fakeipify' in hypo_obj._web_namespaces)
        self.true('fakeipify' in hypo_obj._web_docs)
        self.eq(len(hypo_obj._web_namespaces), 2)
        self.eq(len(hypo_obj._web_apis), 6)
        self.true('fakeipify:jsonip' in hypo_obj._web_apis)
        self.true('fakeipify:jsonip' in hypo_obj._web_api_ingests)
        self.true('fakeipify:jsonip' in hypo_obj._syn_funcs)
        self.true('fakeipify:jsonip:ipv4' in hypo_obj.web_core._syn_funcs)

        # Check repr!
        r = repr(hypo_obj)
        self.true('Hypnos' in r)
        self.true('vertexproject' in r)
        self.true('fakeipify' in r)
        self.true('synapse.cores.common.Cortex' in r)

        # Ensure that we remove everything when we deregister a namespace
        hypo_obj.delWebConf(namespace='fakeipify')
        self.true('fakeipify' not in hypo_obj._web_namespaces)
        self.true('fakeipify' not in hypo_obj._web_docs)
        self.eq(len(hypo_obj._web_namespaces), 1)
        self.eq(len(hypo_obj._web_apis), 3)
        self.true('fakeipify:jsonip' not in hypo_obj._web_apis)
        self.true('fakeipify:jsonip' not in hypo_obj._web_api_ingests)
        self.true('fakeipify:jsonip' not in hypo_obj._syn_funcs)
        self.true(
            'fakeipify:jsonip:ipv4' not in hypo_obj.web_core._syn_funcs)

        # Trying to re-register a present namespace should fail
        with self.raises(NameError) as cm:
            hypo_obj.addWebConfig(config=vertex_conf, reload_config=False)
        self.true('Namespace is already registered' in str(cm.exception))

        # Register ipfy again
        hypo_obj.addWebConfig(config=ipify_conf)
        self.true('fakeipify' in hypo_obj._web_namespaces)
        self.true('fakeipify' in hypo_obj._web_docs)
        self.eq(len(hypo_obj._web_namespaces), 2)
        self.eq(len(hypo_obj._web_apis), 6)
        self.true('fakeipify:jsonip' in hypo_obj._web_apis)
        self.true('fakeipify:jsonip' in hypo_obj._web_api_ingests)
        self.true('fakeipify:jsonip' in hypo_obj._syn_funcs)
        self.true('fakeipify:jsonip:ipv4' in hypo_obj.web_core._syn_funcs)
        self.true('fakeipify:jsonip' in hypo_obj._web_api_gest_opens)

        # Now change something with ipify, register it and force a reload to occur
        api_def = ipify_conf['apis'].pop(0)
        gest_def = api_def[1]['ingest']
        gest_def['name'] = 'foobar'
        ipify_conf['apis'].append(['duckip', api_def[1]])
        hypo_obj.addWebConfig(config=ipify_conf)
        # The old jsonip API must be fully replaced by the new duckip one.
        self.true('fakeipify' in hypo_obj._web_namespaces)
        self.true('fakeipify' in hypo_obj._web_docs)
        self.eq(len(hypo_obj._web_namespaces), 2)
        self.eq(len(hypo_obj._web_apis), 6)
        self.true('fakeipify:jsonip' not in hypo_obj._web_apis)
        self.true('fakeipify:jsonip' not in hypo_obj._web_api_ingests)
        self.true('fakeipify:jsonip' not in hypo_obj._syn_funcs)
        self.true(
            'fakeipify:jsonip:ipv4' not in hypo_obj.web_core._syn_funcs)
        self.true('fakeipify:jsonip' not in hypo_obj._web_api_gest_opens)
        self.true('fakeipify:duckip' in hypo_obj._web_apis)
        self.true('fakeipify:duckip' in hypo_obj._web_api_ingests)
        self.true('fakeipify:duckip' in hypo_obj._syn_funcs)
        self.true(
            'fakeipify:duckip:foobar' in hypo_obj.web_core._syn_funcs)
        self.true('fakeipify:duckip' in hypo_obj._web_api_gest_opens)

        # ensure all the expected events fired during testing
        self.true('hypnos:register:namespace:add' in data)
        self.true('hypnos:register:namespace:del' in data)
        self.true('hypnos:register:api:add' in data)
        self.true('hypnos:register:api:del' in data)