def postCell(self):
    '''
    Post-startup hook: open the blob store and wire up clone behavior.

    Raises:
        s_common.BadConfValu: If the cell was not given a neuron address.
    '''
    # A BlobCell is useless without a neuron to register with.
    if self.neuraddr is None:
        raise s_common.BadConfValu(mesg='BlobCell requires a neuron')

    mapsize = self.getConfOpt('blob:mapsize')
    path = self.getCellDir('blobs.lmdb')
    self.blobs = BlobStor(path, mapsize=mapsize)

    # If this cell is a clone, follow the upstream cell and advertise
    # the relationship in our cell info.
    self.cloneof = self.getConfOpt('blob:cloneof')
    if self.cloneof is not None:
        self.cellpool.add(self.cloneof, self._fireCloneThread)
        self.cellinfo['blob:cloneof'] = self.cloneof
def _parseConfig(self):
    '''
    Validate the raw Remcycle config and unpack it onto the instance.

    Raises:
        s_common.NoSuchName: If a required config key is absent.
        s_common.BadConfValu: If a reserved api_arg name is used.
    '''
    conf = self._raw_config

    # Fail fast on any missing required key.
    for name in self.required_keys:
        if name in conf:
            continue
        logger.error('Remcycle config is missing a required value %s.', name)
        raise s_common.NoSuchName(name=name, mesg='Missing required key.')

    self.url_template = conf.get('url')
    self.doc = conf.get('doc')
    self.url_vars.update(conf.get('vars', {}))
    self.request_defaults = conf.get('http', {})
    self._parseGestConfig(conf.get('ingest'))

    self.api_args.extend(conf.get('api_args', []))
    # Reject any positional api_arg that collides with a reserved name.
    for name in self.reserved_api_args:
        if name in self.api_args:
            raise s_common.BadConfValu(name=name, valu=None,
                                       mesg='Reserved api_arg used.')
    self.api_kwargs.update(conf.get('api_optargs', {}))

    # Set effective url
    self.effective_url = self.url_template.format(**self.url_vars)
def postCell(self):
    '''
    Post-startup hook: open the axon LMDB index and join the blob cells.

    Raises:
        s_common.BadConfValu: If no CellPool is available.
    '''
    if self.cellpool is None:
        raise s_common.BadConfValu(
            mesg='AxonCell requires a neuron and CellPool')

    path = self.getCellDir('axon.lmdb')
    mapsize = self.getConfOpt('axon:mapsize')

    self.lenv = lmdb.open(path, writemap=True, max_dbs=128)
    self.lenv.set_mapsize(mapsize)

    # Index databases for blob presence and blob location.
    self.blobhas = self.lenv.open_db(b'axon:blob:has')  # <sha256>=<size>
    self.bloblocs = self.lenv.open_db(b'axon:blob:locs')  # <sha256><loc>=<buid>

    self.metrics = s_lmdb.Metrics(self.lenv)

    # Pool of BlobCells which hold the actual bytes for this axon.
    self.blobs = s_cell.CellPool(self.cellauth, self.neuraddr)
    self.blobs.neurwait(timeout=10)
    for name in self.getConfOpt('axon:blobs'):
        self.blobs.add(name)
def __init__(self, core=None, opts=None, *args, **kwargs):
    '''
    Initialize the web API client runtime: config hooks, caches, tornado
    IO loop/client, worker pool, cortex, and fini handlers.

    Args:
        core: Optional Cortex. When None, a ram:// cortex is created and
            fini'd together with this object.
        opts (dict): Optional config options applied via setConfOpts().
        **kwargs: Recognized keys: 'ioloop' (a tornado IOLoop to reuse
            instead of creating one) and 'content_type_skip' (iterable of
            content-type headers to skip automatic decoding for).

    Raises:
        s_common.BadConfValu: If thread-pool or max-client options are
            out of range.
    '''
    s_config.Config.__init__(self)

    # Runtime-settable options — hooks must be registered before
    # setConfOpts() below so the callbacks fire for the initial values.
    self.onConfOptSet(CACHE_ENABLED, self._onSetWebCache)
    self.onConfOptSet(CACHE_TIMEOUT, self._onSetWebCacheTimeout)

    # Things we need prior to loading in conf values
    self.web_boss = s_async.Boss()
    self.web_cache = s_cache.Cache()
    self.web_cache_enabled = False

    if opts:
        self.setConfOpts(opts)

    self._web_required_keys = ('namespace', 'doc', 'apis')

    self._web_apis = {}
    self._web_namespaces = set([])
    self._web_docs = {}
    self._web_default_http_args = {}  # Global request headers per namespace

    # Check configable options before we spin up any more resources.
    # NOTE(review): the '< 1' checks reject zero/negative values, though
    # the messages read 'greater than 1' — confirm intended bounds.
    max_clients = self.getConfOpt(MAX_CLIENTS)
    pool_min = self.getConfOpt(MIN_WORKER_THREADS)
    pool_max = self.getConfOpt(MAX_WORKER_THREADS)
    if pool_min < 1:
        raise s_common.BadConfValu(
            name=MIN_WORKER_THREADS, valu=pool_min,
            mesg='web:worker:threads:min must be greater than 1')
    if pool_max < pool_min:
        raise s_common.BadConfValu(
            name=MAX_WORKER_THREADS, valu=pool_max,
            mesg='web:worker:threads:max must be greater than the web:worker:threads:min')
    if max_clients < 1:
        raise s_common.BadConfValu(
            name=MAX_CLIENTS, valu=max_clients,
            mesg='web:tornado:max_clients must be greater than 1')

    # Tornado Async — reuse a caller-supplied loop if given.
    loop = kwargs.get('ioloop')
    if loop is None:
        loop = t_ioloop.IOLoop()
    self.web_loop = loop
    self.web_client = t_http.AsyncHTTPClient(io_loop=self.web_loop,
                                             max_clients=max_clients)
    # Runs the IO loop in a background thread.
    self.web_iothr = self._runIoLoop()

    # Synapse Async and thread pool
    self.web_pool = s_threads.Pool(pool_min, pool_max)

    # Synapse Core and ingest tracking — only fini the cortex if we
    # created it ourselves here.
    if core is None:
        core = s_cortex.openurl('ram://')
        self.onfini(core.fini)
    self.web_core = core
    self._web_api_ingests = collections.defaultdict(list)
    self._web_api_gest_opens = {}

    # Setup Fini handlers
    self.onfini(self._onHypoFini)

    # List of content-type headers to skip automatic decoding
    self._web_content_type_skip = set([])
    self.webContentTypeSkipAdd('application/octet-stream')
    for ct in kwargs.get('content_type_skip', []):
        self.webContentTypeSkipAdd(ct)