def head(self, path, **kwargs):
    """Issue an HTTP HEAD request against the cluster host.

    A HEAD response is not expected to carry a body; if one arrives anyway
    it is decoded as JSON and returned (after reporting any `error`
    property via Log.error).  A non-200 status is reported as an error.
    Returns the parsed body, or None when there is no content.
    """
    endpoint = self.settings.host + ":" + unicode(self.settings.port) + path
    try:
        reply = http.head(endpoint, **kwargs)
        if reply.status_code not in [200]:
            Log.error(reply.reason + ": " + reply.all_content)
        if self.debug:
            Log.note(
                "response: {{response}}",
                response=strings.limit(utf82unicode(reply.all_content), 130)
            )
        if not reply.all_content:
            return None  # WE DO NOT EXPECT content WITH HEAD REQUEST
        details = wrap(convert.json2value(utf82unicode(reply.all_content)))
        if details.error:
            Log.error(details.error)
        return details
    except Exception as cause:
        Log.error("Problem with call to {{url}}", url=endpoint, cause=cause)
def head(self, path, **kwargs):
    """Send an HTTP HEAD to the cluster and return any JSON body.

    HEAD responses normally have no content, in which case None is
    returned.  When a body is present it is parsed with
    mo_json.json2value; a body whose `error` property is set, or a
    status code other than 200, is reported through Log.error.
    """
    target = self.settings.host + ":" + unicode(self.settings.port) + path
    try:
        result = http.head(target, **kwargs)
        if result.status_code not in [200]:
            Log.error(result.reason + ": " + result.all_content)
        if self.debug:
            preview = strings.limit(utf82unicode(result.all_content), 130)
            Log.note("response: {{response}}", response=preview)
        if result.all_content:
            details = wrap(mo_json.json2value(utf82unicode(result.all_content)))
            if details.error:
                Log.error(details.error)
            return details
        return None  # WE DO NOT EXPECT content WITH HEAD REQUEST
    except Exception as cause:
        Log.error("Problem with call to {{url}}", url=target, cause=cause)
def __init__(
    self,
    hg=None,             # CONNECT TO hg
    repo=None,           # CONNECTION INFO FOR ES CACHE
    branches=None,       # CONNECTION INFO FOR ES CACHE
    use_cache=False,     # True IF WE WILL USE THE ES FOR DOWNLOADING BRANCHES
    timeout=30 * SECOND,
    kwargs=None
):
    """Connect to an hg server, optionally backed by an ES revision cache.

    When `branches` is given (not None/Null), an ES index is created or
    opened as the cache and a background daemon thread is started;
    otherwise only the branch list is fetched and no ES cache is used.
    """
    if not _hg_branches:
        _late_imports()

    self.es_locker = Lock()
    self.todo = mo_threads.Queue("todo for hg daemon", max=DAEMON_QUEUE_SIZE)
    self.settings = kwargs
    self.timeout = Duration(timeout)

    # VERIFY CONNECTIVITY — raises (with explanation) if the hg host is unreachable
    with Explanation("Test connect with hg"):
        http.head(self.settings.hg.url)

    # `==` rather than `is`: presumably mo_dots Null compares equal to None
    # but is not the None singleton — TODO confirm before changing
    if branches == None:
        self.branches = _hg_branches.get_branches(kwargs=kwargs)
        self.es = None
        return

    self.last_cache_miss = Date.now()
    set_default(repo, {"schema": revision_schema})
    self.es = elasticsearch.Cluster(kwargs=repo).get_or_create_index(kwargs=repo)

    def setup_es(please_stop):
        # best-effort index configuration; failures are deliberately ignored
        with suppress_exception:
            self.es.add_alias()
        with suppress_exception:
            self.es.set_refresh_interval(seconds=1)

    Thread.run("setup_es", setup_es)
    self.branches = _hg_branches.get_branches(kwargs=kwargs)
    # NOTE(review): this clobbers the Duration(timeout) assigned above with
    # the raw value — confirm whether downstream code expects a Duration
    self.timeout = timeout
    Thread.run("hg daemon", self._daemon)
def __init__(
    self,
    hg=None,             # CONNECT TO hg
    repo=None,           # CONNECTION INFO FOR ES CACHE
    branches=None,       # CONNECTION INFO FOR ES CACHE
    use_cache=False,     # True IF WE WILL USE THE ES FOR DOWNLOADING BRANCHES
    timeout=30 * SECOND,
    kwargs=None
):
    """Set up the hg connection and (optionally) its ES-backed cache.

    With `branches` left as None/Null, the branch list is downloaded and
    no ES cache is configured.  Otherwise the ES cache index is created
    (or opened), configured on a helper thread, and the daemon thread is
    launched to service the work queue.
    """
    if not _hg_branches:
        _late_imports()

    self.es_locker = Lock()
    self.todo = mo_threads.Queue("todo for hg daemon", max=DAEMON_QUEUE_SIZE)
    self.settings = kwargs
    self.timeout = Duration(timeout)

    # VERIFY CONNECTIVITY
    with Explanation("Test connect with hg"):
        http.head(self.settings.hg.url)

    # keep `== None`: presumably mo_dots Null equals None under `==` while
    # failing an `is None` identity check — verify before "fixing"
    if branches == None:
        self.branches = _hg_branches.get_branches(kwargs=kwargs)
        self.es = None
        return

    self.last_cache_miss = Date.now()
    set_default(repo, {"schema": revision_schema})
    cluster = elasticsearch.Cluster(kwargs=repo)
    self.es = cluster.get_or_create_index(kwargs=repo)

    def setup_es(please_stop):
        # alias/refresh configuration is best-effort; errors are suppressed
        with suppress_exception:
            self.es.add_alias()
        with suppress_exception:
            self.es.set_refresh_interval(seconds=1)

    Thread.run("setup_es", setup_es)
    self.branches = _hg_branches.get_branches(kwargs=kwargs)
    # NOTE(review): re-assigns self.timeout with the raw parameter,
    # overwriting the Duration wrapper set earlier — confirm intended
    self.timeout = timeout
    Thread.run("hg daemon", self._daemon)