def __safe_call(*args, **kwargs):
    client = None
    keepalive = True
    if "_nokeepalive" in kwargs:
        keepalive = not kwargs.pop("_nokeepalive")
    try:
        corpus = kwargs.pop("corpus")
    except KeyError:
        corpus = ""
        fail = format_error("corpus argument missing")
    else:
        fail = format_error({"corpus_id": corpus, "ready": self.factory.test_corpus(corpus), "status": self.factory.status_corpus(corpus), "message": "Corpus is not started"})
        if corpus in self.factory.corpora:
            # Refresh the corpus' keepalive timestamp unless explicitly disabled
            if keepalive:
                self.factory.corpora[corpus].lastcall = time.time()
            client = getattr(self.factory.corpora[corpus], "client_%s" % type_client)
            # Surface the corpus' own error message when it is in error state
            if fail["message"]["status"] == "error":
                fail["message"]["message"] = self.factory.corpora[corpus].error
    # Clients owning a threadpool get the thrift call deferred to that pool
    if hasattr(client, 'threadpool'):
        if self.factory.test_corpus(corpus):
            return deferToThreadPool(reactor, client.threadpool, client.__thrift_call__, call, *args, **kwargs)
        return defer.succeed(fail)
    # Otherwise run the thrift call synchronously
    if self.factory.test_corpus(corpus):
        return client.__thrift_call__(call, *args, **kwargs)
    return fail
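# A hedged usage sketch of the wrapper above (assumptions: __safe_call is a
# closure returned by an enclosing dispatcher that binds `call` and
# `type_client` from its own scope; the name `proxy_call` and the corpus id
# below are hypothetical, not taken from the original code):
#
#     def proxy_call(self, call, type_client):
#         def __safe_call(*args, **kwargs):
#             ...  # body as above
#         return __safe_call
#
#     result = self.proxy_call("ping", "sync")(corpus="my_corpus")
#     # `result` is a Deferred when the client exposes a threadpool,
#     # otherwise the direct thrift result or the `fail` error dict.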
@inlineCallbacks
def send_scrapy_query(self, action, arguments=None):
    url = "%s%s.json" % (self.scrapyd, action)
    method = "POST"
    headers = None
    if action.startswith('list'):
        # scrapyd's list* endpoints are queried via GET with the arguments
        # urlencoded into the query string
        method = "GET"
        if arguments:
            args = [str(k) + '=' + str(v) for (k, v) in arguments.iteritems()]
            url += '?' + '&'.join(args)
            arguments = None
    elif arguments:
        # Other endpoints (schedule, cancel, ...) expect form-encoded POST data
        arguments = urlencode(arguments)
        headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    try:
        res = yield getPage(url, method=method, postdata=arguments,
                            headers=headers, timeout=30)
        result = loadjson(res)
        returnD(result)
    except ConnectionRefusedError:
        returnD(format_error("Could not contact scrapyd server, " +
                             "maybe it's not started..."))
    except Exception as e:
        returnD(format_error(e))
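# A hedged usage sketch (assumptions: this helper lives in the same class as
# send_scrapy_query, inlineCallbacks is imported from twisted.internet.defer,
# and "hyphe_crawler" is a hypothetical scrapyd project name; only
# send_scrapy_query and returnD come from the code above):
@inlineCallbacks
def list_crawl_jobs(self):
    # "list*" actions go out as GET requests, while actions such as
    # "schedule" or "cancel" are POSTed as form data, matching the
    # branching in send_scrapy_query.
    res = yield self.send_scrapy_query("listjobs", {"project": "hyphe_crawler"})
    returnD(res)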