def jsonrpc_server_call(target, jsonrpc_request, json_decoder=None):
    """Execute the given JSON-RPC request (as JSON-encoded string) on the
    given target object and return the JSON-RPC response, as a dict.

    Parameters
    ----------
    target : object
        Object whose attributes are exposed as JSON-RPC methods.
    jsonrpc_request : str
        JSON-encoded JSON-RPC request.
    json_decoder : optional
        Decoder to use; a fresh ``ScrapyJSONDecoder`` is created if ``None``.

    Returns
    -------
    dict
        A JSON-RPC result dict on success, or a JSON-RPC error dict.
    """
    if json_decoder is None:
        json_decoder = ScrapyJSONDecoder()
    try:
        req = json_decoder.decode(jsonrpc_request)
    except Exception:
        # Malformed JSON: request id is unknowable, so report it as None
        return jsonrpc_error(None, jsonrpc_errors.PARSE_ERROR, 'Parse error',
                             traceback.format_exc())
    try:
        # renamed from `id` to avoid shadowing the builtin
        reqid, methname = req['id'], req['method']
    except KeyError:
        return jsonrpc_error(None, jsonrpc_errors.INVALID_REQUEST,
                             'Invalid Request')
    try:
        method = getattr(target, methname)
    except AttributeError:
        return jsonrpc_error(reqid, jsonrpc_errors.METHOD_NOT_FOUND,
                             'Method not found')
    params = req.get('params', [])
    # dict params become keyword args; list params become positional args
    a, kw = ([], params) if isinstance(params, dict) else (params, {})
    # **kwargs requires str keys; JSON keys may decode as unicode
    kw = {str(k): v for k, v in kw.items()}
    try:
        return jsonrpc_result(reqid, method(*a, **kw))
    except Exception as e:
        return jsonrpc_error(reqid, jsonrpc_errors.INTERNAL_ERROR, str(e),
                             traceback.format_exc())
def server_call(target, json_request, json_decoder=None):
    """Execute the given JSON-encoded control request on the given target
    object and return a control result or control error dict.

    Like ``jsonrpc_server_call`` but without JSON-RPC request ids: errors
    are reported through ``control_error`` and results through
    ``control_result``.
    """
    if json_decoder is None:
        json_decoder = ScrapyJSONDecoder()
    try:
        req = json_decoder.decode(json_request)
        logger.info(str(req))
    except Exception:
        return control_error('Parse error', traceback.format_exc())
    try:
        methname = req['method']
    except KeyError:
        return control_error('Invalid Request')
    try:
        method = getattr(target, methname)
    except AttributeError:
        return control_error('Method not found')
    params = req.get('params', [])
    # dict params become keyword args; list params become positional args
    a, kw = ([], params) if isinstance(params, dict) else (params, {})
    # **kwargs requires str keys; JSON keys may decode as unicode
    kw = {str(k): v for k, v in kw.items()}
    try:
        return control_result(method(*a, **kw))
    except Exception as e:
        return control_error(str(e), traceback.format_exc())
def jsonrpc_server_call(target, jsonrpc_request, json_decoder=None):
    """Execute the given JSON-RPC request (as JSON-encoded string) on the
    given target object and return the JSON-RPC response, as a dict.

    NOTE(review): only the parse step is visible in this chunk; the rest of
    the dispatch logic appears truncated — confirm against the full file.
    """
    if json_decoder is None:
        json_decoder = ScrapyJSONDecoder()
    try:
        req = json_decoder.decode(jsonrpc_request)
    except Exception:
        # Fixed Python-2-only `except Exception, e:` syntax (SyntaxError on
        # Python 3); the bound exception was unused anyway.
        return jsonrpc_error(None, jsonrpc_errors.PARSE_ERROR, 'Parse error',
                             traceback.format_exc())
class JsonRpcResource(JsonResource):
    """Twisted web resource that dispatches JSON-RPC calls to a target
    object.

    GET returns the target itself; POST decodes the request body as a
    JSON-RPC call and executes it against the target. Child resources map
    to attributes of the target.
    """

    # Shared class-level decoder; instances may rely on it being reusable
    json_decoder = ScrapyJSONDecoder()

    def __init__(self, target=None):
        JsonResource.__init__(self)
        self._target = target

    def render_GET(self, txrequest):
        return self.get_target()

    def render_POST(self, txrequest):
        body = txrequest.content.getvalue()
        return jsonrpc_server_call(self.get_target(), body, self.json_decoder)

    def getChild(self, name, txrequest):
        parent = self.get_target()
        try:
            child = getattr(parent, name)
        except AttributeError:
            return error.NoResource("No such child resource.")
        return JsonRpcResource(child)

    def get_target(self):
        return self._target
def setUp(self):
    """Create two open spiders and wire an encoder/decoder pair through a
    shared SpiderReferencer backed by a mock crawler."""
    self.spider1 = Spider('name1')
    self.spider2 = Spider('name2')
    crawler = CrawlerMock({self.spider1, self.spider2})
    self.spref = SpiderReferencer(crawler)
    self.encoder = ScrapyJSONEncoder(spref=self.spref)
    self.decoder = ScrapyJSONDecoder(spref=self.spref)
class AdSpiderBase(RedisSpider):
    """Base spider that pulls serialized tasks from a Redis list and turns
    each one into a request."""

    # Bound encode/decode callables shared by all subclasses
    task_encoder = ScrapyJSONEncoder().encode
    task_decoder = ScrapyJSONDecoder().decode

    def next_request(self):
        """Pop the next task from Redis; return a request for it, or None
        when the queue is empty."""
        serialized_task = self.server.rpop(self.redis_key)
        if not serialized_task:
            return None
        self.logger.info("Got task {}".format(serialized_task))
        return self.make_request_from_task(
            serialized_task, callback=self.parse, dont_filter=False)

    @staticmethod
    def tagfilter(tag):
        """Return True only for actual markup tags (not strings/comments)."""
        return isinstance(tag, Tag)
def setup_class(cls):
    """Create (if needed) a random Kafka test topic and subscribe a consumer
    that decodes messages with ScrapyJSONDecoder."""
    cls.broker = os.getenv('KAFKA_BROKER')
    if not cls.topic:
        cls.topic = "%s-%s" % ('topic_test_', random_string(10))
    create_topic(cls.topic)
    cls._deserializer = ScrapyJSONDecoder()
    # Messages arrive as utf-8 bytes; decode to str before JSON-decoding
    deserialize = lambda raw: cls._deserializer.decode(raw.decode('utf8'))
    cls.consumer = KafkaConsumer(
        bootstrap_servers=[cls.broker],
        auto_offset_reset='earliest',
        group_id=None,
        value_deserializer=deserialize,
    )
    cls.consumer.subscribe([cls.topic])
def __init__(self, crawler, target=None):
    """Initialize the resource with its crawler and optional dispatch
    target, and build a crawler-aware JSON decoder."""
    JsonResource.__init__(self, crawler, target)
    self.crawler = crawler
    self._target = target
    self.json_decoder = ScrapyJSONDecoder(crawler=crawler)
def setUp(self):
    """Build a JSON decoder bound to a crawler mock with no open spiders."""
    self.json_decoder = ScrapyJSONDecoder(crawler=CrawlerMock([]))