def receive(self):
    """Return the next message from the pipeline, re-delivering first.

    A message still sitting in the internal queue (i.e. received earlier
    but not yet acknowledged) is returned again.  Otherwise this blocks
    until a message can be moved atomically from the source queue onto
    the internal queue.
    """
    try:
        # Unacknowledged message present? Hand it out again.
        if self.pipe.llen(self.internal_queue) > 0:
            return utils.decode(self.pipe.lindex(self.internal_queue, -1))
        # Blocking pop from source, atomically pushed to the internal queue.
        moved = self.pipe.brpoplpush(self.source_queue, self.internal_queue, 0)
        return utils.decode(moved)
    except Exception as exc:
        raise exceptions.PipelineError(exc)
def receive(self):
    """Return the next message, re-delivering an unacknowledged one first.

    Blocks until a message is available.  A Redis connection error is
    swallowed deliberately (raised e.g. on SIGHUP), in which case the
    method simply returns without a message.
    """
    try:
        # Re-deliver a message that was received but never acknowledged.
        if self.pipe.llen(self.internal_queue) > 0:
            return utils.decode(self.pipe.lindex(self.internal_queue, -1))
        moved = self.pipe.brpoplpush(self.source_queue, self.internal_queue, 0)
        return utils.decode(moved)
    except redis.exceptions.ConnectionError:
        pass  # raised e.g. on SIGHUP
    except Exception as exc:
        raise exceptions.PipelineError(exc)
def receive(self):
    """Return the next message, preferring a pending unacknowledged one.

    Blocks on the source queue when the internal queue is empty.
    A connection error (raised e.g. on SIGHUP) is ignored on purpose.
    """
    try:
        pending = self.pipe.llen(self.internal_queue) > 0
        if pending:
            # Last element of the internal queue is the one in flight.
            return utils.decode(self.pipe.lindex(self.internal_queue, -1))
        return utils.decode(
            self.pipe.brpoplpush(self.source_queue, self.internal_queue, 0))
    except redis.exceptions.ConnectionError:
        pass  # raised e.g. on SIGHUP
    except Exception as exc:
        raise exceptions.PipelineError(exc)
def __parse(value):
    """Parse a timestamp string and return it decoded, or None on failure.

    First tries the strict UTC ISO-format parser; on failure falls back
    to a fuzzy dateutil parse normalised to UTC ISO format.
    """
    try:
        return utils.decode(DateTime.__parse_utc_isoformat(value))
    except ValueError:
        pass
    try:
        value = dateutil.parser.parse(value, fuzzy=True)
        value = value.astimezone(pytz.utc)
        value = value.isoformat()
    except (ValueError, OverflowError):
        # OverflowError: dateutil raises it for out-of-range numeric
        # input; previously uncaught and crashed instead of returning None.
        return None
    return utils.decode(value)
def __parse(value: str) -> Optional[str]:
    """Validate/normalise a timestamp string; return it decoded or None.

    If the value is already strict UTC ISO format it is returned as-is
    (decoded).  Otherwise a fuzzy dateutil parse normalises it to UTC
    ISO format.  Unparseable or out-of-range input yields None.
    """
    try:
        DateTime.parse_utc_isoformat(value)
    except ValueError:
        pass
    else:
        # Already valid: return the original value unchanged.
        return utils.decode(value)
    try:
        parsed = dateutil.parser.parse(value, fuzzy=True)
        value = parsed.astimezone(pytz.utc).isoformat()
    except (ValueError, OverflowError):
        return None
    return utils.decode(value)
def fetch_url(url, timeout=60.0, chunk_size=16384):
    """Download *url* and return the decoded response body (Python 2)."""
    response = urllib2.urlopen(url, timeout=timeout)
    buf = StringIO.StringIO()
    # Stream the response into the buffer in chunk_size pieces.
    shutil.copyfileobj(response, buf, chunk_size)
    content = buf.getvalue()
    buf.close()
    return decode(content)
def test_event(self):
    """ Test Redis output with default parameters. """
    conf = self.sysconfig
    pool = redis.ConnectionPool(host=conf['redis_server_ip'],
                                port=conf['redis_server_port'],
                                db=conf['redis_db'],
                                password=conf['redis_password'])
    # redis-py renamed StrictRedis to Redis in 3.0.
    version = tuple(int(part) for part in redis.__version__.split('.'))
    redis_class = redis.Redis if version >= (3, 0, 0) else redis.StrictRedis
    redis_output = redis_class(connection_pool=pool,
                               socket_timeout=conf['redis_timeout'],
                               password=conf['redis_password'])
    self.run_bot()
    # Get the message from Redis
    event = utils.decode(redis_output.lpop(conf['redis_queue']))
    self.assertIsInstance(event, str)
    event_dict = json.loads(event)
    self.assertDictEqual(EXAMPLE_EVENT, event_dict)
def setUpClass(cls):
    """ Set default values and save original functions. """
    cls.bot_id = 'test-bot'
    cls.bot_name = None
    cls.bot = None
    cls.bot_reference = None
    cls.bot_type = None
    cls.default_input_message = ''
    cls.input_message = None
    cls.loglines = []
    cls.loglines_buffer = ''
    cls.log_stream = None
    cls.maxDiff = None  # For unittest module, prints long diffs
    cls.pipe = None
    cls.sysconfig = {}
    cls.allowed_error_count = 0  # allows dumping of some lines

    cls.set_bot()
    cls.bot_name = cls.bot_reference.__name__

    # Derive the bot type from the class-name suffix if not set explicitly.
    if cls.bot_type is None:
        for type_name, type_match in cls.bot_types.items():
            if cls.bot_name.endswith(type_match):
                cls.bot_type = type_name
                break

    # Parsers receive a minimal report by default.
    if cls.bot_type == 'parser' and cls.default_input_message == '':
        cls.default_input_message = {'__type': 'Report', 'raw': 'Cg==',
                                     'feed.name': 'Test Feed',
                                     'time.observation': '2016-01-01T00:00:00+00:00'}
    if type(cls.default_input_message) is dict:
        cls.default_input_message = \
            utils.decode(json.dumps(cls.default_input_message))
def setUpClass(cls):
    """ Set default values and save original functions. """
    cls.bot_id = 'test-bot'
    cls.bot_name = None
    cls.bot = None
    cls.bot_reference = None
    cls.bot_type = None
    cls.config = {}
    cls.default_input_message = ''
    cls.input_message = None
    cls.loglines = []
    cls.loglines_buffer = ''
    cls.log_stream = None
    cls.maxDiff = None  # For unittest module, prints long diffs
    cls.pipe = None
    cls.sysconfig = {}

    cls.set_bot()
    cls.bot_name = cls.bot_reference.__name__

    # Guess the bot type from the class-name suffix unless preset.
    if cls.bot_type is None:
        for type_name, type_match in cls.bot_types.items():
            if cls.bot_name.endswith(type_match):
                cls.bot_type = type_name
                break

    if type(cls.default_input_message) is dict:
        cls.default_input_message = \
            utils.decode(json.dumps(cls.default_input_message))
def test_event(self):
    """ Test AMQP Topic output. """
    connection = pika.BlockingConnection(pika.ConnectionParameters(
        host='localhost',
        port=5672,
        socket_timeout=10,
        virtual_host='/',
    ))
    channel = connection.channel()
    channel.confirm_delivery()
    # Declare the same durable 'lazy' queue the bot publishes to.
    channel.queue_declare(queue='test', durable=True,
                          arguments={'x-queue-mode': 'lazy'})
    self.run_bot()
    # Get the message from AMQP
    method, header, body = next(channel.consume('test'))
    event = utils.decode(body)
    channel.basic_ack(delivery_tag=method.delivery_tag)
    self.assertIsInstance(event, str)
    event_dict = json.loads(event)
    self.assertDictEqual(EXAMPLE_EVENT, event_dict)
def serialize(self):
    """Serialize the message to a JSON string, embedding its class name.

    The '__type' key is added only for the duration of the dump and
    removed again, so the message itself is left unchanged.
    (Commented-out debug-logging leftovers were removed.)
    """
    self['__type'] = self.__class__.__name__
    json_dump = utils.decode(json.dumps(self))
    del self['__type']
    return json_dump
def test_event(self):
    """ Setup Redis connection """
    conf = self.sysconfig
    pool = redis.ConnectionPool(host=conf['redis_server_ip'],
                                port=conf['redis_server_port'],
                                db=conf['redis_db'],
                                password=conf['redis_password'])
    # redis-py >= 3.0 renamed StrictRedis to Redis.
    version = tuple(int(part) for part in redis.__version__.split('.'))
    redis_class = redis.Redis if version >= (3, 0, 0) else redis.StrictRedis
    redis_output = redis_class(connection_pool=pool,
                               socket_timeout=conf['redis_timeout'],
                               password=conf['redis_password'])
    self.run_bot()
    # Get the message from Redis
    event = utils.decode(redis_output.lpop(conf['redis_queue']))
    self.assertIsInstance(event, str)
    event_dict = json.loads(event)
    self.assertDictEqual(EXAMPLE_EVENT_JSON, event_dict)
def setUpClass(cls):
    """ Set default values and save original functions. """
    cls.bot_id = 'test-bot'
    cls.bot_name = None
    cls.bot = None
    cls.bot_reference = None
    cls.bot_type = None
    cls.config = {}
    cls.default_input_message = ''
    cls.input_message = None
    cls.loglines = []
    cls.loglines_buffer = ''
    cls.log_stream = None
    cls.maxDiff = None  # For unittest module, prints long diffs
    cls.pipe = None
    cls.sysconfig = {}

    cls.set_bot()
    cls.bot_name = cls.bot_reference.__name__

    # Derive the bot type from the class-name suffix if not set explicitly.
    if cls.bot_type is None:
        for type_name, type_match in cls.bot_types.items():
            if cls.bot_name.endswith(type_match):
                cls.bot_type = type_name
                break

    # Parsers receive a minimal report by default.  The observation time
    # is a full ISO timestamp with timezone, consistent with the other
    # test-harness variants (was truncated to '2016-01-01T00:00').
    if cls.bot_type == 'parser' and cls.default_input_message == '':
        cls.default_input_message = {'__type': 'Report', 'raw': 'Cg==',
                                     'feed.name': 'Test Feed',
                                     'time.observation': '2016-01-01T00:00:00+00:00'}
    # isinstance instead of exact type check (dict subclasses welcome).
    if isinstance(cls.default_input_message, dict):
        cls.default_input_message = \
            utils.decode(json.dumps(cls.default_input_message))
def parse_line(self, row, report):
    """Parse one HTML table row into an event; yields nothing without an IP."""
    # IP address and description/type live in adjacent table cells.
    ip_match = re.search(
        r">[\ ]*(\d+\.\d+\.\d+\.\d+)[\ ]*<.*</td><td>([^<]+)</td>", row)
    if not ip_match:
        return  # abort if no IP address found
    time_match = re.search(
        r"<td>[\ ]*(\d{4}-\d{2}-\d{2}\ \d{2}:\d{2}:\d{2})[\ ]*</td>", row)
    cc_search = re.search(r'([a-z]+).gif"', row)

    event = self.new_event(report)
    description = ip_match.group(2)
    # without decoding here, b/c of the unicode signs
    event_type = self.get_type(description)
    description = utils.decode(description)
    # NOTE(review): time_match is not guarded; a row containing an IP but
    # no timestamp would raise AttributeError here — confirm the feed
    # always includes the timestamp cell.
    event.add("time.source", time_match.group(1) + " UTC-8")
    event.add("source.ip", ip_match.group(1))
    event.add('classification.type', event_type)
    event.add('event_description.text', description)
    if cc_search:
        event.add('source.geolocation.cc', cc_search.group(1))
    event.add("raw", row)
    yield event
def process(self):
    """Open the configured HTTP stream and emit one report per non-empty line."""
    self.logger.info("Connecting to stream at %r.", self.parameters.http_url)
    try:
        req = requests.get(url=self.parameters.http_url,
                           auth=self.auth,
                           proxies=self.proxy,
                           headers=self.http_header,
                           verify=self.http_verify_cert,
                           cert=self.ssl_client_cert,
                           stream=True,
                           timeout=self.http_timeout_sec)
    except requests.exceptions.ConnectionError:
        self.logger.exception('Connection Failed.')
    else:
        if req.status_code // 100 != 2:
            raise ValueError('HTTP response status code was {}.'
                             ''.format(req.status_code))
        for line in req.iter_lines():
            if self.parameters.strip_lines:
                line = line.strip()
            if not line:
                # filter out keep-alive new lines and empty lines
                continue
            report = self.new_report()
            report.add("raw", decode(line))
            report.add("feed.url", self.parameters.http_url)
            self.send_message(report)
        self.logger.info('Stream stopped.')
def process(self):
    """Parse HTML table rows from the report into one event per row.

    Rows without an IP address are skipped.  The regex patterns are raw
    strings now: the originals contained invalid escape sequences
    (``\\d``, ``\\ `` in a non-raw string), which emit DeprecationWarnings
    and will become syntax errors in future Python versions.  The
    compiled patterns are byte-identical.
    """
    report = self.receive_message()
    raw_report = utils.base64_decode(report.get("raw"))
    for row in raw_report.split('<tr>'):
        # Get IP and Type
        info1 = re.search(
            r">[\ ]*(\d+\.\d+\.\d+\.\d+)[\ ]*<.*</td><td>([^<]+)</td>", row)
        if not info1:
            continue
        # Get Timestamp
        info2 = re.search(
            r"<td>[\ ]*(\d{4}-\d{2}-\d{2}\ \d{2}:\d{2}:\d{2})[\ ]*</td>", row)
        event = self.new_event(report)
        description = info1.group(2)
        description = utils.decode(description)
        event_type = self.get_type(description)
        time_source = info2.group(1) + " UTC-8"
        event.add("time.source", time_source)
        event.add("source.ip", info1.group(1))
        event.add('classification.type', event_type)
        event.add('event_description.text', description)
        event.add("raw", row)
        self.send_message(event)
    self.acknowledge_message()
def setUpClass(cls):
    """ Set default values and save original functions. """
    # Refuse to run as root; tests drop privileges for safety.
    if not utils.drop_privileges():
        raise ValueError(
            'IntelMQ and IntelMQ tests must not run as root for security reasons. '
            'Dropping privileges did not work.')

    cls.bot_id = 'test-bot'
    cls.bot_name = None
    cls.bot = None
    cls.bot_reference = None
    cls.bot_type = None
    cls.default_input_message = ''
    cls.input_message = None
    cls.loglines = []
    cls.loglines_buffer = ''
    cls.log_stream = None
    cls.maxDiff = None  # For unittest module, prints long diffs
    cls.pipe = None
    cls.sysconfig = {}
    cls.use_cache = False
    cls.allowed_warning_count = 0
    cls.allowed_error_count = 0  # allows dumping of some lines

    cls.set_bot()
    cls.bot_name = cls.bot_reference.__name__

    # Derive the bot type from the class-name suffix if not preset.
    if cls.bot_type is None:
        for type_name, type_match in cls.bot_types.items():
            if cls.bot_name.endswith(type_match):
                cls.bot_type = type_name
                break

    # Default inputs: parsers get a minimal report, everything except
    # collectors gets a bare event.
    if cls.bot_type == 'parser' and cls.default_input_message == '':
        cls.default_input_message = {
            '__type': 'Report', 'raw': 'Cg==',
            'feed.name': 'Test Feed',
            'time.observation': '2016-01-01T00:00:00+00:00'
        }
    elif cls.bot_type != 'collector' and cls.default_input_message == '':
        cls.default_input_message = {'__type': 'Event'}
    if type(cls.default_input_message) is dict:
        cls.default_input_message = \
            utils.decode(json.dumps(cls.default_input_message))

    if cls.use_cache and not os.environ.get('INTELMQ_SKIP_REDIS'):
        # Environment variable overrides the configured cache password.
        password = os.environ.get('INTELMQ_TEST_REDIS_PASSWORD') or \
            BOT_CONFIG.get('redis_cache_password')
        cls.cache = redis.Redis(
            host=BOT_CONFIG['redis_cache_host'],
            port=BOT_CONFIG['redis_cache_port'],
            db=BOT_CONFIG['redis_cache_db'],
            socket_timeout=BOT_CONFIG['redis_cache_ttl'],
            password=password,
        )
    elif cls.use_cache and os.environ.get('INTELMQ_SKIP_REDIS'):
        cls.skipTest(cls, 'Requested cache requires deactivated Redis.')
def receive(self):
    """
    Receives the last not yet acknowledged message.

    Does not block unlike the other pipelines.
    """
    # Re-deliver a pending (unacknowledged) message first.
    if len(self.state.get(self.internal_queue, [])) > 0:
        return utils.decode(self.state[self.internal_queue].pop(0))
    first_msg = self.state[self.source_queue].pop(0)
    # Park the message on the internal queue until acknowledged.
    self.state.setdefault(self.internal_queue, []).append(first_msg)
    return utils.decode(first_msg)
def receive(self) -> str:
    """
    Receives the last not yet acknowledged message.

    Does not block unlike the other pipelines.
    """
    pending = self.state.get(self.internal_queue, [])
    if len(pending) > 0:
        # Hand out the unacknowledged message again.
        return utils.decode(self.state[self.internal_queue].pop(0))
    first_msg = self.state[self.source_queue].pop(0)
    # Move it onto the internal queue until it is acknowledged.
    self.state.setdefault(self.internal_queue, []).append(first_msg)
    return utils.decode(first_msg)
def __parse(value):
    """Parse a date/time string, normalise to UTC ISO format.

    Returns the decoded ISO-format string, or None when the value cannot
    be parsed.
    """
    try:
        value = dateutil.parser.parse(value)
        value = value.astimezone(pytz.utc)
        value = value.isoformat()
    except (ValueError, OverflowError):
        # OverflowError: dateutil raises it for out-of-range numeric
        # input; previously uncaught and crashed instead of returning None.
        return None
    return utils.decode(value)
def receive(self) -> str:
    """Return the next message; only one may be in flight at a time."""
    if self._has_message:
        raise exceptions.PipelineError("There's already a message, first "
                                       "acknowledge the existing one.")
    message = self._receive()
    # Mark the message as in flight until it is acknowledged.
    self._has_message = True
    return utils.decode(message)
def receive(self):
    """Return the next message, preferring an unacknowledged pending one."""
    try:
        # None/empty if the internal queue holds no pending message.
        pending = self.pipe.lindex(self.internal_queue, -1)
        if pending:
            return utils.decode(pending)
        moved = self.pipe.brpoplpush(self.source_queue,
                                     self.internal_queue, 0)
        return utils.decode(moved)
    except Exception as exc:
        raise exceptions.PipelineError(exc)
def get_output_queue(self, path="_default"):
    """Getter for items in the output queues of this bot. Use in TestCase scenarios
    If there is multiple queues in named queue group, we return all the items chained.
    """
    items = []
    for queue_name in self.pipe.destination_queues[path]:
        items.extend(utils.decode(text) for text in self.pipe.state[queue_name])
    return items
def receive(self) -> str:
    """Consume and return the next message from the AMQP source queue.

    Remembers the delivery tag so the message can be acknowledged later.
    """
    if self.source_queue is None:
        raise exceptions.ConfigurationError('pipeline', 'No source queue given.')
    try:
        method, header, body = next(self.channel.consume(self.source_queue))
        if method:
            self.delivery_tag = method.delivery_tag
            return utils.decode(body)
    except Exception as exc:
        raise exceptions.PipelineError(exc)
def get_output_queue(self):
    """Getter for the input queue of this bot.  Use in TestCase scenarios.

    (Unreachable leftover test code that followed the return statement
    was removed; it belonged to a pipe-name assertion test, not here.)
    """
    return [utils.decode(text)
            for text in self.pipe.state["%s-output" % self.bot_id]]
def __query(query):
    """Resolve a TXT record and return the first answer's decoded wire data.

    Returns None on any DNS error.  Only the first record is used
    (Python 2 code: StringIO buffer).
    """
    try:
        for record in dns.resolver.query(query, rdtype='TXT'):
            buf = StringIO.StringIO()
            record.to_wire(buf)
            wire = buf.getvalue()[1:]  # ignore first character
            buf.close()
            return decode(wire, force=True)
    except dns.exception.DNSException:
        return None
def __query(query):
    """Resolve a TXT record and yield each answer's decoded wire data.

    Stops silently (yielding nothing) on any DNS error.
    """
    try:
        for record in dns.resolver.query(query, rdtype='TXT'):
            buf = io.BytesIO()
            record.to_wire(buf)
            wire = buf.getvalue()[1:]  # ignore first character
            buf.close()
            yield utils.decode(wire)
    except dns.exception.DNSException:
        return None
def receive(self):
    """Return the next message, re-delivering a pending one first."""
    if self.source_queue is None:
        raise exceptions.ConfigurationError('pipeline', 'No source queue given.')
    try:
        # None if the internal queue holds no unacknowledged message.
        pending = self.pipe.lindex(self.internal_queue, -1)
        if pending:
            return utils.decode(pending)
        moved = self.pipe.brpoplpush(self.source_queue,
                                     self.internal_queue, 0)
        return utils.decode(moved)
    except Exception as exc:
        raise exceptions.PipelineError(exc)
def receive(self) -> str:
    """Consume one message from the AMQP source queue.

    The delivery tag is stored for the later acknowledgement.
    """
    if self.source_queue is None:
        raise exceptions.ConfigurationError('pipeline',
                                            'No source queue given.')
    try:
        method, header, body = next(
            self.channel.consume(self.source_queue))
        if method:
            self.delivery_tag = method.delivery_tag
            return utils.decode(body)
    except Exception as exc:
        raise exceptions.PipelineError(exc)
def fetch_url(url, timeout=60.0, chunk_size=16384, http_proxy=None, https_proxy=None):
    """Download *url*, optionally through HTTP/HTTPS proxies (Python 2).

    NOTE(review): the proxy is only applied when BOTH proxies are given,
    and urllib2.install_opener changes the process-wide default opener —
    confirm both behaviours are intended.
    """
    if http_proxy and https_proxy:
        proxy = urllib2.ProxyHandler({'http': http_proxy,
                                      'https': https_proxy})
        opener = urllib2.build_opener(proxy)
        urllib2.install_opener(opener)
    response = urllib2.urlopen(url, timeout=timeout)
    buf = StringIO.StringIO()
    shutil.copyfileobj(response, buf, chunk_size)
    content = buf.getvalue()
    buf.close()
    return decode(content)
def setUpClass(cls):
    """ Set default values and save original functions. """
    cls.bot_id = 'test-bot'
    cls.bot_name = None
    cls.bot = None
    cls.bot_reference = None
    cls.bot_type = None
    cls.default_input_message = ''
    cls.input_message = None
    cls.loglines = []
    cls.loglines_buffer = ''
    cls.log_stream = None
    cls.maxDiff = None  # For unittest module, prints long diffs
    cls.pipe = None
    cls.sysconfig = {}
    cls.use_cache = False
    cls.allowed_warning_count = 0
    cls.allowed_error_count = 0  # allows dumping of some lines

    cls.set_bot()
    cls.bot_name = cls.bot_reference.__name__

    # Derive the bot type from the class-name suffix if not preset.
    if cls.bot_type is None:
        for type_name, type_match in cls.bot_types.items():
            if cls.bot_name.endswith(type_match):
                cls.bot_type = type_name
                break

    # Defaults: parsers get a minimal report, non-collectors a bare event.
    if cls.bot_type == 'parser' and cls.default_input_message == '':
        cls.default_input_message = {
            '__type': 'Report', 'raw': 'Cg==',
            'feed.name': 'Test Feed',
            'time.observation': '2016-01-01T00:00:00+00:00'
        }
    elif cls.bot_type != 'collector' and cls.default_input_message == '':
        cls.default_input_message = {'__type': 'Event'}
    if type(cls.default_input_message) is dict:
        cls.default_input_message = \
            utils.decode(json.dumps(cls.default_input_message))

    if cls.use_cache and not os.environ.get('INTELMQ_SKIP_REDIS'):
        cls.cache = redis.Redis(host=BOT_CONFIG['redis_cache_host'],
                                port=BOT_CONFIG['redis_cache_port'],
                                db=BOT_CONFIG['redis_cache_db'],
                                socket_timeout=BOT_CONFIG['redis_cache_ttl'])
def _receive(self) -> bytes:
    """
    Receives the last not yet acknowledged message.

    Does not block unlike the other pipelines.

    Returns the raw (undecoded) message: the original decoded only in the
    re-delivery branch, contradicting both the ``-> bytes`` annotation
    and the second branch, and causing the caller to decode twice.
    """
    if len(self.state[self.internal_queue]) > 0:
        # Re-deliver the pending message, raw — the caller decodes.
        return self.state[self.internal_queue][0]
    try:
        first_msg = self.state[self.source_queue].pop(0)
    except IndexError as exc:
        raise exceptions.PipelineError(exc)
    self.state[self.internal_queue].append(first_msg)
    return first_msg
def process(self):
    """Append events to per-feed, time-bucketed archive files and expire old ones."""
    event = self.receive_message()
    ed = event.to_dict()
    dt = dateutil.parser.parse(ed['time']['observation'])
    feeddir = os.path.join(self.dirname, ed['feed']['name'])
    if not os.path.exists(feeddir):
        os.mkdir(feeddir)

    # Restrict the serialized event to the configured fields, if any.
    if self.fields:
        event_dict = {}
        for key in self.fields:
            dotdictcopy(ed, event_dict, key)
    else:
        event_dict = ed

    if self.parameters.tsv:
        # NOTE(review): the flag is named "tsv" but values are joined
        # with a comma, not a tab — confirm whether this is intended.
        event_data = ",".join(val for val in dictvals(event_dict))
    else:
        event_data = utils.decode(json.dumps(event_dict, ensure_ascii=False))

    # TODO: better idiom
    basename = "{}.txt".format(dt.strftime(self.TIMEFMT))
    if self.parameters.gzip:
        filename = os.path.join(feeddir, basename + ".gz")
        with gzip.open(filename, 'a') as f:
            f.write(bytes(event_data + '\n', 'UTF-8'))
    else:
        filename = os.path.join(feeddir, basename)
        with open(filename, 'a') as f:
            f.write(event_data + '\n')

    # Rotate expired archives
    # XXX: plain sorting, careful with the dates (TIMEFMT)
    dirlist = sorted(os.listdir(feeddir))
    for expfn in dirlist[:max(len(dirlist) - self.items, 0)]:
        exppath = os.path.join(feeddir, expfn)
        os.remove(exppath)
        self.logger.info("Expired archive %s removed", exppath)
    self.acknowledge_message()
def receive(self):
    """Return the next message, tolerating Redis still loading at startup."""
    if self.source_queue is None:
        raise exceptions.ConfigurationError('pipeline', 'No source queue given.')
    try:
        while True:
            try:
                # None if the internal queue is empty.
                pending = self.pipe.lindex(self.internal_queue, -1)
            except redis.exceptions.BusyLoadingError:
                # Just wait at redis' startup #1334
                time.sleep(1)
            else:
                break
        if not pending:
            pending = self.pipe.brpoplpush(self.source_queue,
                                           self.internal_queue, 0)
        return utils.decode(pending)
    except Exception as exc:
        raise exceptions.PipelineError(exc)
def setUpClass(cls):
    """ Set default values and save original functions. """
    cls.bot_id = 'test-bot'
    cls.bot_name = None
    cls.bot = None
    cls.bot_reference = None
    cls.bot_type = None
    cls.default_input_message = ''
    cls.input_message = None
    cls.loglines = []
    cls.loglines_buffer = ''
    cls.log_stream = None
    cls.maxDiff = None  # For unittest module, prints long diffs
    cls.pipe = None
    cls.sysconfig = {}
    cls.use_cache = False
    cls.allowed_warning_count = 0
    cls.allowed_error_count = 0  # allows dumping of some lines

    cls.set_bot()
    cls.bot_name = cls.bot_reference.__name__

    # Guess the bot type from the class-name suffix unless preset.
    if cls.bot_type is None:
        for type_name, type_match in cls.bot_types.items():
            if cls.bot_name.endswith(type_match):
                cls.bot_type = type_name
                break

    # Defaults: parsers get a minimal report, non-collectors a bare event.
    if cls.bot_type == 'parser' and cls.default_input_message == '':
        cls.default_input_message = {'__type': 'Report', 'raw': 'Cg==',
                                     'feed.name': 'Test Feed',
                                     'time.observation': '2016-01-01T00:00:00+00:00'}
    elif cls.bot_type != 'collector' and cls.default_input_message == '':
        cls.default_input_message = {'__type': 'Event'}
    if type(cls.default_input_message) is dict:
        cls.default_input_message = \
            utils.decode(json.dumps(cls.default_input_message))

    if cls.use_cache and not os.environ.get('INTELMQ_SKIP_REDIS'):
        cls.cache = redis.Redis(host=BOT_CONFIG['redis_cache_host'],
                                port=BOT_CONFIG['redis_cache_port'],
                                db=BOT_CONFIG['redis_cache_db'],
                                socket_timeout=BOT_CONFIG['redis_cache_ttl'])
def process(self):
    """Consume a line-based HTTP stream and emit one report per line."""
    try:
        req = requests.get(self.parameters.url, stream=True)
    except requests.exceptions.ConnectionError:
        raise ValueError('Connection Failed.')
    else:
        for line in req.iter_lines():
            if self.parameters.strip_lines:
                line = line.strip()
            if not line:
                # filter out keep-alive new lines and empty lines
                continue
            report = self.new_report()
            report.add("raw", decode(line))
            self.send_message(report)
        self.logger.info('Stream stopped.')
def receive(self) -> str:
    """Return the next message, waiting out Redis' startup loading phase."""
    if self.source_queue is None:
        raise exceptions.ConfigurationError('pipeline', 'No source queue given.')
    try:
        while True:
            try:
                # None if no pending (unacknowledged) message exists.
                pending = self.pipe.lindex(self.internal_queue, -1)
            except redis.exceptions.BusyLoadingError:
                # Just wait at redis' startup #1334
                time.sleep(1)
            else:
                break
        if not pending:
            pending = self.pipe.brpoplpush(self.source_queue,
                                           self.internal_queue, 0)
        return utils.decode(pending)
    except Exception as exc:
        raise exceptions.PipelineError(exc)
def process(self):
    """Consume the HTTP stream line by line, retrying on transient stream errors."""
    self.logger.info("Connecting to stream at %r.", self.parameters.http_url)
    try:
        req = self.session.get(url=self.parameters.http_url, stream=True)
    except requests.exceptions.ConnectionError:
        self.logger.exception('Connection Failed.')
    else:
        if req.status_code // 100 != 2:
            raise ValueError('HTTP response status code was {}.'
                             ''.format(req.status_code))
        try:
            for line in req.iter_lines():
                if self.parameters.strip_lines:
                    line = line.strip()
                if not line:
                    # filter out keep-alive new lines and empty lines
                    continue
                self.__error_count = 0
                report = self.new_report()
                report.add("raw", decode(line))
                report.add("feed.url", self.parameters.http_url)
                self.send_message(report)
                # A successfully delivered line resets the retry counter.
                self.__error_count = 0
        except (requests.exceptions.ChunkedEncodingError, ProtocolError,
                IncompleteRead, ReadTimeoutError) as exc:
            self.__error_count += 1
            if self.__error_count > self.parameters.error_max_retries:
                self.__error_count = 0
                raise
            else:
                self.logger.info(
                    'Got exception %r, retrying (consecutive error count %d <= %d).',
                    exc, self.__error_count, self.parameters.error_max_retries)
        self.logger.info('Stream stopped.')
def test_event(self):
    """ Setup Redis connection """
    redis_ip = test.BOT_CONFIG['redis_server_ip']
    redis_port = test.BOT_CONFIG['redis_server_port']
    redis_db = test.BOT_CONFIG['redis_db']
    redis_queue = test.BOT_CONFIG['redis_queue']
    redis_password = test.BOT_CONFIG['redis_password']
    redis_timeout = test.BOT_CONFIG['redis_timeout']
    # password and socket_timeout must be set on the ConnectionPool:
    # keyword arguments passed to StrictRedis are ignored when an
    # explicit connection_pool is supplied.
    redis_conn = redis.ConnectionPool(host=redis_ip, port=redis_port,
                                      db=redis_db, password=redis_password,
                                      socket_timeout=redis_timeout)
    redis_output = redis.StrictRedis(connection_pool=redis_conn)
    self.run_bot()
    # Get the message from Redis
    event = utils.decode(redis_output.lpop(redis_queue))
    self.assertIsInstance(event, str)
    event_dict = json.loads(event)
    self.assertDictEqual(EXAMPLE_EVENT, event_dict)
def fetch_url_ssl(url, key_file, cert_file, ca_file, timeout=60.0, chunk_size=16384):
    """Fetch *url* over client-authenticated HTTPS; return the decoded body.

    The host and optional port are extracted from the URL's netloc;
    port defaults to 443.  (Python 2 code: StringIO buffer.)
    """
    regex = '([^:]+)(:([0-9]+))?'
    url_parsed = urlparse(url)
    host_port = re.search(regex, url_parsed.netloc)
    host = host_port.group(1)
    port = host_port.group(3)
    if not port:
        port = 443
    # Pass the caller's timeout through; it was previously hard-coded
    # to 60.0, silently ignoring the parameter.
    connection = HTTPSClientAuthConnection(host, port,
                                           key_file=key_file,
                                           cert_file=cert_file,
                                           ca_file=ca_file,
                                           timeout=timeout)
    connection.request('GET', url_parsed.path)
    iostring = StringIO.StringIO()
    shutil.copyfileobj(connection.getresponse(), iostring, chunk_size)
    value = iostring.getvalue()
    iostring.close()
    connection.close()
    return decode(value)
def test_event(self):
    """ Setup Redis connection """
    redis_ip = self.sysconfig['redis_server_ip']
    redis_port = self.sysconfig['redis_server_port']
    redis_db = self.sysconfig['redis_db']
    redis_queue = self.sysconfig['redis_queue']
    redis_password = self.sysconfig['redis_password']
    redis_timeout = self.sysconfig['redis_timeout']
    # password and socket_timeout must be set on the ConnectionPool:
    # keyword arguments passed to StrictRedis are ignored when an
    # explicit connection_pool is supplied.
    redis_conn = redis.ConnectionPool(host=redis_ip, port=redis_port,
                                      db=redis_db, password=redis_password,
                                      socket_timeout=redis_timeout)
    redis_output = redis.StrictRedis(connection_pool=redis_conn)
    self.run_bot()
    # Get the message from Redis
    event = utils.decode(redis_output.lpop(redis_queue))
    self.assertIsInstance(event, str)
    event_dict = json.loads(event)
    self.assertDictEqual(EXAMPLE_EVENT, event_dict)
def get_output_queue(self):
    """Getter for the input queue of this bot. Use in TestCase scenarios"""
    queue_name = "%s-output" % self.bot_id
    return [utils.decode(text) for text in self.pipe.state[queue_name]]
def to_json(self):
    """Return the message as a JSON string, keeping non-ASCII characters."""
    return utils.decode(json.dumps(self.to_dict(), ensure_ascii=False))
def serialize(self):
    """Serialize to JSON with a temporary '__type' marker for the class name.

    The marker is removed again so the message itself stays unchanged.
    """
    self['__type'] = self.__class__.__name__
    dump = utils.decode(json.dumps(self))
    del self['__type']
    return dump
def generate_datetime_now():
    """Return the current UTC time as an ISO-format string, without microseconds."""
    now = datetime.datetime.now(pytz.timezone('UTC')).replace(microsecond=0)
    # Is byte string in 2 and unicode string in 3, make unicode string
    return utils.decode(now.isoformat())
def get(self, key: str):
    """Return the cached value for *key*, decoded to str if stored as bytes."""
    value = self.redis.get(key)
    if isinstance(value, bytes):
        value = utils.decode(value)
    return value
def get_output_queue(self):
    """Getter for the input queue of this bot. Use in TestCase scenarios"""
    output = self.pipe.state["%s-output" % self.bot_id]
    return [utils.decode(text) for text in output]
def get_output_queue(self, path="_default"):
    """Getter for items in the output queues of this bot. Use in TestCase scenarios
    If there is multiple queues in named queue group, we return all the items chained.
    """
    queue_names = self.pipe.destination_queues[path]
    all_items = chain.from_iterable(self.pipe.state[name] for name in queue_names)
    return [utils.decode(text) for text in all_items]