def _dict_to_event(d):
    """Build an `Event` object from a dict of headers and body.

    `d` maps header names to values and must contain at least the
    'Event-Id', 'Source-Id', 'Syntax' and 'Body' keys.

    Raises `ZtreamyException` when a mandatory header or the body is
    missing, or when 'Aggregator-Ids' is present but not a list.
    """
    if ('Event-Id' not in d or 'Source-Id' not in d or 'Syntax' not in d):
        raise ZtreamyException('Missing headers in event',
                               'event_deserialize')
    if 'Body' not in d:
        raise ZtreamyException('Missing body in event',
                               'event_deserialize')
    if isinstance(d['Body'], dict):
        body = d['Body']
    else:
        # Non-dict bodies are carried as UTF-8 encoded text.
        body = d['Body'].encode('utf-8')
    if ('Aggregator-Ids' in d
            and not isinstance(d['Aggregator-Ids'], list)):
        raise ZtreamyException('Incorrect Aggregator-Id data',
                               'event_deserialize')
    # Any header Event does not know about (except the body itself)
    # is preserved verbatim as an extra header.
    extra_headers = {header: value for header, value in d.items()
                     if header not in Event.headers and header != 'Body'}
    return Event.create(d['Source-Id'], d['Syntax'], body,
                        event_id=d['Event-Id'],
                        application_id=d.get('Application-Id'),
                        aggregator_id=d.get('Aggregator-Ids', []),
                        event_type=d.get('Event-Type'),
                        timestamp=d.get('Timestamp'),
                        extra_headers=extra_headers)
def _parse_body(self, body): if self.syntax == 'text/n3': return self._parse_body_rdflib(body, syntax='n3') elif self.syntax == 'application/ld+json': return self._parse_body_rdflib(body, syntax='json-ld') else: raise ZtreamyException('Unsupported syntax', 'event_syntax')
def serialize_body(self):
    """Serialize the rdflib graph in this event's body to a string.

    Raises `ZtreamyException` when the event syntax is not one the
    RDF serializer knows about.
    """
    # Translate the event's MIME syntax into the rdflib format name.
    formats = {
        'text/n3': 'n3',
        'application/ld+json': 'json-ld',
    }
    if self.syntax not in formats:
        raise ZtreamyException('Bad RDFEvent syntax', 'event_serialize')
    return self.body.serialize(format=formats[self.syntax])
def deserialize(self, data, parse_body=True, complete=False):
    """Deserializes and returns a list of events.

    Parses as many complete events as possible from the internal data
    buffer; unparsed data is retained for later calls.  `data`, when
    not None, is appended to the buffer first.

    If 'parse_body' is True (the default value), the body of every
    event is deserialized according to its type; otherwise bodies are
    kept only as serialized strings.

    When 'complete' is True and leftover data remains after parsing,
    the deserializer is reset and a `ZtreamyException` is raised.

    Returns the (possibly empty) list of deserialized events.
    """
    if data is not None:
        self.append_data(data)
    deserialized = []
    while True:
        next_event = self.deserialize_next(parse_body=parse_body)
        if next_event is None:
            break
        deserialized.append(next_event)
    if complete and len(self._data) > 0:
        self.reset()
        raise ZtreamyException('Spurious data in the input event',
                               'event_deserialize')
    return deserialized
def __init__(self, callback, sparql_query):
    """Creates a SPARQL filter for RDF triples.

    Only ASK queries are accepted; any other query form raises
    `ZtreamyException`.
    """
    # Look only at the first keyword of the (whitespace-stripped) query.
    leading_keyword = sparql_query.strip()[:3].lower()
    if leading_keyword != 'ask':
        raise ZtreamyException('Only ASK queries are allowed '
                               'in SPARQLFilter')
    super(SPARQLFilter, self).__init__(callback)
    self.query = rdfextras.sparql.parser.parse(sparql_query)
def __init__(self, source_id, syntax, command, **kwargs):
    """Creates a new command event.

    `command` must be the textual representation of the command or
    provide that textual representation through `str()`. It will
    be the body of the event.

    Raises `ZtreamyException` when `syntax` is not 'ztreamy-command'
    or `command` is not one of `Command.valid_commands`.
    """
    if syntax != 'ztreamy-command':
        # Fixed misspelled error message ('Usupported').
        raise ZtreamyException('Unsupported syntax in Command',
                               'programming')
    # Fail fast: reject unknown commands before initializing the event.
    if command not in Command.valid_commands:
        raise ZtreamyException('Unsupported command ' + command,
                               'programming')
    super(Command, self).__init__(source_id, syntax, None, **kwargs)
    self.body = command
    self.command = command
def get_scheduler(description, initial_delay=0.0):
    """Create an event scheduler from its textual description.

    `description` has the form 'name[param1, param2, ...]', e.g.
    'exp[0.5]' or 'const[2.0]'.  `initial_delay` is forwarded to the
    scheduler constructor.

    Raises `ZtreamyException` on malformed descriptions, a wrong
    number of parameters or an unknown distribution name.
    """
    pos = description.find('[')
    if pos == -1 or not description.endswith(']'):
        raise ZtreamyException('error in distribution specification',
                               'event_source params')
    distribution = description[:pos].strip()
    params = [float(num) for num in description[pos + 1:-1].split(',')]
    if distribution == 'exp':
        if len(params) != 1:
            raise ZtreamyException('exp distribution needs 1 param',
                                   'event_source params')
        return exponential_event_scheduler(params[0],
                                           initial_delay=initial_delay)
    elif distribution == 'const':
        if len(params) != 1:
            raise ZtreamyException('const distribution needs 1 param',
                                   'event_source params')
        return constant_event_scheduler(params[0],
                                        initial_delay=initial_delay)
    else:
        # Previously an unknown distribution name silently returned
        # None; raise explicitly instead.
        raise ZtreamyException('unknown distribution: ' + distribution,
                               'event_source params')
def __init__(self, source_id, syntax, body, **kwargs):
    """Creates a new JSON event.

    `body` may be an already-parsed object or a string, in which
    case it is parsed as JSON.

    Raises `ZtreamyException` when `syntax` is not one of
    `JSONEvent.supported_syntaxes`.
    """
    if syntax not in JSONEvent.supported_syntaxes:
        # Fixed misspelled error message ('Usupported').
        raise ZtreamyException('Unsupported syntax in JSONEvent',
                               'programming')
    super(JSONEvent, self).__init__(source_id, syntax, None, **kwargs)
    if isinstance(body, basestring):
        self.body = self._parse_body(body)
    else:
        self.body = body
def _update_header(self, header, value):
    """Record one parsed header in the deserializer state.

    Headers unknown to `Event` are stored as extra headers (later
    repetitions silently overwrite earlier ones).  'Aggregator-Ids'
    is split on commas into a list and may appear repeatedly; any
    other known header may appear only once, otherwise a
    `ZtreamyException` is raised.
    """
    if header not in Event.headers:
        self._extra_headers[header] = value
        return
    if header == 'Aggregator-Ids':
        self._event[header] = value.split(',')
        return
    if header in self._event:
        raise ZtreamyException('Duplicate header in event',
                               'event_deserialize')
    self._event[header] = value
def serialize_body(self):
    """Returns a string representation of the body of the event.

    Subclasses should override this method to provide a
    syntax-specific serialization.

    Raises a `ZtreamyException` if the body is None.
    """
    if self.body is None:
        raise ZtreamyException('Empty body in event', 'event_serialize')
    return str(self.body)
def __init__(self, source_id, syntax, body, **kwargs):
    """Creates a new RDF event.

    `body` may be an `rdflib.Graph`, which is stored as-is, or the
    textual representation of the graph, which is parsed according
    to `syntax`.

    Raises `ZtreamyException` when `syntax` is not one of
    `RDFEvent.supported_syntaxes`.
    """
    if syntax not in RDFEvent.supported_syntaxes:
        # Fixed misspelled error message ('Usupported').
        raise ZtreamyException('Unsupported syntax in RDFEvent',
                               'programming')
    super(RDFEvent, self).__init__(source_id, syntax, None, **kwargs)
    if isinstance(body, rdflib.Graph):
        self.body = body
    else:
        self.body = self._parse_body(body)
def __init__(self, source_id, syntax, body, sequence_num=0, **kwargs):
    """Creates a new test event.

    ``sequence_num`` represents the sequence number (integer) of
    the event, and is transmitted in its body along with the
    timestamp.  It is only used when ``body`` is None.  If ``body``
    is not None, the sequence number and timestamp are read from
    ``body`` instead (via the 'X-Float-Timestamp' extra header,
    formatted as '<sequence_num>/<float_time>').

    Raises `ZtreamyException` when `syntax` is not 'ztreamy-test'.
    """
    if syntax != 'ztreamy-test':
        # Fixed misspelled error message ('Usupported').
        raise ZtreamyException('Unsupported syntax in TestEvent',
                               'programming')
    super(TestEvent, self).__init__(source_id, syntax, None, **kwargs)
    if body is not None:
        # _parse_body populates self.extra_headers from the body.
        self._parse_body(body)
        parts = self.extra_headers['X-Float-Timestamp'].split('/')
        self.float_time = float(parts[1])
        self.sequence_num = int(parts[0])
    else:
        self.float_time = time.time()
        self.sequence_num = sequence_num
        self.extra_headers['X-Float-Timestamp'] = \
            str(sequence_num) + '/' + str(self.float_time)
def deserialize_next(self, parse_body=True):
    """Deserializes and returns an event from the data buffer.

    Returns None and keeps the pending data stored when a complete
    event is not in the stored data fragment.

    If 'parse_body' is True (the default value), the parser will
    deserialize also the body of the events according to their
    types. If not, their body will be stored only in their
    serialized form (as a string).
    """
    # Read headers: one 'Name: value' pair per '\n'-terminated line;
    # a blank line ends the header section.
    pos = 0
    while not self._header_complete and pos < len(self._data):
        end = self._data.find('\n', pos)
        if end == -1:
            # Incomplete header line: keep the pending bytes for the
            # next call.
            self._data = self._data[pos:]
            return None
        part = self._data[pos:end]
        pos = end + 1
        if part == '':
            # Blank line: header section finished.
            self._header_complete = True
            break
        comps = part.split(':')
        if len(comps) < 2:
            raise ZtreamyException('Event syntax error',
                                   'event_deserialize')
        header = comps[0].strip()
        # Slice after the first ':' (not comps[1]) so values that
        # themselves contain colons are preserved intact.
        value = part[len(comps[0]) + 1:].strip()
        self._update_header(header, value)
    if not self._header_complete:
        # Ran out of buffered data before the blank line.
        self._data = self._data[pos:]
        return None
    if not 'Body-Length' in self._event:
        # Events without a Body-Length header carry an empty body.
        body_length = 0
    else:
        body_length = int(self._event['Body-Length'])
    if (not 'Event-Id' in self._event
        or not 'Source-Id' in self._event
        or not 'Syntax' in self._event):
        raise ZtreamyException('Missing headers in event',
                               'event_deserialize')
    end = pos + int(body_length)
    if end > len(self._data):
        # The body has not been fully received yet.
        self._data = self._data[pos:]
        return None
    body = self._data[pos:end]
    self._data = self._data[end:]
    if parse_body or self._event['Syntax'] in Event._always_parse:
        # Build a syntax-specific event (body gets parsed).
        event = Event.create( \
            self._event.get('Source-Id'),
            self._event.get('Syntax'),
            body,
            event_id=self._event.get('Event-Id'),
            application_id=self._event.get('Application-Id'),
            aggregator_id=self._event.get('Aggregator-Ids', []),
            event_type=self._event.get('Event-Type'),
            timestamp=self._event.get('Timestamp'),
            extra_headers=self._extra_headers)
    else:
        # Keep the body as a raw serialized string in a generic Event.
        event = Event( \
            self._event.get('Source-Id'),
            self._event.get('Syntax'),
            body,
            event_id=self._event.get('Event-Id'),
            application_id=self._event.get('Application-Id'),
            aggregator_id=self._event.get('Aggregator-Ids', []),
            event_type=self._event.get('Event-Type'),
            timestamp=self._event.get('Timestamp'),
            extra_headers=self._extra_headers)
    # Clear per-event parser state before returning.
    self._event_reset()
    return event
def _type_uri(self):
    """Return the URI registered for this entry's type.

    Raises `ZtreamyException` when the type is not one of
    `LogEntry._entry_types`.
    """
    try:
        return LogEntry._entry_types[self.entry_type]
    except KeyError:
        raise ZtreamyException('Unknown event type')
def dispatch(self, evs):
    """Dispatch a list of events to every connected client.

    `evs` must be a list of events (possibly empty); anything else
    raises a `ZtreamyException`.  An empty list counts one idle
    period: after more than 20 idle periods the server either shuts
    down (when `_auto_finish` is set) or, if clients are connected,
    sends a 'Test-Connection' keep-alive command event.
    """
    # Total number of non-priority clients currently connected.
    num_clients = (len(self.streaming_clients) + len(self.one_time_clients)
                   + len(self.unsynced_compressed_streaming_clients)
                   + len(self.compressed_streaming_clients)
                   + len(self.local_clients))
    logging.info('Sending %r events to %r clients', len(evs), num_clients)
    # Keep the events available for clients that connect later.
    self.recent_events.append_events(evs)
    # Periodically purge clients whose connection was closed.
    self._next_client_cleanup -= 1
    if self._next_client_cleanup == 0:
        self.clean_closed_clients()
    if isinstance(evs, list):
        if evs == []:
            self._periods_since_last_event += 1
            if self._periods_since_last_event > 20 and self._auto_finish:
                # Idle for too long with auto-finish enabled: shut down.
                logger.logger.server_closed(num_clients)
                self.close()
                self.ioloop.stop()
            # Use the following line for the experiments
            ## if False:
            elif ((num_clients > 0 or len(self.priority_clients) > 0)
                  and self._periods_since_last_event > 20):
                # Idle but clients are connected: send a keep-alive
                # event; it falls through to the normal dispatch below.
                logging.info('Sending Test-Connection event')
                evs = [ events.Command('', 'ztreamy-command',
                                       'Test-Connection') ]
                self._periods_since_last_event = 0
                self.dispatch_priority(evs)
            else:
                # Nothing to send this period.
                return
    else:
        raise ZtreamyException('Bad event type', 'send_event')
    self._periods_since_last_event = 0
    # Re-synchronize the compressor when unsynced compressed clients
    # exist and either no client is synced or too many events have
    # been sent since the last sync point.
    if len(self.unsynced_compressed_streaming_clients) > 0:
        if (len(self.compressed_streaming_clients) == 0
                or self._num_events_since_sync > param_max_events_sync):
            self._sync_compressor()
    if num_clients > 0:
        logging.info('Compressed clients: %d synced; %d unsynced'%\
                     (len(self.compressed_streaming_clients),
                      len(self.unsynced_compressed_streaming_clients)))
        serialized = ztreamy.serialize_events(evs)
        # Plain streaming clients and not-yet-synced compressed
        # clients receive the uncompressed serialization.
        for client in self.streaming_clients:
            self._send(serialized, client)
        for client in self.unsynced_compressed_streaming_clients:
            self._send(serialized, client)
        # Local clients receive the event objects directly.
        for client in self.local_clients:
            if not client.closed:
                client._send_events(evs)
        # One-time clients get the data and are then disconnected.
        for client in self.one_time_clients:
            self._send(serialized, client)
            client.close()
        if len(self.compressed_streaming_clients) > 0:
            # Z_SYNC_FLUSH so clients can decompress everything sent
            # so far without closing the stream.
            compressed_data = (self._compressor.compress(serialized)
                               + self._compressor.flush(zlib.Z_SYNC_FLUSH))
            for client in self.compressed_streaming_clients:
                self._send(compressed_data, client)
        for e in evs:
            logger.logger.event_dispatched(e)
    self.one_time_clients = []
    self._num_events_since_sync += len(evs)