class XBeeFrameBase(Block):
    """ Generate or interpret XBee frames

    Parameters:
        escaped (bool): True uses API mode 2
        digimesh (bool): Use DigiMesh protocol rather than XBee
            (IEEE 802.15.4)
    """
    version = VersionProperty(version='1.0.0')
    escaped = BoolProperty(title='Escaped characters? (API mode 2)',
                           default=True)
    digimesh = BoolProperty(title='DigiMesh', default=False)

    def __init__(self):
        super().__init__()
        # Created lazily by _connect() during configure()
        self._xbee = None
        self._serial = None
        # Protocol class; swapped to xbee.DigiMesh when digimesh is set
        self._protocol = xbee.XBee

    def configure(self, context):
        super().configure(context)
        if self.digimesh():
            self._protocol = xbee.DigiMesh
        self._connect()

    def process_signals(self, signals):
        # Base class intentionally ignores incoming signals;
        # subclasses override with real behavior
        for signal in signals:
            pass

    def stop(self):
        # Best-effort halt of the XBee callback thread; never let an
        # error here block service shutdown
        try:
            self.logger.debug('Halting XBee callback thread')
            self._xbee.halt()
            self.logger.debug('XBee halted')
        except:
            self.logger.exception('Exception while halting xbee')
        super().stop()

    def _connect(self):
        ''' Establish XBee serial connection '''
        try:
            # NOTE(review): Serial(None) builds a Serial object with no
            # port -- presumably the port is set/opened elsewhere; confirm
            self._serial = serial.Serial(None)
            self.logger.debug('Escaped is'
                              ': {}'.format(self.escaped()))
            try:
                self._xbee = self._protocol(self._serial,
                                            escaped=self.escaped())
            except:
                self.logger.exception('An exception occurred')
        except:
            self.logger.exception('An failure occurred')

    def _API_frame_packer(self, data):
        # Wrap raw bytes in an API frame and serialize it for the wire
        return xbee.frame.APIFrame(data, self.escaped()).output()

    def _API_frame_unpacker(self, data):
        # Feed bytes into an APIFrame one at a time, then parse and
        # split the response via the driver's internal helper
        frame = xbee.frame.APIFrame(escaped=self.escaped())
        for byte in data:
            frame.fill(bytes([byte]))
        frame.parse()
        return self._xbee._split_response(frame.data)
class AquaCheck(Block):
    """ Poll an SDI-12 AquaCheck moisture probe on each incoming signal. """

    signalName = StringProperty(title='Signal Name', default='default')
    portNumber = StringProperty(title='UART Port', default='/dev/ttymxc4')
    sendMarking = BoolProperty(default=False, title='Send Marking')
    rs485 = BoolProperty(default=False, title='Hardware RS485 Port')
    version = VersionProperty('0.0.1')

    def configure(self, context):
        """ Create the probe driver for the configured UART port. """
        super().configure(context)
        self.logger.debug("Got here with {}".format(self.portNumber()))
        self.AQ = SDI12AquaCheck(
            self.portNumber(),
            sendMarking=self.sendMarking(),
            rs485=self.rs485())

    def process_signals(self, signals):
        """ Poll the probe once per signal; notify moisture readings. """
        for _ in signals:
            if self.AQ.pollProbe(0) != 0:
                continue
            # TODO: Add polling for temperature
            reading = {self.signalName(): self.AQ.moistureData}
            self.logger.debug("Got results: {}".format(reading))
            try:
                self.notify_signals([Signal(reading)])
            except:
                self.logger.exception(
                    "Signal is not valid: {}".format(reading))
class DatabaseConnection(PropertyHolder):
    """ Property holder grouping the database connection settings. """
    server = StringProperty(title='Server', default='localhost', order=0)
    port = IntProperty(title='Port', default=9925, order=1)
    ssl = BoolProperty(title='Use SSL', default=False, order=2)
    allow_redirects = BoolProperty(title='Allow Redirects', default=False,
                                   order=3)
    # Request timeout in seconds
    timeout = IntProperty(title='Timeout', default=15, order=4)
class TwitterUserStream(TwitterStreamBlock):
    """ A block for communicating with the User Twitter Streaming API.

    Reads user events in real time, notifying other blocks via NIO's
    signal interface at a configurable interval.

    Properties:
        notify_freq (timedelta): The interval between signal notifications.
        creds: Twitter app credentials, see above. Defaults to global
            settings.
        rc_interval (timedelta): Time to wait between receipts (either
            tweets or hearbeats) before attempting to reconnect to
            Twitter Streaming.
    """
    version = VersionProperty("2.0.0")
    only_user = BoolProperty(title="Only User Information", default=True)
    show_friends = BoolProperty(title="Include Friends List", default=False)

    streaming_host = 'userstream.twitter.com'
    streaming_endpoint = '1.1/user.json'

    def get_params(self):
        """ Build query params for the streaming request. """
        params = {'stall_warnings': 'true', 'delimited': 'length'}
        # Fix: call the property -- the bare attribute is a property
        # object and always truthy, so 'with=user' was sent regardless
        # of configuration
        if self.only_user():
            params['with'] = 'user'
        return params

    def get_request_method(self):
        return "GET"

    def filter_results(self, data):
        """ Drop friends-list messages unless show_friends is enabled. """
        if 'friends' in data:
            # Fix: call the property -- the bare attribute was always
            # truthy, so friends lists were never filtered out
            if self.show_friends():
                return data
            return None
        return data

    def create_signal(self, data):
        """ Route a parsed message into the events or other bucket. """
        if data and 'event' in data:
            self.logger.debug('Event message')
            with self._get_result_lock('events'):
                self._result_signals['events'].append(Signal(data))
        else:
            self.logger.debug('Other message')
            data = self.filter_results(data)
            if data:
                with self._get_result_lock('other'):
                    self._result_signals['other'].append(Signal(data))
class Lifx(Block):
    """ Drive a LIFX bulb's power and color from incoming signals. """

    version = VersionProperty('0.1.0')
    mac = StringProperty(title='MAC address', default='[[LIFX_MAC]]')
    ip = StringProperty(title='IP Address', default='[[LIFX_IP]]')
    power = IntProperty(title='1 for on 0 for off', default=0)
    hue = IntProperty(title='Hue (0-65535)', default=0)
    sat = IntProperty(title='Saturation (0-65535)', default=0)
    bri = IntProperty(title='Brightness (0-65535)', default=65535)
    kelvin = IntProperty(title='Kelvin (2500-9000)', default=3500)
    kill_switch = BoolProperty(title='Turn off Light at Service Stop?',
                               default=True, advanced=True)

    def configure(self, context):
        super().configure(context)
        self.bulb = Light(self.mac(), self.ip())

    def process_signals(self, signals):
        """ Apply each signal's power/color settings to the bulb. """
        for signal in signals:
            if self.power(signal) == 0:
                level = 0
            else:
                level = self.bri(signal)
                self.bulb.set_power(True)
            self.bulb.set_color([
                self.hue(signal),
                self.sat(signal),
                level,
                self.kelvin(signal),
            ])
        self.notify_signals(signals)

    def stop(self):
        """ Optionally power the bulb off when the service stops. """
        if self.kill_switch():
            self.bulb.set_power(False)
        super().stop()
class StateChange(StateBase):
    """ Notifies a signal on *state* change.

    Maintains a *state*. When *state* changes, a signal is notified that
    containes the *state* and *prev_state*.

    *state* is set by the *state_expr* property. It is an expression
    property that evalues to *state*. If the expression fails, then the
    *state* remains unmodified.
    """
    state_name = StringProperty(default='state', title="State Name",
                                advanced=True)
    exclude = BoolProperty(default=True, title="Exclude Existing Fields",
                           order=2)

    def _process_group(self, signals, group, input_id, signals_to_notify):
        """Process the signals for a group."""
        # Note: the incoming signals_to_notify list is intentionally
        # replaced; only change signals are returned
        out = []
        name = self.state_name()
        for sig in signals:
            change = self._process_state(sig, group)
            if change is None:
                continue
            # When excluding existing fields, attach the state info to a
            # brand-new empty signal instead of the original
            if self.exclude():
                sig = Signal()
            setattr(sig, 'prev_{}'.format(name), change[0])
            setattr(sig, '{}'.format(name), change[1])
            setattr(sig, 'group', group)
            out.append(sig)
        return out
class Picamera(Block):
    """ Capture an image from the Raspberry Pi camera per signal. """

    version = VersionProperty('0.1.0')
    file_name = StringProperty(title='Image Name', default='image')
    file_type = SelectProperty(Filetypes, title='File Type',
                               default=Filetypes.JPEG)
    preview = BoolProperty(title='Open Preview Window', default=False)
    # Incrementing suffix so each capture gets a unique file name
    count = 0

    def configure(self, context):
        super().configure(context)
        self.camera = PiCamera()
        if self.preview():
            self.camera.start_preview()
            # Give the sensor time to warm up before first capture
            sleep(2)

    def process_signals(self, signals):
        """ Capture one image per incoming signal. """
        for signal in signals:
            image_name = '{}_{}.{}'.format(
                self.file_name(), self.count, self.file_type().value)
            self.camera.capture('{}'.format(image_name),
                                format=self.file_type().value)
            self.count += 1
        self.notify_signals(signals)

    def stop(self):
        # Fix: call the property -- the bare attribute is always truthy,
        # so stop_preview() ran even when no preview was ever started
        if self.preview():
            self.camera.stop_preview()
        self.camera.close()
        super().stop()
class SimplifyPolyline(GroupBy, Block):
    """ Reduce the number of points in each group's polyline. """

    version = VersionProperty('0.1.0')
    high_quality = BoolProperty(default=True, title='High Quality')
    tolerance = FloatProperty(default=0.1, title='Tolerance')
    x_attr = Property(default='{{ $x }}', title='X Attribute')
    y_attr = Property(default='{{ $y }}', title='Y Attribute')

    def process_group_signals(self, signals, group, input_id=None):
        """ Simplify the polyline formed by this group's signals. """
        # Collect (x, y) points from the configured attributes
        points = [{'x': self.x_attr(s), 'y': self.y_attr(s)}
                  for s in signals]
        reduced = simplify(points, self.tolerance(), self.high_quality())
        # Emit one signal per surviving vertex, tagged with its group
        return [Signal({'x': p['x'], 'y': p['y'], 'group': group})
                for p in reduced]
class TCPClient(Block):
    """ Send a configurable message over TCP for each incoming signal. """

    host = StringProperty(title='IP Address', default='127.0.0.1')
    message = StringProperty(title='Message', default='GET / HTTP/1.1\n')
    port = IntProperty(title='Port', default=50001)
    expect_response = BoolProperty(title='Expect response?', default=True)
    version = VersionProperty('0.0.1')

    def process_signals(self, signals):
        """ Send one message per signal; attach any response bytes. """
        for signal in signals:
            message = self.message(signal).encode('utf-8')
            response = self.send_message(message)
            if response:
                signal.response = response
        self.notify_signals(signals)

    def send_message(self, message):
        """ Open a TCP connection, send *message*, optionally read one
        response of up to 8192 bytes.

        Returns the raw response bytes, or None when no response is
        expected.
        """
        response = None
        buffer_size = 8192
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Fix: guarantee the socket is closed even when connect/send/recv
        # raises -- the original leaked the file descriptor on error
        try:
            s.connect((self.host(), self.port()))
            s.send(message)
            if self.expect_response():
                response = s.recv(buffer_size)
            s.shutdown(2)
        finally:
            s.close()
        return response
class Join(EnrichSignals, GroupBy, Block):
    """ Join block.

    Group a list of signals into one signal. The output signal will
    contain an attribute for each evaluated *key* and the value of that
    attribute will be a list with an item of *value* for each matching
    signal.

    If *one_value* is True, the Signal attributes will be just a single
    matching value instead of a list of all matching values. If multiple
    matches, then the last signal processed will be the value used.
    """
    key = StringProperty(title='Key', default="{{ $key }}")
    value = Property(title='Value', default="{{ $value }}",
                     allow_none=True)
    group_attr = StringProperty(title="Group Attribute Name",
                                default="group", visible=False)
    one_value = BoolProperty(title="One Value Per Key", default=False)
    version = VersionProperty("1.0.0")

    def process_signals(self, signals, input_id='default'):
        self.notify_signals(
            self.for_each_group(self._get_hash_from_group, signals))

    def _get_hash_from_group(self, signals, group):
        """ Build the joined signal for one group and tag it. """
        self.logger.debug("Processing group {} of {} signals".format(
            group, len(signals)))
        out_sig = self._perform_hash(signals)
        if out_sig:
            setattr(out_sig, self.group_attr(), group)
        return out_sig

    def _perform_hash(self, signals):
        """ Fold a group of signals into one key->value(s) mapping. """
        if self.one_value():
            hashed = defaultdict(None)
        else:
            hashed = defaultdict(list)
        for signal in signals:
            sig_key = self.key(signal)
            sig_value = self.value(signal)
            # Fold sig_value into the mapping under its key
            try:
                if sig_key is None:
                    self.logger.debug("Skipping key: {}".format(sig_key))
                elif self.one_value():
                    hashed[sig_key] = sig_value
                else:
                    hashed[sig_key].append(sig_value)
            except:
                self.logger.exception(
                    "Failed to add value {} to key {}".format(
                        sig_value, sig_key))
        if len(hashed):
            return self.get_output_signal(hashed, signals[-1])
class Case(PropertyHolder):
    """ One conditional case: a *when* expression plus the attributes to
    apply when it matches. """
    when = Property(default='', title='When', order=0)
    attributes = ListProperty(SignalField, title="Attributes", default=[],
                              order=2)
    exclude = BoolProperty(default=False,
                           title='Exclude existing attributes?', order=1)
class Layers(PropertyHolder):
    """ Configuration for one neural-network layer. """
    count = IntProperty(title='Number of Neurons', default=10)
    activation = SelectProperty(ActivationFunctions,
                                title='Activation Function',
                                default=ActivationFunctions.softmax)
    initial_weights = SelectProperty(InitialValues,
                                     title='Initial Weight Values',
                                     default=InitialValues.random)
    bias = BoolProperty(title='Add Bias Unit', default=True)
class Sensors(PropertyHolder):
    """ Toggles selecting which sensors to read; only IR temperature is
    enabled by default. """
    IRtemperature = BoolProperty(title="IR Temperature", default=True)
    accelerometer = BoolProperty(title="Accelerometer", default=False)
    humidity = BoolProperty(title="Humidity", default=False)
    magnetometer = BoolProperty(title="Magnetometer", default=False)
    barometer = BoolProperty(title="Barometer", default=False)
    gyroscope = BoolProperty(title="Gyroscope", default=False)
    keypress = BoolProperty(title="Keypress", default=False)
class Reversable():
    """ A dynamo block mixin that allows you to reverse results """

    reverse = BoolProperty(title='Reverse', default=False)

    def _build_query_dict(self, signal=None):
        """ Extend the base query args with a reverse flag when set. """
        query_args = super()._build_query_dict(signal)
        if self.reverse():
            query_args['reverse'] = self.reverse()
        return query_args
class MNISTImageLoader(EnrichSignals, Block):
    """Generates pixel data and labels from MNIST handwriting dataset.

    If not already present in `data/` the source data will be downloaded
    automatically. The output signal is ready to use by a NeuralNetwork
    block. Each signal processed loads the next `batch_size` images from
    the dataset corresponding to `input_id`.
    """
    version = VersionProperty("0.3.0")
    batch_size = IntProperty(title='Images per Batch', default=100)
    reshape = BoolProperty(title='Flatten Images', default=False)
    shuffle = BoolProperty(title='Shuffle Batch', default=True,
                           visible=False)

    def __init__(self):
        super().__init__()
        self.mnist = None

    def configure(self, context):
        super().configure(context)
        # Downloads into data/ on first run; no validation split
        self.mnist = mnist_data.read_data_sets(
            'data',
            one_hot=True,
            reshape=self.reshape(),
            validation_size=0)

    def process_signals(self, signals, input_id=None):
        """ Emit one signal per input signal with the next batch. """
        out = []
        for signal in signals:
            dataset = getattr(self.mnist, input_id)
            pixels, labels = dataset.next_batch(
                batch_size=self.batch_size(signal),
                shuffle=self.shuffle(signal))
            out.append(self.get_output_signal(
                {'batch': pixels, 'labels': labels, 'input_id': input_id},
                signal))
        self.notify_signals(out)
class Connection(PropertyHolder):
    """ Mongo connection properties

    Properties:
        host (str): Database host
        port (int): Database port
        ssl (bool): Whether or not to use SSL when connecting
            (certs won't be validated)
    """
    # Defaults target a local Mongo instance on the standard port
    host = StringProperty(title='Mongo Host', default="127.0.0.1")
    port = IntProperty(title='Port', default=27017)
    ssl = BoolProperty(title='Use SSL', default=False)
class UDPServer(Block):
    """ A block for receiving UDP data """

    host = StringProperty(title="Listener Host", default='[[NIOHOST]]')
    port = IntProperty(title="Listener Port", allow_none=False)
    threaded = BoolProperty(title="User threads", default=False)
    packet_size = IntProperty(title="Packet size", default=8192)

    def __init__(self):
        super().__init__()
        self._server = None

    def _create_server(self):
        """ Build the threaded or single-threaded UDP server. """
        if self.threaded():
            server_class = ThreadedUDPServer
        else:
            server_class = SingleUDPServer
        return server_class((self.host(), self.port()),
                            UDPDataHandler,
                            self._handle_input)

    def configure(self, context):
        super().configure(context)
        try:
            self._server = self._create_server()
            self._server.max_packet_size = self.packet_size()
            self.logger.info("UDP Server listening on %s:%s" %
                             (self.host(), self.port()))
        except Exception as e:
            self.logger.error("Failed to create server - {0} : {1}".format(
                type(e).__name__, e))
            raise

    def start(self):
        super().start()
        if self._server:
            # serve_forever blocks, so run it in its own thread
            spawn(self._server.serve_forever)
        else:
            self.logger.warning("Server did not exist, so it was not started")

    def stop(self):
        if self._server:
            self._server.shutdown()
            self.logger.info("UDP Server stopped")
        super().stop()

    def _handle_input(self, raw_data):
        """ Notify one signal per received datagram. """
        if raw_data is None:
            self.logger.warning("Receiving invalid data")
            return
        self.notify_signals([Signal({"data": raw_data})])
class ConditionalModifier(Block):
    """ Conditional Modifier block.

    Adds a new new field, *title*, to input signals. The value of the
    attribute is determined by the *lookup* parameter. *lookup* is a list
    of formula/value pairs. In order, the *formula* of *lookup* are
    evaluated and when an evaluation is *True*, the *value* is assigned
    to the signal attribute *title*. If multiple formulas match, the
    first value is the one that is assigned to the signal.
    """
    fields = ListProperty(SignalField, title='Fields', default=[], order=0)
    exclude = BoolProperty(default=False, title='Exclude existing fields?')
    version = VersionProperty("1.1.0")

    def process_signals(self, signals):
        fresh_signals = []
        for signal in signals:
            # if we are including only the specified fields, create
            # a new, empty signal object
            tmp = Signal() if self.exclude() else signal
            # iterate over the specified fields, evaluating the formula
            # in the context of the original signal
            for field in self.fields():
                value = self._evaluate_lookup(field.lookup(), signal)
                setattr(tmp, field.title(), value)
            # only rebuild the signal list if we're using new objects.
            # Fix: the property must be called -- the bare attribute is a
            # property object and always truthy (behavior was unchanged
            # only because tmp is the original signal when not excluding)
            if self.exclude():
                fresh_signals.append(tmp)
        if self.exclude():
            signals = fresh_signals
        self.notify_signals(signals)

    def _evaluate_lookup(self, lookup, signal):
        """ Return the value of the first lookup whose formula is truthy. """
        for lu in lookup:
            value = lu.formula(signal)
            if value:
                return lu.value(signal)
class Modifier(Block):
    """ A nio block for enriching signals.

    By default, the modifier block adds attributes to existing signals
    as specified. If the 'exclude' flag is set, the block instantiates
    new (generic) signals and passes them along with *only* the
    specified fields.

    Properties:
        - fields(list): List of attribute names and corresponding values
            to add to the incoming signals.
        - exclude(bool): If `True`, output signals only contain the
            attributes specified by `fields`.
    """
    exclude = BoolProperty(default=False, title='Exclude existing fields?',
                           order=0)
    fields = ListProperty(SignalField, title='Fields', default=[], order=1)
    version = VersionProperty("1.1.0")

    def process_signals(self, signals):
        fresh_signals = []
        for signal in signals:
            # if we are including only the specified fields, create
            # a new, empty signal object
            tmp = Signal() if self.exclude() else signal
            # iterate over the specified fields, evaluating the formula
            # in the context of the original signal
            for field in self.fields():
                value = field.formula(signal)
                title = field.title(signal)
                setattr(tmp, title, value)
            # only rebuild the signal list if we're using new objects.
            # Fix: the property must be called -- the bare attribute is a
            # property object and always truthy; this now matches the
            # call below (behavior unchanged because tmp is the original
            # signal when not excluding)
            if self.exclude():
                fresh_signals.append(tmp)
        if self.exclude():
            signals = fresh_signals
        self.notify_signals(signals)
class MongoDBRemove(MongoDBBase):
    """ A block for running `remove` against a mongodb.

    Properties:
        condition (expression): A dictionary form of a remove expression.
            This is an expression property that can evaluate to a
            dictionary or be a parseable JSON string
    """
    condition = Property(title='Condition', default="{'id': {'$gt': 0}}")
    only_one = BoolProperty(title='Maximum One Deletion', default=False)

    def execute_query(self, collection, signal):
        """ Remove matching documents; report how many were deleted. """
        query = self.evaluate_expression(self.condition, signal)
        self.logger.debug("Deleting on condition {}".format(query))
        multi = not self.only_one()
        result = collection.remove(query, multi=multi)
        return [{'deleted': result.get('n', 0)}]
class FirebaseStream(FirebaseBase, GeneratorBlock):
    """ Stream changes from a Firebase collection as signals. """

    version = VersionProperty("1.1.1")
    show_root = BoolProperty(title='Return Root Data?', default=True)

    def __init__(self):
        super().__init__()
        self.stream = None
        # True until the first message of a new stream has been handled
        self.stream_start = False

    def start(self):
        super().start()
        self._connect_stream()

    def stop(self):
        # Fix: stream is None if the block never connected (e.g. start
        # failed); don't raise AttributeError during shutdown
        if self.stream is not None:
            self.stream.close()
        super().stop()

    def _refresh_auth(self):
        self.logger.info("Closing database stream for auth refresh")
        self.stream.close()
        super()._refresh_auth()
        # Reopen the stream using the refreshed user credentials
        self._connect_stream()

    def _connect_stream(self):
        self.logger.info("Connecting to stream")
        self.stream_start = True
        self.stream = self.db.child(self.collection()).\
            stream(self.stream_handler, self.user['idToken'])
        self.logger.info("New database stream opened")

    def stream_handler(self, message):
        """ Notify a signal per stream message, optionally skipping the
        initial root snapshot when show_root is disabled. """
        if not self.show_root() and self.stream_start:
            self.stream_start = False
        else:
            signal = Signal({
                "event": message['event'],
                "path": message['path'],
                "data": message['data']
            })
            self.notify_signals([signal])
class CaptureFrame(Block):
    """ Capture one video frame per signal from a local or IP camera.

    The frame is pickled and base64-encoded into the outgoing signal's
    `capture` attribute.
    """

    version = VersionProperty('2.0.0')
    camera = IntProperty(title='Camera Index', default=0)
    ipcam = BoolProperty(title='Use IP Camera?', default=False)
    ipcam_address = StringProperty(title='IP Camera Address', default='')

    def __init__(self):
        super().__init__()
        self.video_capture = None

    def start(self):
        # Fix: call through to the base class like the other lifecycle
        # methods in this file do
        super().start()
        if not self.ipcam():
            self.video_capture = cv2.VideoCapture(self.camera())

    def process_signals(self, signals):
        for signal in signals:
            if self.ipcam():
                frame = self._read_ipcam_frame()
            else:
                try:
                    ret, frame = self.video_capture.read()
                except:
                    break
            sig = Signal(
                {"capture": base64.b64encode(pickle.dumps(frame)).decode()})
            self.notify_signals([sig])

    def _read_ipcam_frame(self):
        """ Read one MJPEG frame from the IP camera stream. """
        stream = urllib.request.urlopen(self.ipcam_address())
        buffer = bytes()
        while True:
            buffer += stream.read(1024)
            start = buffer.find(b'\xff\xd8')  # JPEG start-of-image marker
            end = buffer.find(b'\xff\xd9')    # JPEG end-of-image marker
            if start != -1 and end != -1:
                jpg = buffer[start:end + 2]
                # Fix: numpy.fromstring is deprecated (removed for binary
                # input in newer numpy); frombuffer is the supported
                # equivalent for bytes
                return cv2.imdecode(
                    numpy.frombuffer(jpg, dtype=numpy.uint8),
                    cv2.IMREAD_UNCHANGED)
class PhillipsHue(Block):
    """ Set a Philips Hue light's state via the hub's REST API. """

    version = VersionProperty('0.1.0')
    hub_config = ObjectProperty(HubConfig, title='Hub Configuration',
                                default=HubConfig())
    light_config = ObjectProperty(LightConfig, title='Light Configuration',
                                  default=LightConfig())
    kill_switch = BoolProperty(title='Turn off Light at Service Stop?',
                               default=True, advanced=True)

    def process_signals(self, signals):
        """ PUT the configured light state to the hub per signal. """
        for signal in signals:
            self.data = {}
            self.api_url = 'http://{0}/api/{1}/lights/{2}/state'.format(
                self.hub_config().hub_ip(signal),
                self.hub_config().user_id(signal),
                self.hub_config().light_number(signal))
            if self.light_config().on_state(signal) == 1:
                self.data["on"] = True
                self.data["hue"] = self.light_config().hue(signal)
                self.data["sat"] = self.light_config().sat(signal)
                self.data["bri"] = self.light_config().bri(signal)
            else:
                self.data["on"] = False
            x = requests.put(self.api_url, json=self.data)
            self.logger.debug(x.text)
            self.logger.debug(self.data)
        self.notify_signals(signals)

    def stop(self):
        # Fix: call the property -- the bare attribute is always truthy,
        # so the light was turned off at stop even when the kill switch
        # was disabled (compare Lifx.stop in this repo, which calls it)
        if self.kill_switch():
            self.api_url = 'http://{0}/api/{1}/lights/{2}/state'.format(
                self.hub_config().hub_ip(),
                self.hub_config().user_id(),
                self.hub_config().light_number())
            x = requests.put(self.api_url, json={"on": False})
            self.logger.debug(x.text)
        super().stop()
class Data(PropertyHolder):
    """ Request payload configuration: a list of parameters and whether
    to form-encode them. """
    params = ListProperty(Param, title="Parameters", default=[])
    form_encode_data = BoolProperty(default=False, title="Form-Encode Data?")
class PostgresBase(LimitLock, Retry, TerminatorBlock):
    """A block for communicating with an postgres database.

    Properties:
        host(str): hostname of the database to connect to
        port(int): postgres port on the host to connect to
        db_name(str): name of the database on the host
        creds(object): username and password for the host database
        table_name(str): name of the table on the database to execute
            commands on.
        commit_all(bool): hidden attribute that configures whether to
            commit valid transactions
        ssl_mode(select): whether to require or prefer an SSL connection.
        ssl_cert(file): path to SSL cert to use for an SSL connection.
    """
    version = VersionProperty('1.0.0')
    host = StringProperty(title="Host", default="[[POSTGRES_HOST]]")
    port = IntProperty(title="Port", default="[[POSTGRES_PORT]]")
    db_name = StringProperty(title="DB Name", allow_none=False)
    creds = ObjectProperty(AuthCreds, title="Credentials",
                           default=AuthCreds())
    table_name = StringProperty(title="Table Name", allow_none=False)
    commit_all = BoolProperty(title="Commit transactions", default=True,
                              visible=False)
    ssl_mode = SelectProperty(SSLOption, default=SSLOption.prefer,
                              title='SSL Option')
    ssl_cert = FileProperty(title="SSL cert path",
                            default='/etc/ssl_cert.pem')

    def __init__(self):
        super().__init__()
        # Connection and cursor are created in connect()
        self._conn = None
        self._cur = None
        self.column_names = []

    def configure(self, context):
        super().configure(context)
        # validate any user-given variables
        self._validate_string(self.table_name())
        self.connect()
        # create list of column names for insertion validation
        self._cur.execute("SELECT column_name FROM information_schema.columns "
                          "WHERE table_name = '{}';".format(self.table_name()))
        self.column_names = [row[0] for row in self._cur]

    def stop(self):
        self.logger.debug('closing postgres connection...')
        self.disconnect()
        super().stop()

    def process_signals(self, signals):
        # Serialize processing through a lock and retry failed executions
        self.execute_with_lock(
            self.execute_with_retry, 100, self._locked_process_signals,
            signals=signals)

    def _locked_process_signals(self, signals):
        # Subclasses implement the actual query execution here
        pass

    def connect(self):
        """connect to the database and create the cursor object for
        executing commands
        """
        self.logger.debug('Connecting to postgres db...')
        self._conn = connect(database=self.db_name(),
                             user=self.creds().username(),
                             password=self.creds().password(),
                             host=self.host(),
                             port=self.port(),
                             sslmode=self.ssl_mode().value,
                             sslrootcert=self.ssl_cert().value)
        self._cur = self._conn.cursor()

    def disconnect(self):
        """disconnect from the database and close the cursor object"""
        self._cur.close()
        self._conn.close()

    def _validate_column_name(self, key):
        # make sure user input column name is exactly equal to one of the
        # column names queried in PostgresBase.configure()
        if key not in self.column_names:
            raise ValueError("{} is not a valid column in the {} table. "
                             "Valid columns: {}".format(
                                 key, self.table_name(), self.column_names))

    @staticmethod
    def _validate_string(string):
        """validate any string going into an SQL statement to protect
        against SQL injection. Every valid SQL identifier and keyword must
        obey the format represented by the regex below. If the variable is
        found to be invalid, this fails configuration of this block."""
        if not re.match("^[a-zA-Z_][a-zA-Z0-9_]*$", string):
            raise ValueError("SQL keyword or identifier '{}' did not pass "
                             "validation.".format(string))
class CronTrigger():
    """ Notify signals accoriding to cron-like timetable """

    cron = ObjectProperty(CronConf, title='Cron Schedule',
                          default=CronConf())
    utc = BoolProperty(title='UTC', default=False)

    def __init__(self):
        super().__init__()
        self._job = None
        self._cron_specs = None

    def configure(self, context):
        super().configure(context)
        # TODO: check that the config is valid cron syntax
        conf = self.cron()
        self._cron_specs = [conf.minute(),
                            conf.hour(),
                            conf.day_of_month(),
                            conf.month(),
                            conf.day_of_week()]

    def start(self):
        super().start()
        # Like crontab, check to run jobs every minute
        self._job = Job(self._cron, timedelta(minutes=1), True)
        # Run a cron cycle immediately, but in a new thread since it
        # might take some time and we don't want it to hold up start
        spawn(self._cron)

    def stop(self):
        """ Stop the simulator thread and signal generation """
        if self._job:
            self._job.cancel()
        super().stop()

    def _cron(self):
        """ Called every minute to check if cron job should notify signals """
        self.logger.debug("Checking if cron emit should run")
        now = datetime.utcnow() if self.utc() else datetime.now()
        fields = [str(now.minute),
                  str(now.hour),
                  str(now.day),
                  str(now.month),
                  str(now.weekday())]
        if self._check_cron(fields):
            spawn(self._emit)

    def _check_cron(self, now):
        """ Return True if cron property matches with `now`

        `now` is list containing the 5 cron field
        """
        for i in range(5):
            # '*' should match no matter what
            if self._cron_specs[i] == '*':
                now[i] = '*'
        # TODO: handle more interesting cron settings than just numbers
        # and '*'
        return now == self._cron_specs

    def _emit(self):
        self.logger.debug("Generating signals")
        signals = self.generate_signals()
        # If a generator is returned, build the list
        if not isinstance(signals, list):
            signals = list(signals)
        if signals:
            self.logger.debug("Notifying {} signals".format(len(signals)))
            self.notify_signals(signals)
        else:
            self.logger.debug("No signals generated")
class CustomEnrichProperties(EnrichProperties):
    """ Overrides default enrichment to include existing fields."""
    # Default flipped to False so existing signal fields are preserved
    exclude_existing = BoolProperty(title='Exclude Existing?', default=False)
class Logger(TerminatorBlock):
    """ Logger block.

    A NIO block for logging arbitrary signals.
    """
    # this is overidden here to change the default log_level from the base
    # block
    log_level = SelectProperty(LogLevel, title="Log Level", default="INFO",
                               advanced=True, order=0)
    log_at = SelectProperty(LogLevel, title="Log At", default="INFO",
                            order=0)
    log_as_list = BoolProperty(title="Log as a List", default=True,
                               advanced=True, order=1)
    log_hidden_attributes = BoolProperty(title="Log Hidden Attributes",
                                         default=False, advanced=True,
                                         order=2)
    version = VersionProperty("1.3.1")

    def process_signals(self, signals):
        """ Overridden from the block interface.

        When an instance of Logger is in the receivers list for some
        other block, this method allows the sending block to deliver its
        outgoing signal object to the logger, which logs them as a list.

        Args:
            signals (list of Signals): a list of signals to be logged.

        Returns:
            None
        """
        emit = self._get_logger()
        if self.log_as_list():
            self._log_signals_as_list(emit, signals)
        else:
            self._log_signals_sequentially(emit, signals)

    def _log_signals_as_list(self, log_func, signals):
        """ Log the whole batch as one JSON-list-formatted line. """
        try:
            encoded = [json.dumps(s.to_dict(self.log_hidden_attributes()),
                                  default=str,
                                  sort_keys=True)
                       for s in signals]
            log_func('[{}]'.format(', '.join(encoded)))
        except:
            self.logger.exception("Failed to log {} signals".format(
                len(signals)))

    def _log_signals_sequentially(self, log_func, signals):
        """ Log each signal on its own line; one failure doesn't stop
        the rest. """
        for s in signals:
            try:
                log_func(json.dumps(s.to_dict(self.log_hidden_attributes()),
                                    default=str,
                                    sort_keys=True))
            except:
                self.logger.exception("Failed to log signal")

    def _get_logger(self):
        """ Returns a function that can log, based on the current config.

        This will return a different log level function based on what
        this block is configured to log at.
        """
        configured = self.log_at()
        if isinstance(configured, LogLevel):
            level_name = configured.name.lower()
        else:
            level_name = str(configured).lower()
        return getattr(self.logger, level_name, self.logger.error)

    def log(self, phrase="None provided"):
        """ Command handler: log an arbitrary phrase. """
        self._get_logger()(
            "Command log called with phrase: {0}".format(phrase))
class MergeStreams(Persistence, GroupBy, Block):
    """ Take two input streams and combine signals together. """

    expiration = TimeDeltaProperty(default={}, title="Stream Expiration")
    notify_once = BoolProperty(default=True, title="Notify Once?")
    version = VersionProperty('0.1.0')

    def _default_signals_dict(self):
        # Last-seen signal per input; {} means "nothing received yet"
        return {"input_1": {}, "input_2": {}}

    def _default_expiration_jobs_dict(self):
        return {"input_1": None, "input_2": None}

    def __init__(self):
        super().__init__()
        self._signals = defaultdict(self._default_signals_dict)
        # One lock per group to guard updates to _signals
        self._signals_lock = defaultdict(Lock)
        self._expiration_jobs = defaultdict(self._default_expiration_jobs_dict)

    def persisted_values(self):
        """Persist signals only when no expiration (ttl) is configured.

        Signals at each input will be persisted between block restarts
        except when an expiration is configured.

        TODO: Improve this feature so signals are always persisted and
        then properly removed after loaded and the expiration has passed.
        """
        if self.expiration():
            return []
        else:
            return ["_signals"]

    def process_group_signals(self, signals, group, input_id):
        merged_signals = []
        with self._signals_lock[group]:
            for signal in signals:
                self._signals[group][input_id] = signal
                signal1 = self._signals[group]["input_1"]
                signal2 = self._signals[group]["input_2"]
                # Only merge once both inputs have a signal for the group
                if signal1 and signal2:
                    merged_signal = self._merge_signals(signal1, signal2)
                    merged_signals.append(merged_signal)
                    if self.notify_once():
                        self._signals[group]["input_1"] = {}
                        self._signals[group]["input_2"] = {}
            if self.expiration():
                self._schedule_signal_expiration_job(group, input_id)
        return merged_signals

    def _merge_signals(self, signal1, signal2):
        """ Merge signals 1 and 2 and clear from memory if only notify once
        """
        sig_1_dict = signal1.to_dict()
        sig_2_dict = signal2.to_dict()
        self._fix_to_dict_hidden_attr_bug(sig_1_dict)
        self._fix_to_dict_hidden_attr_bug(sig_2_dict)
        # input_2 values win on attribute name collisions
        merged_signal_dict = {}
        merged_signal_dict.update(sig_1_dict)
        merged_signal_dict.update(sig_2_dict)
        return Signal(merged_signal_dict)

    def _fix_to_dict_hidden_attr_bug(self, signal_dict):
        """ Remove special attributes from dictionary

        n.io has a bug when using Signal.to_dict(hidden=True). It should
        include private attributes (i.e. attributes starting withe '_')
        but not special attributes (i.e. attributes starting with '__').
        """
        for key in list(signal_dict.keys()):
            if key.startswith('__'):
                del signal_dict[key]

    def _schedule_signal_expiration_job(self, group, input_id):
        """ Schedule expiration job, cancelling existing job first """
        if self._expiration_jobs[group][input_id]:
            self._expiration_jobs[group][input_id].cancel()
        self._expiration_jobs[group][input_id] = Job(
            self._signal_expiration_job, self.expiration(), False,
            group, input_id)

    def _signal_expiration_job(self, group, input_id):
        # Forget the stored signal for this input once the ttl elapses
        self._signals[group][input_id] = {}
        self._expiration_jobs[group][input_id] = None
class AddTimestamp(EnrichSignals, Block):
    """ Add an ISO-formatted timestamp attribute to incoming signals. """

    utc = BoolProperty(title='UTC', default=True)
    output_attr = StringProperty(
        title='Outgoing Signal Attribute',
        default='timestamp',
        order=0,
        advanced=True)
    milliseconds = BoolProperty(
        title='Milliseconds',
        default=True,
        order=1,
        advanced=True)
    enrich = ObjectProperty(
        CustomEnrichProperties,
        title='Signal Enrichment',
        default=CustomEnrichProperties(),  # use custom default
        order=100,
        advanced=True)
    version = VersionProperty('0.1.0')

    def process_signals(self, signals):
        # One timestamp per batch so every signal in the batch agrees
        current_time = self._get_current_time()
        output_signals = []
        for signal in signals:
            signal_dict = {
                self.output_attr(signal): current_time,
            }
            output_signal = self.get_output_signal(signal_dict, signal)
            output_signals.append(output_signal)
        self.notify_signals(output_signals)

    def _get_current_time(self):
        """ Return an ISO-formatted string.

        Helper `_truncate_` functions are to support Python < 3.6, after
        which `datetime.isoformat()` takes a `timespec` arg."""
        if self.utc():
            now = datetime.utcnow()
            if not self.milliseconds():
                now = self._truncate_fractional_seconds(now)
                current_time = now.isoformat() + 'Z'
            else:
                current_time = now.isoformat()
                current_time = self._truncate_microseconds(current_time) + 'Z'
            return current_time
        # get local timestamp
        now = datetime.now()
        if not self.milliseconds():
            now = self._truncate_fractional_seconds(now)
            current_time = str(self._localize_time(now))
        else:
            current_time = str(self._localize_time(now))
            current_time = self._truncate_microseconds(current_time)
        # remove colon from TZ info (±HHMM format)
        # TODO: Add options for formats ±HH:MM, ±HH
        current_time = ''.join(current_time.rsplit(':', maxsplit=1))
        return current_time

    @staticmethod
    def _localize_time(now):
        """ Return datetime `now` with local timezone."""
        current_time_with_tz = get_localzone().localize(now)
        current_time = current_time_with_tz.isoformat()
        return current_time

    @staticmethod
    def _truncate_fractional_seconds(now):
        """ Return a datetime equal to `now` with `microsecond=0`"""
        now = datetime(
            year=now.year,
            month=now.month,
            day=now.day,
            hour=now.hour,
            minute=now.minute,
            second=now.second,
            microsecond=0)
        return now

    @staticmethod
    def _truncate_microseconds(timestamp):
        """ Remove microseconds from string `timestamp`"""
        base, suffix = timestamp.split('.')
        # suffix is microseconds (6 digits) optionally followed by a
        # timezone offset; keep only the millisecond digits
        microseconds, offset = suffix[:6], suffix[6:]
        milliseconds = microseconds[:3]
        suffix = milliseconds + offset
        timestamp = '.'.join([base, suffix])
        return timestamp