class Location(PropertyHolder):
    southwest = ObjectProperty(Coordinate, default=Coordinate(),
                               title='Southwest')
    northeast = ObjectProperty(Coordinate, default=Coordinate(),
                               title='Northeast')
class MSSQLBase(Block):

    connection = ObjectProperty(Connection, title='Database Connection',
                                order=50)
    mars = ObjectProperty(Mars, title='Multiple Active Result Sets (MARS)',
                          order=60)

    def __init__(self):
        super().__init__()
        self.cnxn = None
        self.is_connecting = False

    def configure(self, context):
        super().configure(context)
        self.connect()

    def connect(self):
        self.is_connecting = True
        cnxn_props = self.connection()
        mars_enabled = self.mars().enabled()
        cnxn_string = ('DRIVER={};'
                       'PORT={};'
                       'SERVER={};'
                       'DATABASE={};'
                       'UID={};'
                       'MARS_Connection={};'
                       'PWD={}').format('{ODBC Driver 17 for SQL Server}',
                                        cnxn_props.port(),
                                        cnxn_props.server(),
                                        cnxn_props.database(),
                                        cnxn_props.user_id(),
                                        'yes' if mars_enabled else 'no',
                                        cnxn_props.password())
        self.logger.debug('Connecting: {}'.format(cnxn_string))
        self.cnxn = pyodbc.connect(cnxn_string)
        self.is_connecting = False

    def disconnect(self):
        if self.cnxn:
            self.cnxn.close()
            self.cnxn = None

    def stop(self):
        super().stop()
        self.disconnect()

    def _get_cursor(self):
        try:
            return self.cnxn.cursor()
        except Exception:
            # the connection may have dropped; reconnect once and retry
            self.disconnect()
            self.connect()
            return self.cnxn.cursor()
class HarperDBBase(Block):

    version = VersionProperty('0.1.0')
    dbserver = ObjectProperty(DatabaseConnection,
                              title='Database Connection', order=0)
    dbcreds = ObjectProperty(DatabaseCredentials,
                             title='Database Credentials', order=1)

    def configure(self, context):
        super().configure(context)
        protocol = "https" if self.dbserver().ssl() else "http"
        self.url = "{}://{}:{}".format(protocol,
                                       self.dbserver().server(),
                                       self.dbserver().port())
        self.headers = {
            "Content-Type": "application/json",
            "Authorization": "Basic {}".format(
                base64.b64encode(
                    bytes('{}:{}'.format(self.dbcreds().userid(),
                                         self.dbcreds().password()),
                          'utf-8')).decode("ascii"))
        }

    def sendQuery(self, payload):
        result = requests.post(
            self.url,
            headers=self.headers,
            json=payload,
            allow_redirects=self.dbserver().allow_redirects(),
            timeout=self.dbserver().timeout())
        return json.loads(result.text)

    def get_job_result(self, jobid):
        payload = {'operation': 'get_job', 'id': jobid}
        result = self.sendQuery(payload)
        resultObject = result[0]
        if resultObject.get("status") == "COMPLETE":
            return resultObject
        else:
            # poll once per second until the job completes
            time.sleep(1)
            return self.get_job_result(jobid)
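# A minimal standalone sketch (not part of the block) of the Basic-auth
# header built in HarperDBBase.configure(); 'user' and 'pass' below are
# dummy values. It shows that the header is just the base64 of
# "userid:password" prefixed with "Basic ".
import base64


def basic_auth_header(userid, password):
    token = base64.b64encode(
        bytes('{}:{}'.format(userid, password), 'utf-8')).decode('ascii')
    return 'Basic {}'.format(token)


assert basic_auth_header('user', 'pass') == 'Basic dXNlcjpwYXNz'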
class HTTPRequests(HTTPRequestsBase):

    """ A Block that makes HTTP requests.

    Makes the configured request with the configured data parameters,
    evaluated in the context of incoming signals.

    Properties:
        url (str): URL to make the request to.
        basic_auth_creds (obj): Basic Authentication credentials.
        http_method (select): HTTP method (e.g. GET, POST, PUT, DELETE).
        data (obj): URL parameters.
        headers (list(dict)): Custom headers.

    """

    version = VersionProperty("0.2.0")
    data = ObjectProperty(Data, title="Parameters", default=Data(), order=3)
    http_method = SelectProperty(HTTPMethod,
                                 default=HTTPMethod.GET,
                                 title='HTTP Method',
                                 order=0)

    def _create_payload(self, signal):
        payload = {}
        for param in self.data().params():
            param_key = param.key(signal)
            param_value = param.value(signal)
            payload[param_key] = param_value
        if payload and not self.data().form_encode_data():
            payload = json.dumps(payload)
        return payload
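# A small sketch (independent of the block) of what _create_payload
# returns: the evaluated parameters stay a dict when the request is
# form-encoded, and are serialized to a JSON string otherwise. The
# 'params' list stands in for the evaluated Data properties.
import json


def create_payload(params, form_encode):
    payload = {key: value for key, value in params}
    if payload and not form_encode:
        payload = json.dumps(payload)
    return payload


assert create_payload([('a', 1)], form_encode=True) == {'a': 1}
assert create_payload([('a', 1)], form_encode=False) == '{"a": 1}'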
class GPIOInterrupts(Block):

    pin = IntProperty(default=0, title="Pin Number")
    version = VersionProperty('0.1.0')
    interrupt_trigger = ObjectProperty(Trigger,
                                       title="Trigger on which edge:",
                                       default=Trigger())

    def __init__(self):
        super().__init__()
        self._gpio = None

    def configure(self, context):
        super().configure(context)
        self._gpio = GPIODevice(self.logger)
        # TODO: allow more than one pin to be configured per block
        self._gpio.interrupt(self._callback,
                             self.pin(),
                             self.interrupt_trigger().default().value)

    def stop(self):
        self._gpio.close()
        super().stop()

    def process_signals(self, signals):
        pass

    def _callback(self, channel):
        self.logger.debug(
            "Interrupt callback invoked by pin: {}".format(channel))
        self.notify_signals(Signal({"pin": channel}))
class GPIOInterrupts(GeneratorBlock):

    pin = IntProperty(default=0, title="Pin Number")
    version = VersionProperty("0.1.1")
    pull_up_down = ObjectProperty(PullUpDown,
                                  title="Pull Resistor Up/Down",
                                  default=PullUpDown())

    def __init__(self):
        super().__init__()
        self._gpio = None

    def start(self):
        # TODO: allow more than one pin to be configured per block
        self._gpio.interrupt(self._callback,
                             self.pin(),
                             self.pull_up_down().default().value)
        super().start()

    def configure(self, context):
        super().configure(context)
        self._gpio = GPIODevice(self.logger)

    def stop(self):
        self._gpio.close()
        super().stop()

    def process_signals(self, signals):
        pass

    def _callback(self, channel):
        self.logger.debug(
            "Interrupt callback invoked by pin: {}".format(channel))
        self.notify_signals(Signal({"pin": channel}))
class SenseHAT(Block, EnrichSignals):

    imu = ObjectProperty(IMUsensor, title='IMU Sensor')
    version = VersionProperty('0.1.0')

    def __init__(self):
        super().__init__()
        self.hat = None

    def configure(self, context):
        super().configure(context)
        self.hat = SenseHat()
        self.hat.set_imu_config(self.imu().accel(),
                                self.imu().compass(),
                                self.imu().gyro())

    def process_signals(self, signals):
        data = {}
        if self.imu().accel():
            data['accelerometer'] = self.hat.get_accelerometer_raw()
        if self.imu().compass():
            data['compass'] = self.hat.get_compass_raw()
        if self.imu().gyro():
            data['gyroscope'] = self.hat.get_gyroscope_raw()
        outgoing_signals = []
        for signal in signals:
            outgoing_signals.append(self.get_output_signal(data, signal))
        self.notify_signals(outgoing_signals)
class WatsonToneAnalyzer(EnrichSignals, Block):

    version = VersionProperty("1.0.0")
    creds = ObjectProperty(AuthCreds, title="Bluemix Credentials",
                           default=AuthCreds())
    data_attr = Property(title='Data Field', default='{{ $text }}')

    def __init__(self):
        self.tone_analyzer = None
        super().__init__()

    def configure(self, context):
        super().configure(context)
        self.tone_analyzer = ToneAnalyzerV3(
            username=self.creds().username(),
            password=self.creds().password(),
            version='2016-05-19')

    def process_signals(self, signals):
        new_signals = []
        for signal in signals:
            out_sig = self.get_output_signal(
                self.tone_analyzer.tone(self.data_attr(signal)), signal)
            new_signals.append(out_sig)
        self.notify_signals(new_signals)
class PhillipsHue(Block):

    version = VersionProperty('0.1.0')
    hub_config = ObjectProperty(HubConfig, title='Hub Configuration',
                                default=HubConfig())
    light_config = ObjectProperty(LightConfig, title='Light Configuration',
                                  default=LightConfig())
    kill_switch = BoolProperty(title='Turn off Light at Service Stop?',
                               default=True, advanced=True)

    def process_signals(self, signals):
        for signal in signals:
            self.data = {}
            self.api_url = 'http://{0}/api/{1}/lights/{2}/state'.format(
                self.hub_config().hub_ip(signal),
                self.hub_config().user_id(signal),
                self.hub_config().light_number(signal))
            if self.light_config().on_state(signal) == 1:
                self.data["on"] = True
                self.data["hue"] = self.light_config().hue(signal)
                self.data["sat"] = self.light_config().sat(signal)
                self.data["bri"] = self.light_config().bri(signal)
            else:
                self.data["on"] = False
            response = requests.put(self.api_url, json=self.data)
            self.logger.debug(response.text)
            self.logger.debug(self.data)
        self.notify_signals(signals)

    def stop(self):
        if self.kill_switch():
            self.api_url = 'http://{0}/api/{1}/lights/{2}/state'.format(
                self.hub_config().hub_ip(),
                self.hub_config().user_id(),
                self.hub_config().light_number())
            response = requests.put(self.api_url, json={"on": False})
            self.logger.debug(response.text)
        super().stop()
class MojioBase(OAuth2PasswordGrant, RESTPolling):

    """ A base block for making requests to the Moj.io API """

    creds = ObjectProperty(MojioCreds, title='Moj.io Credentials')

    def __init__(self):
        super().__init__()
        # We want the created-at dates to be pulled out of the Time field
        self._created_field = 'Time'

    def get_oauth_base_url(self):
        return 'https://api.moj.io/OAuth2/'

    def _authenticate(self):
        """ Overridden from the RESTPolling block - get OAuth token here """
        try:
            token_info = self.get_access_token(
                username=self.creds().username(),
                password=self.creds().password(),
                addl_params={
                    'client_id': self.creds().client_id(),
                    'client_secret': self.creds().client_secret()
                })
            self.logger.debug("Token retrieved: {}".format(token_info))
        except Exception:
            self.logger.exception("Error obtaining access token")

    def _prepare_url(self, paging=False):
        self._url = "{}{}".format(MOJIO_URL_BASE, self._get_url_endpoint())
        if not self.authenticated():
            self.logger.error("You must be authenticated to poll")
            return
        try:
            return {'MojioAPIToken': self._oauth_token.get('access_token')}
        except Exception:
            self.logger.exception("Unable to set header with access token")

    def _process_response(self, resp):
        """ Overridden from RESTPolling - returns signals to notify """
        # By default, just return all of the signals we get, no paging
        return [Signal(d) for d in self._get_dicts_from_response(resp)], False

    def _get_dicts_from_response(self, resp):
        resp_data = resp.json()
        return resp_data['Data']

    def _get_url_endpoint(self):
        raise NotImplementedError
class SenseHAT(Block, EnrichSignals):

    env = ObjectProperty(EnvironmentalSensors,
                         title='Environmental Sensors', order=0)
    imu = ObjectProperty(IMUsensor, title='IMU Sensor', order=1)
    version = VersionProperty('0.1.0')

    def __init__(self):
        super().__init__()
        self.hat = None

    def configure(self, context):
        super().configure(context)
        self.hat = SenseHat()
        self.hat.set_imu_config(self.imu().accel(),
                                self.imu().compass(),
                                self.imu().gyro())

    def process_signals(self, signals):
        data = {}
        if self.imu().accel():
            data['accelerometer'] = self.hat.get_accelerometer_raw()
        if self.imu().compass():
            data['compass'] = self.hat.get_compass_raw()
        if self.imu().gyro():
            data['gyroscope'] = self.hat.get_gyroscope_raw()
        if self.env().rh():
            data['relative_humidity'] = self.hat.get_humidity()
        if self.env().temp():
            data['temperature_C'] = self.hat.get_temperature()
        if self.env().press():
            data['pressure_mbar'] = self.hat.get_pressure()
        outgoing_signals = []
        for signal in signals:
            outgoing_signals.append(self.get_output_signal(data, signal))
        self.notify_signals(outgoing_signals)
class TwilioSMS(TerminatorBlock):

    recipients = ListProperty(Recipient, title='Recipients', default=[])
    creds = ObjectProperty(TwilioCreds, title='Credentials')
    from_ = StringProperty(title='From', default='[[TWILIO_NUMBER]]')
    message = Property(title='Message', default='')
    version = VersionProperty("1.0.0")

    def __init__(self):
        super().__init__()
        self._client = None

    def configure(self, context):
        super().configure(context)
        self._client = TwilioRestClient(self.creds().sid(),
                                        self.creds().token())

    def process_signals(self, signals):
        for s in signals:
            self._send_sms(s)

    def _send_sms(self, signal):
        try:
            message = self.message(signal)
            for rcp in self.recipients():
                Thread(target=self._broadcast_msg,
                       args=(rcp, message)).start()
        except Exception as e:
            self.logger.error("Message evaluation failed: {0}: {1}".format(
                type(e).__name__, str(e)))

    def _broadcast_msg(self, recipient, message, retry=False):
        body = "%s: %s" % (recipient.name(), message)
        try:
            # Twilio sends back some useless XML. Don't care.
            self._client.messages.create(to=recipient.number(),
                                         from_=self.from_(),
                                         body=body)
        except TwilioRestException as e:
            self.logger.error("Status %d" % e.status)
            if not retry:
                self.logger.debug("Retrying failed request")
                self._broadcast_msg(recipient, message, True)
            else:
                self.logger.error("Retry request failed")
        except Exception as e:
            self.logger.error("Error sending SMS to %s (%s): %s" %
                              (recipient.name(), recipient.number(), e))
class CounterGenerator():

    """A fast numeric batch generator"""

    attr_name = StringProperty(default='sim', title='Simulated Attribute',
                               order=2)
    attr_value = ObjectProperty(Value, title='Simulated Value', order=3)

    def __init__(self):
        super().__init__()
        self.count_lock = Lock()
        self._range = None
        self._range_length = 0
        self._skip_count = 0

    def configure(self, context):
        super().configure(context)
        if self.attr_value().step() != 0:
            self._range = range(self.attr_value().start(),
                                self.attr_value().end() + 1,
                                self.attr_value().step())
        else:
            self._range = range(self.attr_value().start(),
                                self.attr_value().start() + 1,
                                1)
        self._range_length = len(self._range)

    def generate_signals(self, n=1):
        with self.count_lock:
            # Build enough range objects to simulate n signals
            ranges = repeat(self._range, ceil(n / self._range_length) + 1)
            # Build an iterator to return the values
            # Skip some if we need to make sure we start at the right spot
            values_iterator = islice(chain.from_iterable(ranges),
                                     self._skip_count, None)
            # In case n is not divisible by the range length, we may need
            # to skip a number of items next time to make sure we start
            # counting in the right spot
            self._skip_count = (self._skip_count + n) % self._range_length
            return (Signal({name: value}) for (name, value) in
                    zip(repeat(self.attr_name(), n), values_iterator))
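# A standalone demonstration (with dummy start/end/step values) of the
# itertools pattern generate_signals uses: repeat the configured range
# enough times to cover the batch, then islice past the values already
# emitted so the count resumes mid-range on the next batch. Unlike the
# block, this sketch bounds the slice with skip + n instead of zip.
from itertools import chain, islice, repeat
from math import ceil

rng = range(0, 3)          # start=0, end=2, step=1
skip = 0
for n in (4, 4):           # two batches of 4 values each
    ranges = repeat(rng, ceil(n / len(rng)) + 1)
    values = islice(chain.from_iterable(ranges), skip, skip + n)
    print(list(values))    # [0, 1, 2, 0] then [1, 2, 0, 1]
    skip = (skip + n) % len(rng)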
class IntercomTagUsers(TerminatorBlock):

    version = VersionProperty("1.0.0")
    access_token = StringProperty(title="Access Token",
                                  default="[[INTERCOM_ACCESS_TOKEN]]")
    tag_name = StringProperty(title="Name of Tag", default="HappyCustomer")
    user_info = ObjectProperty(UserInfo, title="User Info",
                               default=UserInfo())

    def process_signals(self, signals):
        for signal in signals:
            if self.user_info().email(signal) or self.user_info().id(signal):
                response = self._request(
                    body={
                        "name": self.tag_name(signal),
                        "users": [{"email": self.user_info().email(signal)}]
                        if self.user_info().email(signal)
                        else [{"id": self.user_info().id(signal)}]
                    })
                if response.status_code != 200:
                    raise Exception
            else:
                self.logger.error("No user info configured")
                raise Exception

    def _request(self, body=None):
        url = 'https://api.intercom.io/tags'
        kwargs = {}
        kwargs['headers'] = {
            "Authorization": "Bearer {}".format(self.access_token()),
            "Accept": "application/json",
            "Content-Type": "application/json",
        }
        kwargs['json'] = body or {}
        response = requests.post(url, **kwargs)
        if response.status_code != 200:
            self.logger.error("Http request failed: {} {}".format(
                response, response.json()))
        self.logger.debug("Http response: {}".format(response.json()))
        return response
class Hackster(OAuth2PasswordGrant, RESTPolling):

    version = VersionProperty('0.1.0')
    creds = ObjectProperty(HacksterCreds, title='Hackster Credentials')
    endpoint = StringProperty(title='Hackster Endpoint to poll',
                              default='projects')

    def get_oauth_base_url(self):
        return 'https://www.hackster.io/oauth/'

    def _authenticate(self):
        try:
            token_info = self.get_access_token(
                grant_type='client_credentials',
                addl_params={
                    'client_id': self.creds().client_id(),
                    'client_secret': self.creds().client_secret()
                })
            self.logger.debug('Token retrieved: {}.'.format(token_info))
        except Exception:
            self.logger.exception('Error obtaining access token')

    def _prepare_url(self, paging=False):
        self._url = '{}{}'.format(HACKSTER_URL_BASE, self.endpoint())
        if not self.authenticated():
            self.logger.error('You must be authenticated to poll')
            return
        try:
            return self.get_access_token_headers()
        except Exception:
            self.logger.exception('Unable to set header with access token')

    def _process_response(self, resp):
        """ Overridden from RESTPolling - returns signals to notify """
        # By default, just return all of the signals we get, no paging
        return [Signal(d) for d in self._get_dicts_from_response(resp)], False

    def _get_dicts_from_response(self, resp):
        resp_data = resp.json()
        return resp_data['records']
class SQSBase(Block):

    """ This is the base block for integrating n.io with AWS SQS """

    version = VersionProperty("1.0.0")
    creds = ObjectProperty(
        AWSCreds, title="AWS Credentials", default=AWSCreds())
    # Queue to connect to
    queue_url = StringProperty(title="Queue URL", default="")

    def __init__(self):
        self.client = None
        super().__init__()

    def configure(self, context):
        super().configure(context)
        self.client = boto3.client(
            'sqs',
            region_name=self.creds().region_name(),
            aws_access_key_id=self.creds().aws_access_key_id(),
            aws_secret_access_key=self.creds().aws_secret_access_key(),
            aws_session_token=self.creds().aws_session_token())
class WatsonTextToSpeech(TerminatorBlock):

    version = VersionProperty("1.0.0")
    creds = ObjectProperty(AuthCreds, title="Bluemix Credentials",
                           default=AuthCreds())
    data_attr = Property(title='Text to Convert', default='{{ $text }}')
    speech_file_location = FileProperty(
        title='Directory to save audio files', default='etc/')
    voice = SelectProperty(Voices, default=Voices.EnglishUSLisa,
                           title="Voice")

    def __init__(self):
        self.tts_engine = None
        super().__init__()

    def configure(self, context):
        super().configure(context)
        self.tts_engine = TextToSpeechV1(username=self.creds().username(),
                                         password=self.creds().password())

    def process_signals(self, signals):
        for signal in signals:
            try:
                data = self.tts_engine.synthesize(
                    text=self.data_attr(signal),
                    voice=self.voice().value,
                    accept="audio/wav")
            except WatsonException:
                self.logger.exception("Invalid Bluemix credentials: ")
            else:
                filename = str(uuid.uuid4()) + ".wav"
                with open(self.speech_file_location().value + filename,
                          'wb') as audio_file:
                    audio_file.write(data)
                self.logger.info("Wrote speech file to {} for input text "
                                 "starting with '{}'".format(
                                     self.speech_file_location().value +
                                     filename,
                                     self.data_attr(signal)[:30]))
class S3Base(TerminatorBlock):

    """ This is the base block for integrating n.io with AWS S3 """

    creds = ObjectProperty(
        AWSCreds, title="AWS Credentials", default=AWSCreds())
    # S3 bucket to upload to
    bucket_name = StringProperty(
        title="Bucket Name", default="{{ $bucket_name }}")
    # What to name the file in the S3 bucket
    key = StringProperty(
        title="S3 File Key", default="{{ $key }}")

    def __init__(self):
        self.client = None
        super().__init__()

    def configure(self, context):
        super().configure(context)
        self.client = boto3.client(
            's3',
            aws_access_key_id=self.creds().aws_access_key_id(),
            aws_secret_access_key=self.creds().aws_secret_access_key(),
            aws_session_token=self.creds().aws_session_token())
class WatsonSpeechToText(Block):

    version = VersionProperty('1.0.0')
    creds = ObjectProperty(AuthCreds, title="Bluemix Credentials",
                           default=AuthCreds())
    speech_file_location = FileProperty(title='Path to audio file (.wav)',
                                        default='etc/speech.wav', mode='rb')

    def __init__(self):
        self.stt_engine = None
        super().__init__()

    def configure(self, context):
        super().configure(context)
        self.stt_engine = SpeechToTextV1(username=self.creds().username(),
                                         password=self.creds().password())

    def process_signals(self, signals):
        new_signals = []
        for signal in signals:
            try:
                with self.speech_file_location(signal) as speech_file:
                    speech_data = speech_file.read()
                text_dict = self.stt_engine.recognize(
                    audio=speech_data, content_type='audio/wav')
            except WatsonException:
                self.logger.exception("Invalid Bluemix credentials: ")
            except Exception:
                self.logger.exception("Failed to open speech file: {}".format(
                    self.speech_file_location().file))
            else:
                new_signals.append(Signal(text_dict))
                self.logger.debug("Successfully read speech file '{}'".format(
                    self.speech_file_location().file))
        self.notify_signals(new_signals)
class FirebaseBase(Base):

    config = ObjectProperty(FirebaseAuthProperty, title="Authentication",
                            order=0)
    collection = StringProperty(title='Database Collection',
                                default='[[FIREBASE_COLLECTION]]', order=3)
    userEmail = StringProperty(title='Authenticated User Email',
                               default='[[USER_EMAIL]]', order=1)
    userPassword = StringProperty(title='Authenticated User Password',
                                  default='[[USER_PASSWORD]]', order=2)
    authRefresh = IntProperty(title='Auth Refresh Interval', default=1800,
                              advanced=True, order=4)

    def __init__(self):
        super().__init__()
        self.user = None
        self.db = None
        self.auth = None
        self._refresh_job = None

    def configure(self, context):
        super().configure(context)
        self._create_firebase()

    def start(self):
        """ Starts the block

        Begin the job that refreshes the auth token on the configured
        interval (default 1800 seconds)
        """
        super().start()
        self._refresh_job = Job(self._refresh_auth,
                                timedelta(seconds=self.authRefresh()),
                                True)

    def stop(self):
        """ Stops the block

        Cancel the token refresh job
        """
        self._refresh_job.cancel()
        super().stop()

    def _refresh_auth(self):
        self.logger.info("Refreshing user token")
        self.user = self.auth.refresh(self.user['refreshToken'])

    def _create_firebase(self):
        firebase = pyrebase.initialize_app(self.config().get_auth_object())
        self.auth = firebase.auth()
        self.user = self.auth.sign_in_with_email_and_password(
            self.userEmail(), self.userPassword())
        self.db = firebase.database()

    def _get_collection(self, signal=None):
        collection = self.collection(signal)
        if not collection.startswith('/'):
            return "/{}".format(collection)
        else:
            return collection
class FacebookPost(TerminatorBlock):

    version = VersionProperty("1.0.0")
    message = Property(title='Message', default='')
    feed_id = StringProperty(title='Feed ID (user, group, etc.)',
                             default='me')
    creds = ObjectProperty(FacebookCreds, title='Credentials')

    def __init__(self):
        super().__init__()
        self._auth = None
        self._access_token = None

    def start(self):
        super().start()
        self._authenticate()

    def process_signals(self, signals):
        if self._check_permissions():
            for s in signals:
                try:
                    message = self.message(s)
                except Exception:
                    self.logger.exception("Message evaluation failed:")
                    continue
                self._post_to_feed(quote_plus(message))
        else:
            self.logger.error("Insufficient permissions for id: {0}".format(
                self.feed_id()))

    def _post_to_feed(self, message):
        url = POST_URL.format(self.feed_id(), message, self._access_token)
        response = requests.post(url)
        status = response.status_code
        if status != 200:
            self.logger.error(
                "Facebook post failed with status {0}".format(status))
        else:
            self.logger.debug("Posted to Facebook!")

    def _authenticate(self):
        """ Generates and records the access token for pending requests. """
        if self.creds().consumer_key() is None or \
                self.creds().app_secret() is None:
            self.logger.error("You need a consumer key and app secret")
        else:
            self._access_token = self._request_access_token()

    def _check_permissions(self):
        result = False
        url = PERMISSIONS_URL.format(self.feed_id(), self._access_token)
        response = requests.get(url)
        if response.status_code == 200:
            data = response.json().get('data')[0] or {}
            if data.get('publish_actions') == 1:
                result = True
        return result

    def _request_access_token(self):
        """ Request an access token directly from Facebook.

        Returns:
            token (str): The access token, which goes on the end of a
                request.
        """
        resp = requests.get(
            TOKEN_URL_FORMAT.format(self.creds().consumer_key(),
                                    self.creds().app_secret()))
        status = resp.status_code
        # If the token request fails, fall back on the configured app id
        # and secret. This probably won't work, but the docs say that it
        # should. For more info, see:
        # https://developers.facebook.com/docs/facebook-login/access-tokens
        token = "{}|{}".format(self.creds().consumer_key(),
                               self.creds().app_secret())
        if status == 200:
            token = resp.text.split('access_token=')[1]
        else:
            self.logger.error("Facebook token request failed with status %d"
                              % status)
        return token
class PostgresBase(LimitLock, Retry, TerminatorBlock):

    """A block for communicating with a Postgres database.

    Properties:
        host (str): hostname of the database to connect to
        port (int): Postgres port on the host to connect to
        db_name (str): name of the database on the host
        creds (object): username and password for the host database
        table_name (str): name of the table on the database to execute
            commands on
        commit_all (bool): hidden attribute that configures whether to
            commit valid transactions
        ssl_mode (select): whether to require or prefer an SSL connection
        ssl_cert (file): path to SSL cert to use for an SSL connection
    """

    version = VersionProperty('1.0.0')
    host = StringProperty(title="Host", default="[[POSTGRES_HOST]]")
    port = IntProperty(title="Port", default="[[POSTGRES_PORT]]")
    db_name = StringProperty(title="DB Name", allow_none=False)
    creds = ObjectProperty(AuthCreds, title="Credentials",
                           default=AuthCreds())
    table_name = StringProperty(title="Table Name", allow_none=False)
    commit_all = BoolProperty(title="Commit transactions", default=True,
                              visible=False)
    ssl_mode = SelectProperty(SSLOption, default=SSLOption.prefer,
                              title='SSL Option')
    ssl_cert = FileProperty(title="SSL cert path",
                            default='/etc/ssl_cert.pem')

    def __init__(self):
        super().__init__()
        self._conn = None
        self._cur = None
        self.column_names = []

    def configure(self, context):
        super().configure(context)
        # validate any user-given variables
        self._validate_string(self.table_name())
        self.connect()
        # create a list of column names for insertion validation
        self._cur.execute(
            "SELECT column_name FROM information_schema.columns "
            "WHERE table_name = '{}';".format(self.table_name()))
        self.column_names = [row[0] for row in self._cur]

    def stop(self):
        self.logger.debug('closing postgres connection...')
        self.disconnect()
        super().stop()

    def process_signals(self, signals):
        self.execute_with_lock(
            self.execute_with_retry, 100, self._locked_process_signals,
            signals=signals)

    def _locked_process_signals(self, signals):
        pass

    def connect(self):
        """connect to the database and create the cursor object for
        executing commands
        """
        self.logger.debug('Connecting to postgres db...')
        self._conn = connect(database=self.db_name(),
                             user=self.creds().username(),
                             password=self.creds().password(),
                             host=self.host(),
                             port=self.port(),
                             sslmode=self.ssl_mode().value,
                             sslrootcert=self.ssl_cert().value)
        self._cur = self._conn.cursor()

    def disconnect(self):
        """disconnect from the database and close the cursor object"""
        self._cur.close()
        self._conn.close()

    def _validate_column_name(self, key):
        # make sure the user-input column name is exactly equal to one of
        # the column names queried in PostgresBase.configure()
        if key not in self.column_names:
            raise ValueError("{} is not a valid column in the {} table. "
                             "Valid columns: {}".format(
                                 key, self.table_name(), self.column_names))

    @staticmethod
    def _validate_string(string):
        """validate any string going into an SQL statement to protect
        against SQL injection. Every valid SQL identifier and keyword must
        obey the format represented by the regex below. If the variable is
        found to be invalid, this fails configuration of this block."""
        if not re.match("^[a-zA-Z_][a-zA-Z0-9_]*$", string):
            raise ValueError("SQL keyword or identifier '{}' did not pass "
                             "validation.".format(string))
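# A quick standalone check (dummy inputs) of the identifier regex used by
# PostgresBase._validate_string: only strings shaped like SQL identifiers
# pass, so injection payloads and malformed names fail configuration.
import re


def is_valid_identifier(s):
    return re.match("^[a-zA-Z_][a-zA-Z0-9_]*$", s) is not None


assert is_valid_identifier("sensor_readings")
assert not is_valid_identifier("readings; DROP TABLE users")  # rejected
assert not is_valid_identifier("1st_table")                   # rejected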
class FacebookFeed(RESTPolling):

    """ This block polls the Facebook Graph API, using the feed endpoint.

    Params:
        phrase (str): The phrase with which to search posts. Need not be
            url-quoted.
        limit (int): Maximum number of posts contained in each response.
        lookback (timedelta): Initial window of desirable posts (for the
            very first request).
    """

    URL_FORMAT = ("https://graph.facebook.com/v2.2/"
                  "{}/{}?since={}&limit={}")
    TOKEN_URL_FORMAT = ("https://graph.facebook.com/oauth"
                        "/access_token?client_id={0}&client_secret={1}"
                        "&grant_type=client_credentials")

    creds = ObjectProperty(Creds, title='Credentials', default=Creds())
    lookback = TimeDeltaProperty(title='Lookback', default={"seconds": 0})
    limit = IntProperty(title='Limit (per poll)', default=10)
    feed_type = SelectProperty(FeedType, default=FeedType.FEED,
                               title='Feed Type')
    version = VersionProperty("1.0.2")

    def __init__(self):
        super().__init__()
        self._url = None
        self._paging_field = "paging"
        self._created_field = "created_time"
        self._access_token = None

    def configure(self, context):
        super().configure(context)
        lb = self._unix_time(datetime.utcnow() - self.lookback())
        self._freshest = [lb] * self._n_queries

    def _authenticate(self):
        """ Overridden from the RESTPolling block.

        Generates and records the access token for pending requests.
        """
        if self.creds().consumer_key() is None or \
                self.creds().app_secret() is None:
            self.logger.error("You need a consumer key and app secret")
        else:
            self._access_token = self._request_access_token()

    def _process_response(self, resp):
        """ Extract fresh posts from the Facebook Graph API response object.

        Args:
            resp (Response)

        Returns:
            signals (list(Signal)): The list of signals to notify, each of
                which corresponds to a fresh FB post.
            paging (bool): Denotes whether or not paging requests are
                necessary.
        """
        signals = []
        resp = resp.json()
        fresh_posts = posts = resp['data']
        paging = resp.get(self._paging_field) is not None
        self.logger.debug("Facebook response contains %d posts" % len(posts))

        # we shouldn't see empty responses, but we'll protect our necks.
        if len(posts) > 0:
            self.update_freshness(posts)
            fresh_posts = self.find_fresh_posts(posts)
            paging = len(fresh_posts) == self.limit()

            # store the timestamp of the oldest fresh post for use in url
            # preparation later.
            if len(fresh_posts) > 0:
                self.prev_stalest = self.created_epoch(fresh_posts[-1])

        signals = [FacebookSignal(p) for p in fresh_posts]
        self.logger.debug("Found %d fresh posts" % len(signals))
        return signals, paging

    def _request_access_token(self):
        """ Request an access token directly from Facebook.

        Returns:
            token (str): The access token, which goes on the end of a
                request.
        """
        resp = requests.get(
            self.TOKEN_URL_FORMAT.format(self.creds().consumer_key(),
                                         self.creds().app_secret()))
        status = resp.status_code

        # If the token request fails, fall back on the configured app id
        # and secret. This probably won't work, but the docs say that it
        # should. For more info, see:
        # https://developers.facebook.com/docs/facebook-login/access-tokens
        token = "%s|%s" % (self.creds().consumer_key(),
                           self.creds().app_secret())
        if status == 200:
            token = resp.text.split('access_token=')[1]
        else:
            self.logger.error("Facebook token request failed with status %d"
                              % status)
        return token

    def _prepare_url(self, paging=False):
        """ Overridden from the RESTPolling block.

        Appends the access token to the format string and builds the
        headers dictionary. If paging, we do some string interpolation to
        get our arguments into the request url. Otherwise, we append the
        until parameter to the end.

        Args:
            paging (bool): Are we paging?

        Returns:
            headers (dict): Contains the (case sensitive) http headers.
        """
        headers = {"Content-Type": "application/json"}
        fmt = "%s&access_token=%s" % (self.URL_FORMAT, self._access_token)
        if not paging:
            self.paging_url = None
            feed_type = self.feed_type().value
            self.url = fmt.format(self.current_query, feed_type,
                                  self.freshest - 2, self.limit())
        else:
            self.paging_url = "%s&until=%d" % (self.url, self.prev_stalest)
        return headers

    def _on_failure(self, resp, paging, url):
        execute_retry = True
        try:
            status_code = resp.status_code
            resp = resp.json()
            err_code = resp.get('error', {}).get('code')
            if (status_code == 404 and err_code in [803, 2500] or
                    status_code == 500 and err_code == 2):
                # Page feed requests require only an access token [1] but
                # user feed requests require a user access token with
                # read_stream permission [2].
                # [1]: https://developers.facebook.com/docs/graph-api/
                #      reference/v2.2/page/feed
                # [2]: https://developers.facebook.com/docs/graph-api/
                #      reference/v2.2/user/feed
                self.logger.warning("Skipping feed: {}".format(
                    self.current_query))
                execute_retry = False
                self._increment_idx()
        finally:
            self.logger.error(
                "Polling request of {} returned status {}: {}".format(
                    url, status_code, resp))
        if execute_retry:
            self._retry(paging)
class CronTrigger():

    """ Notify signals according to a cron-like timetable """

    cron = ObjectProperty(CronConf, title='Cron Schedule',
                          default=CronConf())
    utc = BoolProperty(title='UTC', default=False)

    def __init__(self):
        super().__init__()
        self._job = None
        self._cron_specs = None

    def configure(self, context):
        super().configure(context)
        # TODO: check that the config is valid cron syntax
        self._cron_specs = [self.cron().minute(),
                            self.cron().hour(),
                            self.cron().day_of_month(),
                            self.cron().month(),
                            self.cron().day_of_week()]

    def start(self):
        super().start()
        # Like crontab, check to run jobs every minute
        self._job = Job(self._cron, timedelta(minutes=1), True)
        # Run a cron cycle immediately, but in a new thread since it
        # might take some time and we don't want it to hold up start
        spawn(self._cron)

    def stop(self):
        """ Stop the simulator thread and signal generation """
        if self._job:
            self._job.cancel()
        super().stop()

    def _cron(self):
        """ Called every minute to check if cron job should notify signals
        """
        self.logger.debug("Checking if cron emit should run")
        if self.utc():
            now = datetime.utcnow()
        else:
            now = datetime.now()
        now = [str(now.minute),
               str(now.hour),
               str(now.day),
               str(now.month),
               str(now.weekday())]
        if self._check_cron(now):
            spawn(self._emit)

    def _check_cron(self, now):
        """ Return True if the cron property matches with `now`

        `now` is a list containing the 5 cron fields
        """
        for i in range(5):
            # '*' should match no matter what
            if self._cron_specs[i] == '*':
                now[i] = '*'
        # TODO: handle more interesting cron settings than just numbers
        # and '*'
        return now == self._cron_specs

    def _emit(self):
        self.logger.debug("Generating signals")
        signals = self.generate_signals()
        # If a generator is returned, build the list
        if not isinstance(signals, list):
            signals = list(signals)
        if signals:
            self.logger.debug("Notifying {} signals".format(len(signals)))
            self.notify_signals(signals)
        else:
            self.logger.debug("No signals generated")
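# A standalone sketch of the _check_cron comparison with dummy schedules:
# each '*' in the configured specs wildcards the matching field of `now`
# before the lists are compared. Field order matches the block: minute,
# hour, day, month, weekday.
def check_cron(cron_specs, now):
    now = list(now)
    for i in range(5):
        if cron_specs[i] == '*':
            now[i] = '*'
    return now == cron_specs


# fires at minute 30 of any hour/day/month/weekday
assert check_cron(['30', '*', '*', '*', '*'], ['30', '14', '1', '6', '2'])
assert not check_cron(['30', '*', '*', '*', '*'], ['31', '14', '1', '6', '2'])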
class TensorFlow(EnrichSignals, Block):

    layers = ListProperty(Layers,
                          title='Network Layers',
                          default=[{'count': 10,
                                    'activation': 'softmax',
                                    'initial_weights': 'random',
                                    'bias': True}])
    network_config = ObjectProperty(NetworkConfig,
                                    title='ANN Configuration',
                                    default=NetworkConfig())
    models = ObjectProperty(ModelManagement,
                            title='Model Management',
                            default=ModelManagement())
    version = VersionProperty("0.5.0")

    def __init__(self):
        super().__init__()
        self.X = None
        self.XX = None
        self.Y_ = None
        self.prob_keep = None
        self.train_step = None
        self.correct_prediction = None
        self.prediction = None
        self.sess = None
        self.loss_function = None
        self.saver = None
        self.iter = 0
        self.summaries = None
        self.summary_writer = None

    def configure(self, context):
        super().configure(context)
        if self.network_config().random_seed() is not None:
            tf.set_random_seed(self.network_config().random_seed())
        # input tensor shape
        shape = []
        for dim in self.network_config().input_dim():
            if dim.value.value == -1:
                shape.append(None)
            else:
                shape.append(dim.value.value)
        self.X = tf.placeholder(tf.float32, shape=shape, name='INPUT')
        # specify desired output (labels)
        shape = [None, self.layers()[-1].count()]
        self.Y_ = tf.placeholder(tf.float32, shape=shape, name='LABELS')
        self.prob_keep = tf.placeholder(tf.float32, name='PROB_KEEP')
        layers_logits = {}
        prev_layer = self.X
        for i, layer in enumerate(self.layers()):
            name = 'layer{}'.format(i)
            with tf.name_scope(name):
                if layer.activation().value != 'dropout':
                    flattened = 1
                    for dim in prev_layer.shape:
                        if dim.value is not None:
                            flattened *= dim.value
                    # TODO: Flatten only if not convolutional layer
                    XX = tf.reshape(prev_layer, [-1, flattened])
                    W = tf.Variable(
                        getattr(tf, layer.initial_weights().value)(
                            [XX.shape[-1].value, layer.count()]),
                        name='{}_WEIGHTS'.format(name))
                    b = tf.Variable(
                        getattr(tf, layer.initial_weights().value)(
                            [layer.count()]),
                        name='{}_BIASES'.format(name))
                    if self.models().tensorboard_int():
                        with tf.name_scope('weights'):
                            tf.summary.histogram('weights', W)
                        with tf.name_scope('biases'):
                            tf.summary.histogram('biases', b)
                    if i == (len(self.layers()) - 1):
                        # calculate logits separately for use by loss
                        # function
                        if layer.bias.value:
                            layers_logits[name + '_logits'] = \
                                tf.matmul(XX, W) + b
                        else:
                            layers_logits[name + '_logits'] = \
                                tf.matmul(XX, W)
                        layers_logits[name] = getattr(
                            tf.nn, layer.activation().value)(
                                layers_logits[name + '_logits'])
                    else:
                        if layer.bias.value:
                            logits = tf.matmul(XX, W) + b
                        else:
                            logits = tf.matmul(XX, W)
                        layers_logits[name] = \
                            getattr(tf.nn, layer.activation().value)(logits)
                else:
                    name = 'layer{}_d'.format(i)
                    layers_logits[name] = tf.nn.dropout(prev_layer,
                                                        self.prob_keep)
            prev_layer = layers_logits[name]
        output_layer_num = len(self.layers()) - 1
        Y = layers_logits['layer{}'.format(output_layer_num)]
        Y_logits = layers_logits['layer{}_logits'.format(output_layer_num)]
        if self.network_config().loss().value == 'cross_entropy':
            self.loss_function = tf.reduce_mean(abs(self.Y_ * tf.log(Y)))
        if self.network_config().loss().value == \
                'softmax_cross_entropy_with_logits':
            self.loss_function = tf.reduce_mean(
                tf.nn.softmax_cross_entropy_with_logits(logits=Y_logits,
                                                        labels=self.Y_))
        if self.network_config().loss().value == 'mean_absolute_error':
            self.loss_function = tf.reduce_mean(abs(self.Y_ - Y))
        if self.models().tensorboard_int():
            with tf.name_scope('loss'):
                tf.summary.scalar(self.network_config().loss().value,
                                  self.loss_function)
        self.train_step = getattr(
            tf.train, self.network_config().optimizer().value)(
                self.network_config().learning_rate()).minimize(
                    self.loss_function)
        self.prediction = Y
        if self.models().load_file() or self.models().save_file():
            self.saver = tf.train.Saver(max_to_keep=None)
        self.sess = tf.Session()
        if self.models().tensorboard_int():
            label = self.models().tensorboard_tag()
            self.summaries = tf.summary.merge_all()
            self.summary_writer = tf.summary.FileWriter(
                '{}/{}'.format(self.models().tensorboard_dir(), label),
                self.sess.graph)
            self.logger.debug('TensorBoard summary label: {}'.format(label))
        if self.models().load_file():
            self.saver.restore(self.sess, self.models().load_file())
        else:
            self.sess.run(tf.global_variables_initializer())

    def process_signals(self, signals, input_id=None):
        new_signals = []
        for signal in signals:
            if input_id == 'train':
                if self.models().tensorboard_int():
                    if self.iter % self.models().tensorboard_int() == 0:
                        summary, _, loss, predict = self._train(signal)
                        self.summary_writer.add_summary(summary, self.iter)
                    else:
                        _, loss, predict = self._train(signal)
                    self.iter += 1
                else:
                    _, loss, predict = self._train(signal)
                output = {'input_id': input_id,
                          'loss': loss,
                          'prediction': predict}
                new_signals.append(self.get_output_signal(output, signal))
            elif input_id == 'test':
                loss, predict = self._test(signal)
                output = {'input_id': input_id,
                          'loss': loss,
                          'prediction': predict}
                new_signals.append(self.get_output_signal(output, signal))
            else:
                predict = self._predict(signal)
                output = {'input_id': input_id,
                          'loss': None,
                          'prediction': predict}
                new_signals.append(self.get_output_signal(output, signal))
        self.notify_signals(new_signals)

    def stop(self):
        if self.models().save_file():
            self.logger.debug('saving model to {}'.format(
                self.models().save_file()))
            self.saver.save(self.sess, self.models().save_file())
        if self.models().tensorboard_int():
            self.summary_writer.close()
        self.sess.close()
        super().stop()

    def _train(self, signal):
        batch_X = signal.batch
        batch_Y_ = signal.labels
        fetches = [self.train_step, self.loss_function, self.prediction]
        dropout_rate = 1 - self.network_config().dropout()
        if self.models().tensorboard_int():
            if self.iter % self.models().tensorboard_int() == 0:
                fetches = [self.summaries] + fetches
        return self.sess.run(fetches,
                             feed_dict={self.X: batch_X,
                                        self.Y_: batch_Y_,
                                        self.prob_keep: dropout_rate})

    def _test(self, signal):
        batch_X = signal.batch
        batch_Y_ = signal.labels
        fetches = [self.loss_function, self.prediction]
        return self.sess.run(fetches,
                             feed_dict={self.X: batch_X,
                                        self.Y_: batch_Y_,
                                        self.prob_keep: 1})

    def _predict(self, signal):
        batch_X = signal.batch
        fetches = self.prediction
        return self.sess.run(fetches,
                             feed_dict={self.X: batch_X,
                                        self.prob_keep: 1})
class EIPGetAttribute(EnrichSignals, Retry, Block):

    host = StringProperty(title='Hostname', default='localhost', order=0)
    path = ObjectProperty(ObjectPath, title='CIP Object Path', order=1)
    version = VersionProperty('0.2.1')

    def __init__(self):
        super().__init__()
        self.cnxn = None

    def before_retry(self, *args, **kwargs):
        self._disconnect()
        self._connect()

    def configure(self, context):
        super().configure(context)
        try:
            self._connect()
        except Exception:
            self.cnxn = None
            msg = 'Unable to connect to {}'.format(self.host())
            self.logger.exception(msg)

    def process_signals(self, signals):
        host = self.host()
        outgoing_signals = []
        if self.cnxn is None:
            try:
                msg = 'Not connected to {}, reconnecting...'.format(host)
                self.logger.warning(msg)
                self._connect()
            except Exception:
                self.cnxn = None
                msg = 'Unable to connect to {}'.format(host)
                self.logger.exception(msg)
                return
        for signal in signals:
            path = [self.path().class_id(signal),
                    self.path().instance_num(signal)]
            if self.path().attribute_num(signal) is not None:
                path.append(int(self.path().attribute_num(signal)))
            try:
                value = self.execute_with_retry(self._make_request, path)
            except Exception:
                value = False
                self.cnxn = None
                msg = 'get_attribute_single failed, host: {}, path: {}'
                self.logger.exception(msg.format(host, path))
            if value:
                new_signal_dict = {}
                new_signal_dict['host'] = host
                new_signal_dict['path'] = path
                new_signal_dict['value'] = value
                new_signal = self.get_output_signal(new_signal_dict, signal)
                outgoing_signals.append(new_signal)
            else:
                if self.cnxn is None:
                    msg = 'Connection to {} failed.'.format(host)
                else:
                    status = self.cnxn.get_status()
                    msg = ('get_attribute_single failed, {}, '
                           'host: {}, path: {}').format(status, host, path)
                self.logger.error(msg)
        self.notify_signals(outgoing_signals)

    def stop(self):
        self._disconnect()
        super().stop()

    def _connect(self):
        # each instance of CIPDriver can open a connection to only 1 host;
        # subsequent calls to open() are quietly ignored, and close()
        # does not take any args, so one host per block instance for now
        self.cnxn = CIPDriver()
        self.cnxn.open(self.host())

    def _disconnect(self):
        if self.cnxn is not None:
            self.cnxn.close()
            self.cnxn = None

    def _make_request(self, path):
        return self.cnxn.get_attribute_single(*path)
class AddTimestamp(EnrichSignals, Block):

    utc = BoolProperty(title='UTC', default=True)
    output_attr = StringProperty(
        title='Outgoing Signal Attribute',
        default='timestamp',
        order=0,
        advanced=True)
    milliseconds = BoolProperty(
        title='Milliseconds',
        default=True,
        order=1,
        advanced=True)
    enrich = ObjectProperty(
        CustomEnrichProperties,
        title='Signal Enrichment',
        default=CustomEnrichProperties(),  # use custom default
        order=100,
        advanced=True)
    version = VersionProperty('0.1.0')

    def process_signals(self, signals):
        current_time = self._get_current_time()
        output_signals = []
        for signal in signals:
            signal_dict = {
                self.output_attr(signal): current_time,
            }
            output_signal = self.get_output_signal(signal_dict, signal)
            output_signals.append(output_signal)
        self.notify_signals(output_signals)

    def _get_current_time(self):
        """ Return an ISO-formatted string.

        The `_truncate_*` helpers support Python < 3.6, after which
        `datetime.isoformat()` takes a `timespec` arg."""
        if self.utc():
            now = datetime.utcnow()
            if not self.milliseconds():
                now = self._truncate_fractional_seconds(now)
                current_time = now.isoformat() + 'Z'
            else:
                current_time = now.isoformat()
                current_time = self._truncate_microseconds(current_time) + 'Z'
            return current_time
        # get local timestamp
        now = datetime.now()
        if not self.milliseconds():
            now = self._truncate_fractional_seconds(now)
            current_time = str(self._localize_time(now))
        else:
            current_time = str(self._localize_time(now))
            current_time = self._truncate_microseconds(current_time)
        # remove colon from TZ info (±HHMM format)
        # TODO: Add options for formats ±HH:MM, ±HH
        current_time = ''.join(current_time.rsplit(':', maxsplit=1))
        return current_time

    @staticmethod
    def _localize_time(now):
        """ Return datetime `now` with local timezone."""
        current_time_with_tz = get_localzone().localize(now)
        current_time = current_time_with_tz.isoformat()
        return current_time

    @staticmethod
    def _truncate_fractional_seconds(now):
        """ Return a datetime equal to `now` with `microsecond=0`"""
        now = datetime(
            year=now.year,
            month=now.month,
            day=now.day,
            hour=now.hour,
            minute=now.minute,
            second=now.second,
            microsecond=0)
        return now

    @staticmethod
    def _truncate_microseconds(timestamp):
        """ Remove microseconds from string `timestamp`"""
        base, suffix = timestamp.split('.')
        microseconds, offset = suffix[:6], suffix[6:]
        milliseconds = microseconds[:3]
        suffix = milliseconds + offset
        timestamp = '.'.join([base, suffix])
        return timestamp
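# A small demonstration (hardcoded inputs) of _truncate_microseconds:
# six digits of microseconds are cut down to three (milliseconds) while
# anything after them, such as a timezone offset, is preserved. The
# offset here is shown without a colon for simplicity; in the block, the
# colon is removed in a later step of _get_current_time.
def truncate_microseconds(timestamp):
    base, suffix = timestamp.split('.')
    microseconds, offset = suffix[:6], suffix[6:]
    return '.'.join([base, microseconds[:3] + offset])


assert truncate_microseconds('2020-01-02T03:04:05.123456') == \
    '2020-01-02T03:04:05.123'
assert truncate_microseconds('2020-01-02T03:04:05.123456-0700') == \
    '2020-01-02T03:04:05.123-0700'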
class XeroCreateInvoice(Block):

    line_items = ObjectProperty(LineItems, title='Invoice Line Item',
                                default={})
    version = VersionProperty("0.1.3")
    consumer_key = StringProperty(title='Xero Consumer Key',
                                  default='[[XERO_CONSUMER_KEY]]',
                                  allow_none=False)
    contact_name = StringProperty(title='Contact Name (Stripe customerID)',
                                  default='{{ $customer }}')

    def __init__(self):
        self.xero = None
        self.credentials = None
        super().__init__()

    def configure(self, context):
        super().configure(context)
        con_key = self.consumer_key()
        with open('blocks/xero/keys/privatekey.pem') as keyfile:
            rsa_private_key = keyfile.read()
        self.credentials = PrivateCredentials(con_key, rsa_private_key)
        self.xero = Xero(self.credentials)

    def process_signals(self, signals):
        response_signal = []
        for signal in signals:
            response_signal.append(Signal(
                self.xero.invoices.put({
                    'Type': self.line_items().invoice_type(),  # ACCREC
                    'Contact': {
                        # Stripe customer ('cus_000...')
                        'Name': self.contact_name(signal)
                    },
                    'DueDate': datetime.utcnow() + timedelta(days=30),
                    'LineItems': [{
                        'Description': self.line_items().description(signal),
                        'Quantity': self.line_items().quantity(signal),
                        'UnitAmount': self.line_items().unit_amount(signal),
                        'TaxAmount': self.line_items().tax_amount(signal),
                        'TaxType': 'NONE',
                        'AccountCode':
                            self.line_items().invoice_account_code()
                    }],
                    'Status': 'SUBMITTED'
                })[0]))
        self.notify_signals(response_signal)
class TwilioVoice(TerminatorBlock):

    recipients = ListProperty(Recipient, title='Recipients', default=[])
    creds = ObjectProperty(TwilioCreds, title='Credentials')
    from_ = StringProperty(default='[[TWILIO_NUMBER]]', title='From')
    url = StringProperty(default='', title='Callback URL')
    message = Property(default='An empty voice message', title='Message')
    port = IntProperty(title='Port', default=8184)
    host = StringProperty(title='Host', default='[[NIOHOST]]')
    endpoint = StringProperty(title='Endpoint', default='')
    version = VersionProperty("1.0.0")

    def __init__(self):
        super().__init__()
        self._client = None
        self._messages = {}
        self._server = None

    def configure(self, context):
        super().configure(context)
        self._client = TwilioRestClient(self.creds().sid(),
                                        self.creds().token())
        conf = {
            'host': self.host(),
            'port': self.port()
        }
        self.configure_server(conf, Speak(self.endpoint(), self))

    def start(self):
        super().start()
        # Start Web Server
        self.start_server()

    def stop(self):
        super().stop()
        # Stop Web Server
        self.stop_server()

    def process_signals(self, signals):
        for s in signals:
            self._place_calls(s)

    def _place_calls(self, signal):
        try:
            msg = self.message(signal)
            msg_id = uuid4().hex
            self._messages[msg_id] = msg
            for rcp in self.recipients():
                spawn(target=self._call, recipient=rcp, message_id=msg_id)
        except Exception as e:
            self.logger.error(
                "Message evaluation failed: {0}: {1}".format(
                    type(e).__name__, str(e)))

    def _call(self, recipient, message_id, retry=False):
        try:
            # Twilio sends back some useless XML. Don't care.
            to = recipient.number()
            from_ = self.from_()
            url = "%s?msg_id=%s" % (self.url(), message_id)
            self.logger.debug("Making call to {}, from {}, with callback url"
                              " {}".format(to, from_, url))
            self._client.calls.create(to=to, from_=from_, url=url)
        except TwilioRestException as e:
            self.logger.error("Status %d" % e.status)
            if not retry:
                self.logger.debug("Retrying failed request")
                self._call(recipient, message_id, True)
            else:
                self.logger.error("Retry request failed")
        except Exception as e:
            self.logger.error("Error sending voice {}: {}".format(
                recipient, e))
class Email(TerminatorBlock):

    """ A block for sending email.

    Properties:
        to (list(Identity)): A list of recipient identities (name/email).
        server (SMTPConfig): host, port, account, etc. for the SMTP server.
        message (Message): The message contents and sender name.
    """

    version = VersionProperty("0.1.0")
    to = ListProperty(Identity, title='Receiver', default=[])
    server = ObjectProperty(SMTPConfig, title='Server', allow_none=False)
    message = ObjectProperty(Message, title='Message', allow_none=True)

    def __init__(self):
        super().__init__()
        self._retry_conn = None

    def process_signals(self, signals):
        """ For each signal object, build the configured message and send
        it to each recipient.

        Note that this method does not return until all of the messages
        are successfully sent (i.e. all the sendmail threads have exited).
        This avoids dropped messages in the event that the disconnect
        thread gets scheduled before all sendmail threads are complete.

        Args:
            signals (list(Signal)): The signals to process.

        Returns:
            None

        """
        # make a new connection to the SMTP server each time we get a new
        # batch of signals.
        smtp_conn = SMTPConnection(self.server(), self.logger)
        try:
            smtp_conn.connect()
        except Exception:
            self.logger.error('Aborting sending emails. '
                              '{} signals discarded'.format(len(signals)))
            return

        # handle each incoming signal
        for signal in signals:
            try:
                subject = self.message().subject(signal)
            except Exception as e:
                subject = self.get_defaults()['message'].subject()
                self.logger.error(
                    "Email subject evaluation failed: {0}: {1}".format(
                        type(e).__name__, str(e)))
            try:
                body = self.message().body(signal)
            except Exception as e:
                body = self.get_defaults()['message'].body()
                self.logger.error(
                    "Email body evaluation failed: {0}: {1}".format(
                        type(e).__name__, str(e)))
            self._send_to_all(smtp_conn, subject, body, signal)

        # drop the SMTP connection after each round of signals
        smtp_conn.disconnect()

    def _send_to_all(self, conn, subject, body, signal):
        """ Build a message based on the provided content and send it to
        each of the configured recipients.

        Args:
            conn (SMTPConnection): The connection over which to send the
                message.
            subject (str): The desired subject line of the message.
            body (str): The desired message body.

        Returns:
            None

        """
        sender = self.message().sender()
        msg = self._construct_msg(subject, body)
        for rcp in self.to():
            # customize the message to each recipient; delete any existing
            # To header first, since assignment appends rather than replaces
            if 'To' in msg:
                del msg['To']
            msg['To'] = rcp.name(signal)
            try:
                conn.sendmail(sender, rcp.email(signal), msg.as_string())
                self.logger.debug(
                    "Sent mail to: {}".format(rcp.email(signal)))
            except Exception as e:
                self.logger.error("Failed to send mail: {}".format(e))

    def _construct_msg(self, subject, body):
        """ Construct the multipart message. Mail clients unable to render
        HTML will default to plaintext.

        Args:
            subject (str): The subject line.
            body (str): The message body.

        Returns:
            msg (MIMEMultipart): A message containing generic headers, an
                HTML version, and a plaintext version.

        """
        msg = MIMEMultipart('alternative')
        msg['Subject'] = subject
        msg['From'] = self.message().sender()
        plain_part = MIMEText(body, 'plain')
        msg.attach(plain_part)
        html_part = MIMEText(HTML_MSG_FORMAT.format(body), 'html')
        msg.attach(html_part)
        return msg
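# A self-contained sketch (stdlib only, dummy addresses) of the multipart
# message _construct_msg builds: an 'alternative' container where capable
# mail clients prefer the last part attached (HTML) and fall back to the
# plaintext part otherwise.
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

msg = MIMEMultipart('alternative')
msg['Subject'] = 'Example'
msg['From'] = 'sender@example.com'
msg['To'] = 'recipient@example.com'
msg.attach(MIMEText('hello', 'plain'))
msg.attach(MIMEText('<html><body><p>hello</p></body></html>', 'html'))
print(msg.as_string())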