# KRules ruleset module (line breaks reconstructed; source arrived collapsed onto one line).
# Defines a websocket-notification rule reacting to "temp-status-back-to-normal" events.
# NOTE(review): the trailing `rulesdata` literal is truncated in this view — the
# opening `processing: [` list is never closed here; the remainder lies outside this chunk.
from krules_core import RuleConst as Const, messages
import requests
import os

## ENABLE RESULTS ##########
# Publish every rule-processing result to the results stream.
from krules_core.providers import results_rx_factory
from krules_env import publish_results_all, publish_results_errors
import pprint

# results_rx_factory().subscribe(
#     on_next=pprint.pprint
# )
results_rx_factory().subscribe(
    on_next=publish_results_all,
)

# Short aliases for the rule-definition dictionary keys used in `rulesdata`.
rulename = Const.RULENAME
subscribe_to = Const.SUBSCRIBE_TO
ruledata = Const.RULEDATA
filters = Const.FILTERS
processing = Const.PROCESSING

# Rule definitions: each entry is a description string followed by a rule dict.
rulesdata = [
    """ On status NORMAL notify """,
    {
        rulename: "on-temp-status-back-to-normal-websocket-notifier",
        subscribe_to: "temp-status-back-to-normal",
        ruledata: {
            processing: [
# KRules ruleset module (line breaks reconstructed; source arrived collapsed onto one line).
# Ingests Google Cloud Pub/Sub push messages and re-emits them as application events.
# NOTE(review): the top of this file is cut off in this view — the `rulename`,
# `subscribe_to`, `ruledata` aliases and the imports for `Const`, `datetime`,
# `SetSubjectPropertySilently` and `B64Decode` are presumably defined above; confirm.
filters = Const.FILTERS
processing = Const.PROCESSING

from krules_core.providers import results_rx_factory
from krules_env import publish_results_errors, publish_results_all, publish_results_filtered
from dateutil.parser import parse

# import pprint
# results_rx_factory().subscribe(
#     on_next=pprint.pprint
# )
# results_rx_factory().subscribe(
#     on_next=publish_results_all,
# )
# Only error results are published to the results stream in this module.
results_rx_factory().subscribe(
    on_next=publish_results_errors,
)

# Rule definitions: each entry is a description string followed by a rule dict.
rulesdata = [
    """ Just emit "data-received" event Data should be handled by application specific logic """,
    {
        rulename: "on-data-received-propagate",
        subscribe_to: "com.google.cloud.pubsub.topic.publish",
        ruledata: {
            processing: [
                # Record last-contact timestamp; "Silently" suggests no property-change
                # event is emitted — TODO confirm against SetSubjectPropertySilently.
                SetSubjectPropertySilently("lastSeen", datetime.now().isoformat()),
                # Pub/Sub message data is base64-encoded; decode it into payload["data"].
                B64Decode(source=lambda payload: payload["message"]["data"], payload_dest="data"),
# KRules scheduler ruleset (line breaks reconstructed; source arrived collapsed onto one line).
# Stores scheduled-message documents in MongoDB.
# NOTE(review): truncated — `rulesdata` is cut off right after its first description
# string, and the imports for `Const`, `results_rx_factory` and
# `publish_results_filtered` are presumably above this view; confirm.
import pprint

from krules_mongodb import WithDatabase, WithCollection, MongoDBInsertOne, MongoDBUpdateOne, MongoDBFind, \
    MongoDBDeleteByIds
from base_functions import SCHEDULE_MESSAGE, Schedule
from pymongo import IndexModel, HASHED, TEXT

# results_rx_factory().subscribe(
#     on_next=pprint.pprint
# )
# results_rx_factory().subscribe(
#     on_next=lambda result: publish_results_filtered(result, "$.rule_name", "on-schedule-received")
# )
# Publish only results where at least one schedule document was actually deleted
# (any `_ids_deleted_count` in the result payload is truthy and > 0).
results_rx_factory().subscribe(
    on_next=lambda result: publish_results_filtered(
        result, "$.._ids_deleted_count", lambda x: x and x > 0))

# Short aliases for the rule-definition dictionary keys used in `rulesdata`.
rulename = Const.RULENAME
subscribe_to = Const.SUBSCRIBE_TO
ruledata = Const.RULEDATA
filters = Const.FILTERS
processing = Const.PROCESSING

# MongoDB storage settings for scheduled messages.
DBNAME = "kr-dev-01"
COLLECTION = "scheduler"
# Text index over "message" and "subject" fields of the scheduler collection.
INDEXES = [IndexModel([("message", TEXT), ("subject", TEXT)])]

# Rule definitions: each entry is a description string followed by a rule dict.
rulesdata = [
    """ Store schedule info """,
# KRules geolocation ruleset (line breaks reconstructed; source arrived collapsed onto one line).
# Uses the Nominatim geocoder to derive location properties.
# NOTE(review): truncated — `Const` and `RuleFunctionBase` are presumably imported
# above this view, and `SetLocationProperties.execute` is cut off at its `def` line,
# so its behavior cannot be documented from here.
from geopy import distance
from geopy.geocoders import Nominatim

# Short aliases for the rule-definition dictionary keys used in `rulesdata`.
rulename = Const.RULENAME
subscribe_to = Const.SUBSCRIBE_TO
ruledata = Const.RULEDATA
filters = Const.FILTERS
processing = Const.PROCESSING

from krules_core.providers import results_rx_factory
from krules_env import publish_results_errors, publish_results_all, publish_results_filtered
import pprint

# Debug subscription: pretty-print every rule-processing result to stdout.
results_rx_factory().subscribe(
    on_next=pprint.pprint
)
# results_rx_factory().subscribe(
#     on_next=publish_results_all,
# )
# results_rx_factory().subscribe(
#     on_next=publish_results_errors,
# )

# Shared module-level geocoder; 10-second timeout for Nominatim requests.
geolocator = Nominatim(user_agent="KRules", timeout=10)


# Rule function intended to set location-derived properties on the subject
# (body not visible in this chunk).
class SetLocationProperties(RuleFunctionBase):

    def execute(self):
# KRules CSV-import ruleset (line breaks reconstructed; source arrived collapsed onto one line).
# Imports device records from CSV files finalized in Google Cloud Storage.
# NOTE(review): truncated — the head (`rulename`/`subscribe_to` aliases, imports for
# `Const` and `CheckSubjectMatch`) and the tail of `rulesdata` lie outside this view;
# the `CheckSubjectMatch(` call below is cut off mid-arguments.
ruledata = Const.RULEDATA
filters = Const.FILTERS
processing = Const.PROCESSING

from krules_core.providers import results_rx_factory, subject_factory, message_router_factory
from krules_env import publish_results_errors, publish_results_all, publish_results_filtered
from cloudstorage.drivers.google import GoogleStorageDriver
from app_functions.cloudstorage.csv import ProcessCSV_AsDict
from app_functions.cloudstorage import DeleteBlob
import pprint

# results_rx_factory().subscribe(
#     on_next=pprint.pprint
# )
# Publish only results whose payload has `processed` equal to True.
results_rx_factory().subscribe(
    on_next=lambda result: publish_results_filtered(
        result, "$.processed", True))
# results_rx_factory().subscribe(
#     on_next=publish_results_errors,
# )

# Rule definitions: each entry is a description string followed by a rule dict.
rulesdata = [
    """ Subscribe to storage, import csv """,
    {
        rulename: "on-csv-upload-import-devices",
        subscribe_to: "com.google.cloud.storage.object.finalize",
        ruledata: {
            filters: [
                # Match only blobs under onboarding/import/<deviceclass>/<filename>.
                CheckSubjectMatch(
                    "onboarding/import/(?P<deviceclass>.+)/(?P<filename>.+)",