"""Send one message to an Azure Service Bus queue.

Usage: python send.py <queue_name> <message_text>
"""
import sys

from azure.servicebus import ServiceBusService, Message

if len(sys.argv) < 3:
    # Fail fast with a usage hint instead of an IndexError traceback.
    sys.exit("usage: {} <queue_name> <message_text>".format(sys.argv[0]))

# NOTE(review): the shared access key is hard-coded; move it to an
# environment variable or secret store before publishing this script.
sbs = ServiceBusService(
    "magicButtonMB",
    shared_access_key_name="RootManageSharedAccessKey",
    shared_access_key_value="SNZewWOTdOkaVShECrp+OCKmSFqW5JCEjEM52slp5TM=")

# Service Bus message bodies are bytes, so encode the command-line text.
msg = Message(sys.argv[2].encode())
sbs.send_queue_message(sys.argv[1], msg)
#Brandon J. McIntyre from azure.servicebus import ServiceBusService, Message, Queue import datetime import threading import time, math import random import socket from bottle import route, run hostname = socket.gethostname() hostport = 2505 bus_service = ServiceBusService( service_namespace='servicebusn2', shared_access_key_name='RootManageSharedAccessKey', shared_access_key_value='0+T7xifpuFJ4HFCodk7D6E9bjyq0imsDE3NRzF2SMdE=') ProductName = ['Financial Trap', 'Failure', 'No Trap'] class myThread(threading.Thread): def __init__(self, threadID, name, counter): super(myThread, self).__init__() # threading.Thread.__init__(self) self.threadID = threadID self.name = name self.counter = counter def run(self): print "Starting " + self.name print "\n" data = {
from config.config import queueConf, azure_context, DATABASE_URI, ACI_CONFIG from azure.servicebus import ServiceBusService, Message, Queue from azure.mgmt.resource import ResourceManagementClient from azure.mgmt.containerinstance import ContainerInstanceManagementClient from azure.mgmt.containerinstance.models import ( ContainerGroup, Container, ContainerPort, Port, IpAddress, EnvironmentVariable, ResourceRequirements, ResourceRequests, ContainerGroupNetworkProtocol, OperatingSystemTypes) resource_client = ResourceManagementClient(azure_context.credentials, azure_context.subscription_id) client = ContainerInstanceManagementClient(azure_context.credentials, azure_context.subscription_id) bus_service = ServiceBusService( service_namespace=queueConf['service_namespace'], shared_access_key_name=queueConf['saskey_name'], shared_access_key_value=queueConf['saskey_value']) BASE_NAMES = deque([ "anders", "wenjun", "robbie", "robin", "allen", "tony", "xiaofeng", "tingting", "harry", "chen" ]) NAMES_COUNTER = 0 IMAGE = "pskreter/worker-container:latest" def main(): sys.stdout.write("Starting Work Cycle...\n") # same as print sys.stdout.flush() while True: try:
__author__ = 'daltrogama'

import datetime
import json

from azure.servicebus import ServiceBusService, Message, Queue

# Service Bus connection for the 'csgriddaltro' namespace.
bus_service = ServiceBusService(
    service_namespace='csgriddaltro',
    shared_access_key_name='RootManageSharedAccessKey',
    shared_access_key_value='t2lyxPaILuLgGxmzyKIBPWpYgPDL1cotvoLNw0qxBL4=')

# Sample grid-job description used when exercising the queue by hand.
# Fields: command id, algorithm binary prefix, project prefix, input files,
# executable, parameters, submit timestamp and requested machine size.
json_message = """{ "command_id": "[email protected]", "algorithm_prfx": "algorithms/TATU/versions/v_001_000_000/bin/Linux26g4_64", "project_prfx": "/AzureBlobStorage/sandbox/admin/tester/admi_test_CBJ6LTHDWM", "project_input_files": [".cmds/admi_test_CBJ6LTHDWM/cmd.parameters", "tatu-a-b-lab-times.pdf", ".cmds/admi_test_CBJ6LTHDWM/script.ksh", ".cmds/admi_test_CBJ6LTHDWM/cmd.properties"], "algorithm_executable_name": "/bin/ksh", "algorithm_parameters": ["/AzureBlobStorage/sandbox/admin/tester/admi_test_CBJ6LTHDWM/.cmds/admi_test_CBJ6LTHDWM/script.ksh"], "sent_timestamp": "12/01/2015 10:00:00", "machine_size": "*" }"""

# The job-description fields above can be unpacked with json.loads(json_message)
# when the parsing path is re-enabled.
# One CloudStorageAccount yields all three storage clients.
storage_account = CloudStorageAccount(STORAGE_ACCOUNT_NAME, STORAGE_ACCOUNT_KEY)
blob_service = storage_account.create_blob_service()
table_service = storage_account.create_table_service()
queue_service = storage_account.create_queue_service()

# Service Bus is a messaging layer that sits between application components
# and lets them exchange messages in a loosely coupled way, improving scale
# and resiliency.
# Docs: http://go.microsoft.com/fwlink/?linkid=246934
SERVICE_BUS_NAMESPACE = '__paste_your_service_bus_namespace_here__'
SERVICE_BUS_KEY = '__paste_your_service_bus_key_here__'
bus_service = ServiceBusService(SERVICE_BUS_NAMESPACE, SERVICE_BUS_KEY,
                                issuer='owner')

if __name__ == '__main__':
    while True:
        # Worker loop placeholder. Prefer a blocking call such as
        # bus_service.receive_queue_message('queue name', timeout=seconds)
        # so the process does not spin at 100% CPU while idle.
        sleep(1.0)
"""Create 150x150 thumbnails for every blob in 'images' lacking one in 'thumbs'."""
from azure.servicebus import ServiceBusService, Message, Queue
import os
import glob
from azure.storage.blob import BlockBlobService, PublicAccess
from PIL import Image

# Clients are configured from the environment so no credentials live in source.
bus_service = ServiceBusService(
    service_namespace=os.environ['SERVICEBUS_NAMESPACE'],
    shared_access_key_name=os.environ['SERVICEBUS_ACCESSKEY_NAME'],
    shared_access_key_value=os.environ['SERVICEBUS_ACCESSKEY'])

block_blob_service = BlockBlobService(account_name=os.environ['STORAGE_ACCOUNT'],
                                      account_key=os.environ['STORAGE_KEY'])

container_name = 'images'
block_blob_service.create_container(container_name)

THUMBNAIL_SIZE = (150, 150)

for blob in block_blob_service.list_blobs(container_name):
    # Idiomatic truthiness check (was: '== False').
    if not block_blob_service.exists("thumbs", blob.name):
        print("Resizing: " + blob.name)
        # Download to a local temp file, resize in place, re-upload, clean up.
        block_blob_service.get_blob_to_path("images", blob.name, blob.name)
        im = Image.open(blob.name)
        im.thumbnail(THUMBNAIL_SIZE)
        im.save(blob.name, "JPEG")
        block_blob_service.create_blob_from_path("thumbs", blob.name, blob.name)
        os.remove(blob.name)
"""Receive and print one message from the 'iotqueue' Service Bus queue."""
from azure.servicebus import ServiceBusService, Message, Queue

print('start')

bus_service = ServiceBusService(
    service_namespace='PuIOTbus',
    shared_access_key_name='RootManageSharedAccessKey',
    shared_access_key_value='SAnp//UKmcm29urWfQy8sdvh3Ipf8/dD7qlN3ZIsWjM=')

# Sending path kept around for manual testing:
# msg = Message(b'Test Message')
# bus_service.send_queue_message('iotqueue', msg)

# peek_lock=False is a destructive receive: the message is removed from the
# queue as soon as it is read.
print('receive')  # fixed typo: was 'recieve'
msg = bus_service.receive_queue_message('iotqueue', peek_lock=False)
print(msg.body)
def _process(video):
    """Run the full processing pipeline for one video file.

    Steps:
      1. split the video into an input-frames directory and an audio file
      2. enqueue every frame on the Service Bus queue for the workers
      3. poll storage until the output frame count matches the input count
      4. stitch the processed frames and the audio back into a video
    """
    # All connection and path settings come from the environment.
    namespace = os.getenv("SB_NAMESPACE")
    queue = os.getenv("SB_QUEUE")
    sb_key_name = os.getenv("SB_SHARED_ACCESS_KEY_NAME")
    sb_key_value = os.getenv("SB_SHARED_ACCESS_KEY_VALUE")
    mount_dir = os.getenv("MOUNT_DIR", "data")
    terminate = os.getenv("TERMINATE")

    t0 = time.time()

    # Derived working paths, all under <mount_dir>/<video_name>/.
    video_name = video.split(".")[0]
    work_dir = os.path.join(mount_dir, video_name)
    input_dir = os.path.join(work_dir, "input_frames")
    output_dir = os.path.join(work_dir, "output_frames")
    audio_file = os.path.join(work_dir, "audio.aac")
    if not os.path.exists(work_dir):
        os.makedirs(work_dir)

    # Log to stdout and to a rotating file next to the video's data.
    handler_format = get_handler_format()
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(handler_format)
    file_handler = RotatingFileHandler(
        os.path.join(work_dir, "{}.log".format(video_name)), maxBytes=20000)
    file_handler.setFormatter(handler_format)
    logger = logging.getLogger("root")
    logger.setLevel(logging.DEBUG)
    logger.addHandler(console_handler)
    logger.addHandler(file_handler)
    logger.propagate = False

    # Step 1: split the video and upload frames + audio.
    logger.debug("Preprocessing video {}".format(video))
    preprocess(video=video, mount_dir=mount_dir)
    t1 = time.time()

    bus_service = ServiceBusService(
        service_namespace=namespace,
        shared_access_key_name=sb_key_name,
        shared_access_key_value=sb_key_value,
    )

    # Step 2: enqueue each input frame for the worker fleet.
    logger.debug("Adding images from {} to queue {}".format(input_dir, queue))
    image_count = add_images_to_queue(
        mount_dir=mount_dir,
        queue=queue,
        video_name=video_name,
        bus_service=bus_service,
    )
    t2 = time.time()

    # Test hook: stop right after enqueueing when TERMINATE is set.
    if terminate:
        exit(0)

    # Step 3: wait until every frame has been processed.
    logger.debug(
        "Polling for input images {} to equal output images {}".format(
            input_dir, output_dir))
    while True:
        _, _, files = next(os.walk(output_dir))
        if len(files) == image_count:
            t3 = time.time()
            # Step 4: reassemble the video from processed frames + audio.
            logger.debug(
                "Stitching video together with processed frames dir '{}' and audio file '{}'."
                .format(output_dir, audio_file))
            postprocess(video_name=video_name, mount_dir=mount_dir)
            t4 = time.time()
            break
        logger.debug("Images are still processing. Retrying in 10 seconds...")
        time.sleep(10)
    t5 = time.time()

    # Per-stage timing summary.
    logger.debug("Preprocessing video finished.... Time taken in seconds: {:.2f}".format(t1 - t0))
    logger.debug("Adding image to queue finished.. Time taken in seconds: {:.2f}".format(t2 - t1))
    logger.debug("Detecting vehicles.............. Time taken in seconds: {:.2f}".format(t3 - t2))
    logger.debug("Postprocessing video finished... Time taken in seconds: {:.2f}".format(t4 - t3))
    logger.debug("Total process................... Time taken in seconds: {:.2f}".format(t5 - t0))
# --- Raspberry Pi / GrovePi pin assignments ---
dht_sensor_port = 7      # digital D7
dht_sensor_type = 0
sound_sensor = 0         # analog A0
light_sensor = 1         # analog A1
ultrasonic_ranger = 4    # digital D4

# --- Azure Event Hub settings (fill in before running) ---
key_name = "RootManageSharedAccessKey"
key_value = ""
event_hub_namespace = ""
event_hub_name = ""

sbs = ServiceBusService(service_namespace=event_hub_namespace,
                        shared_access_key_name=key_name,
                        shared_access_key_value=key_value)

# Sample each sensor in turn, pausing between reads so the GrovePi bus
# has time to settle.
while True:
    sleep(0.5)
    soundLevel = grovepi.analogRead(sound_sensor)
    sleep(0.5)
    lightLevel = grovepi.analogRead(light_sensor)
    sleep(0.5)
    distance = ultrasonicRead(ultrasonic_ranger)
    sleep(0.3)
    temp, hum = dht(dht_sensor_port, dht_sensor_type)
"""Generate fake shop transactions and push them to an Azure Event Hub."""
import json
import random
from time import time, sleep
from datetime import datetime, timedelta

import pandas as pd

# Install the Azure Service Bus client with:  pip install azure-servicebus
# For Python 3 use:
#   from azure.servicebus.control_client import ServiceBusService
from azure.servicebus import ServiceBusService  # For Python 2

# Establishing connection with the Event Hub.
# FIX: removed the stray trailing quote after the key value, which made
# this statement a syntax error.
sbs = ServiceBusService(service_namespace='Your Created Namespace',
                        shared_access_key_name='RootManageSharedAccessKey',
                        shared_access_key_value='Extracted Key from Connection String')

# Catalogue of items and payment methods used to randomise transactions.
shop_items = ['Noodles', 'Flour', 'Rice', 'Pancake Mix', 'Toilet Soap',
              'Ball Pen', 'Shampoo', 'Olive Oil', 'Banana', 'Pomegranate',
              'Marshmallows', 'Tropicana', 'Hair Oil', 'Room Freshener',
              'Deodorant', 'Incense Sticks', 'Coffee Beans', 'Chamonile Tea',
              'Marker', 'Napkins']
payment = ['Cash', 'Debit Card', 'Credit Card', 'PayTm']

tr_data = {}
df_final = pd.DataFrame()
count = 0
# One customer per outer iteration; each buys a random number of items.
for j in range(1, 20000):
    count = count + 1
    print(count)
    df = pd.DataFrame()
    for i in range(random.randint(1, 20)):
        tr_data_i = {}
        tr_data_i['cust_id'] = j
from azure.servicebus import ServiceBusService, Message, Queue from multiprocessing import Pool import thread from azure.storage.table import TableService, Entity import json bus_service = ServiceBusService( service_namespace='rossamrit', shared_access_key_name='RootManageSharedAccessKey', shared_access_key_value='WQ8H90SmQA7OZEux1jpGP7bh4gZBJuVcJzooReElV74=') table_service = TableService( account_name='amrittable', account_key= 'Cgqr5ZSRv/K/9ftaeQMMHzQcEL06VlyulbDtqnXD0eb6QujMkijdYzk0m37kJaswlyyVgGd9zyPw8P0ArdZy2Q==' ) msgl = "" msgl2 = "" def hello_world(name): while 1: try: msg = bus_service.receive_queue_message('ross', peek_lock=True) msg1 = msg.body msgl2 = json.loads(msg1) print(msgl2["UserId"]) print(msgl2["SellerID"]) print(msgl2["Product Name"]) print(msgl2["Sale Price"])
import codecs
import getopt
import json
import os
import sys

import tweepy
from tweepy import OAuthHandler
from azure.servicebus import ServiceBusService
from time import sleep

import config

# Service Bus client built from the config module (connection settings).
sbs = ServiceBusService(service_namespace=config.servns,
                        shared_access_key_name=config.key_name,
                        shared_access_key_value=config.key_value)

# Defaults for the command-line options.
max_tweets = 20
arg_query = ''
arg_tweet_id = 10000
arg_time = 60

# Twitter API authentication.
auth = OAuthHandler(config.consumer_key, config.consumer_secret)
auth.set_access_token(config.access_token, config.access_secret)
api = tweepy.API(auth)

# Parse options: -q sets the search query, -t sets the polling interval.
# (in progress: switching between query mode and timeline mode)
opts, args = getopt.getopt(sys.argv[1:], 'q:t:')
for opt, arg in opts:
    if opt == '-q':
        arg_query = arg
    elif opt == '-t':
        arg_time = float(arg)
from azure.servicebus import ServiceBusService, Message, Queue

bus_service = ServiceBusService(
    service_namespace='debrisbot-ns',
    shared_access_key_name='RootManageSharedAccessKey',
    shared_access_key_value='QPxUGbIzO33d5oeTjv3NRPe+DLkzP+f+lPwwken6K00=')

# bus_service.create_queue('botqueue')

# Drain the queue forever; peek_lock=False deletes each message on receipt.
while True:
    received = bus_service.receive_queue_message('botqueue', peek_lock=False)
    if received:
        # Print only the second whitespace-separated token of the body.
        body = received.body
        print(body.split(' ')[1])
def start_processing():
    """Replay archived Event Hub capture blobs back into an Event Hub.

    Lists every blob under PATH_PREFIX in the capture container, downloads
    each non-empty '.json' blob, and re-sends its messages one at a time
    with MESSAGE_INTERVAL_S seconds between sends.
    """
    print(
        f"Connecting to Event Hub: {EVENT_HUB_NAME} within Event Hub Namespace {EVENT_HUB_NAMESPACE}"
    )
    # Create Event Hub client.
    sbs = ServiceBusService(service_namespace=EVENT_HUB_NAMESPACE,
                            shared_access_key_name=EVENT_HUB_SAS_NAME,
                            shared_access_key_value=EVENT_HUB_SAS_KEY)

    # Create blob access client.
    block_blob_service = BlockBlobService(account_name=STORAGE_ACCOUNT_NAME,
                                          account_key=STORAGE_SAS_KEY)
    print(f"Connecting to Blob Storage: {STORAGE_ACCOUNT_NAME}")
    print(f"Using path prefix: {PATH_PREFIX}")

    # List all blobs in the container under the configured prefix.
    blobs = block_blob_service.list_blobs(STORAGE_CONTAINER_NAME,
                                          prefix=PATH_PREFIX)
    print(f"Send message interval (ms): {MESSAGE_INTERVAL_S}")

    for blob in blobs:
        # Guard clauses replace the original deeply nested ifs.
        # TODO: Support "avro" later.
        if '.json' not in blob.name:
            continue
        # content_length == 508 is an empty capture file; skip those.
        if blob.properties.content_length <= 508:
            continue
        # Confirm the first extension really is 'json',
        # e.g. ['2017/06/28/15/0_248b3c7cb64342418a302475921f6665_1', 'json'].
        if blob.name.split('.')[1] != 'json':
            continue

        print('Downloading blob: ' + blob.name)
        print('This may take a while depending on filesize.')
        blob = block_blob_service.get_blob_to_bytes(STORAGE_CONTAINER_NAME,
                                                    blob.name)
        print('Downloaded blob: ' + blob.name)
        try:
            # Track progress on the blob object so the error message can
            # report how far we got before failing.
            blob.state = "downloaded"
            messages = json.loads(blob.content)
            blob.state = "parsed"
            messages_count = len(messages)
            print(f'Sending {messages_count} messages for {blob.name}')
            blob.state = "sending"
            # Split the capture blob into individual messages.
            for index, message in enumerate(messages):
                formatted_message = json.dumps(message)
                current_message_num = (index + 1)  # +1 for 1-based logging
                print(
                    f"Found Message {current_message_num} out of {messages_count} for {blob.name}"
                )
                sbs.send_event(EVENT_HUB_NAME, formatted_message)
                # TODO: does not account for Event Hub throttling when data
                # exceeds plan throughput; sleep after every sent message.
                time.sleep(MESSAGE_INTERVAL_S)
        # FIX: was a bare 'except:' which also swallowed SystemExit and
        # KeyboardInterrupt; narrow to Exception.
        except Exception:
            print(f'Error parsing: {blob.name} while in state: {blob.state}')
            print(f'Skipping: {blob.name}')
        print('Finished blob: ' + blob.name)
def __init__(self, namespace, keyname, keyval):
    """Build the wrapped ServiceBusService client from SAS credentials.

    namespace -- Service Bus namespace name
    keyname   -- shared access key name
    keyval    -- shared access key value
    """
    self._client = ServiceBusService(
        service_namespace=namespace,
        shared_access_key_name=keyname,
        shared_access_key_value=keyval)
# Every connection/path argument is mandatory.
for value in (args.namespace, args.queue, args.sb_key_name,
              args.sb_key_value, args.storage_mount_dir, args.video_name):
    assert value is not None

# Console-only logger for this entry point.
handler_format = get_handler_format()
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setFormatter(handler_format)
logger = logging.getLogger("root")
logger.setLevel(logging.DEBUG)
logger.addHandler(console_handler)
logger.propagate = False

bus_service = ServiceBusService(
    service_namespace=args.namespace,
    shared_access_key_name=args.sb_key_name,
    shared_access_key_value=args.sb_key_value,
)

# Enqueue the video's frames, optionally capped by queue_limit.
add_images_to_queue(
    mount_dir=args.storage_mount_dir,
    queue=args.queue,
    video_name=args.video_name,
    bus_service=bus_service,
    queue_limit=args.queue_limit,
)
from azure.servicebus import ServiceBusService
import perftest

# Connection settings live in ../config.json relative to this file.
config_path = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '../config.json'))
with open(config_path, 'r') as read_file:
    config = json.load(read_file)

topic_name = config['topic_name']
subscription_name = config['subscription_name']
service_namespace = config['service_namespace']
key_name = config['key_name']
key_value = config['key_value']

bus_service = ServiceBusService(service_namespace=service_namespace,
                                shared_access_key_name=key_name,
                                shared_access_key_value=key_value)
bus_service.create_subscription(topic_name, subscription_name)

# Run the synchronous consumer and report its receive rate once a second.
consumer = perftest.PerfConsumerSync(bus_service)
consumer.start()
for _ in range(1, 30):
    time.sleep(1)
    print("consumer received " + consumer.rate.print_rate())
consumer.stop()
import json from azure.servicebus import ServiceBusService import time # wells: bopd = 1062 mcfd = 12338 bwpd = 3170 pressure = 3830 max_level = 13.5 creds = json.load(open('/home/chaos/password.json')) json.dump({"state": True}, open('armageddon.json', 'w')) sbs = ServiceBusService(service_namespace='chaosMonkeys', shared_access_key_name='RootManageSharedAccessKey', shared_access_key_value=creds['password']) wells = [{ 'name': 'well1', 'location': { 'lat': 36.127927, 'long': -97.678902 }, 'multiplier': 1 }, { 'name': 'well2', 'location': { 'lat': 36.128802, 'long': -97.681702 },
from azure.servicebus import ServiceBusService, Message
import time


def dump(obj):
    """Debug helper: print every attribute of obj, one per line."""
    for attr in dir(obj):
        print("obj.%s = %s" % (attr, getattr(obj, attr)))


# Note the China-cloud host_base override.
bus_service = ServiceBusService(
    service_namespace='lumi001',
    shared_access_key_name='RootManageSharedAccessKey',
    shared_access_key_value='AFOA06OWcNUsDQyi8jHuqIhumuV5QN+jObsfv5QhNBM=',
    host_base='.servicebus.chinacloudapi.cn')

queue_name = 'temp_queue'
# queue_name = 'wechat.subscription.exp'
# bus_service.create_queue('temp_queue')

# Push 15 test messages, alternating UnFollow (even i) / Follow (odd i),
# one per second.
for i in range(15):
    if i % 2 == 0:
        msg_str = 'UnFollow#oQ_DVvyaYU_iFEpJCI5WW9lLkvG4@%s' % (time.ctime(
            time.time()))
    else:
        msg_str = 'Follow#oQ_DVvyaYU_iFEpJCI5WW9lLkvG4@%s' % (time.ctime(
            time.time()))
    print('push to queue: ', msg_str)
    bus_service.send_queue_message(queue_name, Message(msg_str.encode()))
    time.sleep(1)

# time.sleep(1)
# message = bus_service.receive_queue_message(queue_name)
import time
import sys
import socket

from azure.servicebus import ServiceBusService

# Event Hub credentials (placeholders).
key_name = "key_name"
key_value = "key"
sbs = ServiceBusService("flightdata",
                        shared_access_key_name=key_name,
                        shared_access_key_value=key_value)

# SBS-1 BaseStation feed (e.g. dump1090) on localhost.
TCP_IP = '127.0.0.1'
TCP_PORT = 30003
BUFFER_SIZE = 1024

while True:
    # One short-lived connection per read.
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((TCP_IP, TCP_PORT))
    data = s.recv(BUFFER_SIZE)
    s.close()
    # Trim the repr of the bytes: drop the leading "b'" and the last 5 chars.
    msg = str(data)
    msg = msg[2:]
    msg = msg[:-5]
    list = msg.split(",")  # NOTE(review): shadows the builtin 'list'
    result = ""
def mqttsub_agent(config_path, **kwargs): config = utils.load_config(config_path) def get_config(name): try: kwargs.pop(name) except KeyError: return config.get(name, '') service_namespace = settings.AZURE['servicebus']['service_namespace'] shared_access_key_name = settings.AZURE['servicebus'][ 'shared_access_key_name'] shared_access_key_value = settings.AZURE['servicebus'][ 'shared_access_key_value'] servicebus_topic = settings.gateway_id sbs = ServiceBusService(service_namespace=service_namespace, shared_access_key_name=shared_access_key_name, shared_access_key_value=shared_access_key_value) # DATABASES db_host = settings.DATABASES['default']['HOST'] db_port = settings.DATABASES['default']['PORT'] db_database = settings.DATABASES['default']['NAME'] db_user = settings.DATABASES['default']['USER'] db_password = settings.DATABASES['default']['PASSWORD'] gateway_id = settings.gateway_id class mqttsubAgent(Agent): """Listens to everything and publishes a heartbeat according to the heartbeat period specified in the settings module. 
""" def __init__(self, config_path, **kwargs): super(mqttsubAgent, self).__init__(**kwargs) self.config = utils.load_config(config_path) @Core.receiver('onsetup') def onsetup(self, sender, **kwargs): # Demonstrate accessing a value from the config file _log.info(self.config.get('message', DEFAULT_MESSAGE)) self._agent_id = self.config.get('agentid') @Core.receiver('onstart') def onstart(self, sender, **kwargs): _log.debug("VERSION IS: {}".format(self.core.version())) while True: try: servicebus_topic = 'hivedevhub7' print servicebus_topic msg = sbs.receive_subscription_message(servicebus_topic, 'client1', peek_lock=False) print msg if msg.body is not None: commsg = eval(msg.body) print commsg # print("message MQTT received datas") type_msg = str(commsg.get('type', None)) if type_msg.startswith( 'scene'): # TODO : Recheck condition again # print('Found scene') self.VIPPublishApplication(commsg, type_msg) elif type_msg == 'devicecontrol': # Execute Device Control Function # print("Device Cintrol Event") self.VIPPublishDevice(commsg) elif type_msg == 'login': self.VIPPublishApplication(commsg, type_msg) # TODO : Pub message again to Notifier agent to Store TOKEN VALUE self.vip.pubsub.publish( 'pubsub', '/ui/agent/update/notifier', message=json.dumps(commsg), ) home_path = expanduser("~") json_path = '/workspace/hive_os/volttron/token.json' automation_control_path = home_path + json_path launcher = json.load( open(home_path + json_path, 'r')) # load config.json to variable # Update new agentID to variable (agentID is relate to automation_id) self.updatetoken(commsg) elif type_msg == 'automationcreate': # Execute Create Automation Function # print("Create Automation Event") self.VIPPublishApplication(commsg, type_msg) elif type_msg == 'automationdelete': # Execute Delete Automation Function # print("Delete Automation Event") self.VIPPublishApplication(commsg, type_msg) elif type_msg == 'automationupdate': # Execute Update Automation Function # print("Update Automation 
Event") self.VIPPublishApplication(commsg, type_msg) else: pass # print "---------------------------------------" # print('Any Topic :') # print servicebus_topic # print commsg # print "---------------------------------------" else: pass # print servicebus_topic # print "No body message" except Exception as er: print er def updatetoken(self, commsg): try: conn = psycopg2.connect(host=db_host, port=db_port, database=db_database, user=db_user, password=db_password) self.conn = conn self.cur = self.conn.cursor() self.cur.execute("""SELECT * FROM token """) rows = self.cur.fetchall() nullrow = True for row in rows: if row[0] == gateway_id: nullrow = False self.api_token = row[1] self.conn.close() self.conn = psycopg2.connect(host=db_host, port=db_port, database=db_database, user=db_user, password=db_password) self.cur = self.conn.cursor() if nullrow == True: self.cur.execute( """INSERT INTO token (gateway_id, login_token, expo_token) VALUES (%s, %s, %s);""", (servicebus_topic, commsg['token'], commsg['token'])) self.cur.execute( """ UPDATE token SET login_token=%s, expo_token=%s WHERE gateway_id=%s """, (commsg['token'], commsg['token'], servicebus_topic)) self.conn.commit() self.conn.close() except Exception as er: print("Error in insertdb : {}".format(er)) def VIPPublishDevice(self, commsg): # TODO this is example how to write an app to control AC topic = str('/ui/agent/update/hive/999/' + str(commsg['device'])) message = json.dumps(commsg) # print ("topic {}".format(topic)) # print ("message {}".format(message)) self.vip.pubsub.publish('pubsub', topic, {'Type': 'HiVE Device to Gateway'}, message) def VIPPublishApplication(self, commsg, type_msg): topic = str('/ui/agent/update/hive/999/') + str(type_msg) message = json.dumps(commsg) # print ("topic {}".format(topic)) # print ("message {}".format(message)) self.vip.pubsub.publish('pubsub', topic, {'Type': 'HiVE Application to Gateway'}, message) Agent.__name__ = 'mqttsubAgent' return mqttsubAgent(config_path, **kwargs)
from datetime import datetime
from azure.mgmt.iothub import IotHubClient
from azure.servicebus import ServiceBusService, Message, Queue

# Load the 1-Wire kernel modules for the DS18B20 temperature probes.
os.system('modprobe w1-gpio')
os.system('modprobe w1-therm')

# Each DS18B20 shows up as a '28*' device directory in sysfs.
base_dir = '/sys/bus/w1/devices/'
device1_file = glob.glob(base_dir + '28*')[0] + '/w1_slave'
device2_file = glob.glob(base_dir + '28*')[1] + '/w1_slave'

key_name = "RootManageSharedAccessKey"
key_value = "MFIHIYoS7QmgaATst7IXAn272URV71y8+XT66GJZBiY="
sbs = ServiceBusService("BraneyBI",
                        shared_access_key_name=key_name,
                        shared_access_key_value=key_value)
sbs.create_queue("piQueue")


def read_temp_raw(dfile):
    """Return all lines of the w1_slave sysfs file for one sensor.

    FIX: uses a context manager so the handle is closed even if the read
    raises (the original leaked the file object on error).
    """
    with open(dfile, 'r') as f:
        return f.readlines()


def read_temp(dfile):
    # Re-read until the sensor reports a valid CRC ('YES' on line one).
    lines = read_temp_raw(dfile)
    while lines[0].strip()[-3:] != 'YES':
        time.sleep(0.2)
def __init__(self, namespace, key, issuer, name):
    """Wrap a ServiceBusService client plus retry policy.

    Retries back off exponentially: wait(count) = 2**count seconds.
    """
    # ACS-style auth (account key + issuer). The SAS variant is kept
    # commented out for reference.
    self.service = ServiceBusService(service_namespace=namespace,
                                     account_key=key,
                                     issuer=issuer)
    # self.service = ServiceBusService(service_namespace=namespace, shared_access_key_value=key, shared_access_key_name=issuer)
    self.name = name
    self.max_retries = 3
    self.wait = lambda count: 1.0 * (2 ** count)
# Create storage service from azure.storage import CloudStorageAccount storage_account = CloudStorageAccount( account_name=app.config['STORAGE_ACCOUNT_NAME'], account_key=app.config['STORAGE_ACCOUNT_KEY']) block_blob_service = storage_account.create_block_blob_service() # Create container from azure.storage.blob import PublicAccess block_blob_service.create_container('images', public_access=PublicAccess.Container) # Create service bus service from azure.servicebus import ServiceBusService, Message, Queue bus_service = ServiceBusService( service_namespace=app.config['SERVICEBUS_NAMESPACE'], shared_access_key_name=app.config['SERVICEBUS_ACCESS_KEYNAME'], shared_access_key_value=app.config['SERVICEBUS_ACCESS_KEYVALUE']) # Create queue bus_service.create_queue('adqueue', None, False) def CreateAdBlob(file): filename = RandomString(12) + splitext((file.filename))[1] block_blob_service.create_blob_from_stream('images', filename, file.stream) imageURL = 'https://' + app.config[ 'STORAGE_ACCOUNT_NAME'] + '.blob.core.windows.net/images/' + filename return imageURL def DeleteAdBlob(ad):
required=True, help='Set the service state') parser.add_argument('-T', '--summary', type=str, required=True, help='Set the summary') parser.add_argument('-D', '--datetime', type=str, required=True, help='Set the datetime') args = parser.parse_args() sbs = ServiceBusService(os.getenv('SERVICE_NAMESPACE'), shared_access_key_name=os.getenv('KEY_NAME'), shared_access_key_value=os.getenv('KEY_VALUE')) msg = Message(body="SQLGENERIC CRITICAL - 1", custom_properties={ 'notificationtype': args.notificationtype, 'hostname': args.hostname, 'service': args.service, 'address': args.address, 'state': args.state, 'summary': args.summary, 'datetime': args.datetime }) sbs.send_topic_message(os.getenv('TOPIC'), msg)
from azure.servicebus import ServiceBusService
from azure.servicebus import Message

sbs = ServiceBusService(
    service_namespace='SERVICEBUS_NAMESPACE',            # namespace name as created on the Azure portal
    shared_access_key_name='SHARED_ACCESS_POLICY_NAME',  # Shared Access Policy name from the Azure portal
    shared_access_key_value='SHARED_ACCESS_POLICY_KEY')  # Shared Access Policy key

# Repeat forever: read a message from the keyboard and publish it to the topic.
while True:
    text = input("Vaša poruka: ")  # user-facing prompt left in Croatian
    sbs.send_topic_message('SERVICE_BUS_TOPIC', Message(text))  # topic name as created on the Azure portal
import uuid import datetime import random import json from azure.servicebus import ServiceBusService sbs = ServiceBusService(service_namespace='INSERT YOUR NAMESPACE NAME', shared_access_key_name='RootManageSharedAccessKey', shared_access_key_value='INSERT YOUR KEY') devices = [] for x in range(0, 10): devices.append(str(uuid.uuid4())) for y in range(0, 20): for dev in devices: reading = { 'id': dev, 'timestamp': str(datetime.datetime.utcnow()), 'uv': random.random(), 'temperature': random.randint(70, 100), 'humidity': random.randint(70, 100) } s = json.dumps(reading) sbs.send_event('INSERT YOUR EVENT HUB NAME', s) print y
import json
import urllib2
import ConfigParser

from azure.servicebus import ServiceBusService, Rule

# Connection settings come from /root/config.ini, section [ServiceBus].
config = ConfigParser.SafeConfigParser()
config.read('/root/config.ini')
namespace = config.get('ServiceBus', 'Namespace')
key_name = config.get('ServiceBus', 'KeyName')
key_value = config.get('ServiceBus', 'KeyValue')

sbs = ServiceBusService(namespace,
                        shared_access_key_name=key_name,
                        shared_access_key_value=key_value)

sbs.create_subscription('notifications', 'ColdSpellNotifications_1')

# Optional SQL filter rule, kept for reference:
# rule = Rule()
# rule.filter_type = 'SqlFilter'
# rule.filter_type = "MessageType = 'ColdSpellEnteredNotification'"
# sbs.create_rule('notifications', 'ColdSpellNotifications_1', 'ColdSpellNotifications', rule)
# sbs.delete_rule('notifications', 'ColdSpellNotifications_1', DEFAULT_RULE_NAME)

# Poll the subscription forever, decoding each non-empty JSON body.
while True:
    msg = sbs.receive_subscription_message('notifications',
                                           'ColdSpellNotifications_1')
    if msg.body:
        obj = json.loads(msg.body)
## Logging class ## This class should be regarded as static. ## There are three logging methods, 'Verbose(...)', 'Informative(...)', and 'Failure(...)'. ## Depending on the value of Logging.OutStream (default is 'stdout') the output locations of these ## three may be variable. ## TODO: Implement 'cloud logging' from datetime import datetime from os import path from azure.servicebus import ServiceBusService, Message import sets import nearby_config _config = nearby_config.named('analysis_worker_config.json') _bus_service = ServiceBusService( service_namespace=_config.EVENTHUB_NAMESPACE, shared_access_key_name=_config.EVENTHUB_SA_KEY_NAME, shared_access_key_value=_config.EVENTHUB_SA_KEY_VALUE) class Logging: # used for print debugging # these behave similarly to static, btw CONFIG_VERBOSE_OUTPUT = True CONFIG_INFORMATIVE_OUTPUT = True OutStream = 'stdout' _firedEvents = set() @staticmethod def Verbose(value):
# Databricks notebook source
from azure.servicebus import ServiceBusService
import json
import random

# SharedAccessKeyName / SharedAccessKey come from the Azure portal; the key
# itself is read from a Databricks secret scope, never hard-coded.
key_name = 'RootManageSharedAccessKey'
key_value = dbutils.secrets.get("simple-demos", "keyValue")
EVENT_HUB_NAME = "sensor_hub"

sbs = ServiceBusService("mpfdemohubs.servicebus.windows.net",
                        shared_access_key_name=key_name,
                        shared_access_key_value=key_value)

# COMMAND ----------

# COMMAND ----------

# Serialize four random sensor readings as JSON and push them to the hub.
payload = json.dumps({
    "Sensor1": random.randint(100, 1000),
    "Sensor2": random.randint(100, 1000),
    "Sensor3": random.randint(100, 1000),
    "Sensor4": random.randint(100, 1000)
})
sbs.send_event(EVENT_HUB_NAME, payload)

# COMMAND ----------