def remoteWorker(queueName,worker,WorkFile): connection_ob = boto.connect_sqs(AWS_KEY,AWS_ACCESS_KEY) connection = boto.connect_dynamodb(AWS_KEY,AWS_ACCESS_KEY) myschema=connection.create_schema(hash_key_name='task_id',hash_key_proto_value='S') queue = connection_ob.create_queue(queueName) try: print "Creating Table.." table=connection.create_table(name='task_table', schema=myschema, read_units=100, write_units=100) print "Table Created Successfully...." except: print "Table already exist" msg = RawMessage() f = open(WorkFile) for line in iter(f): rand = random.randrange(0,9999) msg.set_body(line) msg.message_attributes = {"Values": { "data_type":"String", "string_value":str(rand) } } queue.write(msg) f.close() print "Data Inserted into Queue"
def queue(request, queue_name):
    """Django view for a single SQS queue: clear it, delete it, or add messages."""
    conn = connector()
    q = conn.get_all_queues(prefix=queue_name)[0]
    if request.POST:
        if '_clear' in request.POST:
            q.clear()
            return redirect('/sqs/queues/' + queue_name)
        # will have to wait for s3 integration
        #if '_dump' in request.POST:
        #    queue.save_to_s3('sqs_dump')
        elif '_delete' in request.POST:
            q.delete()
            return redirect('/sqs/queues/')
        form = AddMessageForm(request.POST)
        if form.is_valid():
            raw = RawMessage()
            raw.set_body(form.cleaned_data['message'])
            remaining = form.cleaned_data['count']
            # Write the same message body `count` times.
            while remaining:
                q.write(raw)
                remaining -= 1
            return redirect('/sqs/queues/' + queue_name)
    else:
        form = AddMessageForm()
    item = q.get_attributes()
    item['name'] = q.name
    return render(request, 'sqs/queue.html', {
        'queue': item,
        'form': form,
    })
def write(self, message):
    """ Add a raw message to the queue """
    self.setup_queue()
    raw = RawMessage()
    raw.set_body(message)
    self.queue.write(raw)
def write_message(self, q, msg="If you're sleepy and you know it; clap your hands!"):
    """Send *msg* to queue *q* as a RawMessage (default body is a test string)."""
    raw = RawMessage()
    raw.set_body(msg)
    q.write(raw)
def emit(self, record):
    """Format *record* as an elasticsearch-style JSON document and push it to self.q.

    If the log call passed a dict as its args, its 'customer' entry tags the
    message; otherwise the customer defaults to 'Sender'.
    """
    if isinstance(record.args, dict):
        record.customer = record.args['customer']
    else:
        record.customer = 'Sender'
    ct = '%Y/%m/%d %H:%M:%S'
    dtime = datetime.datetime.utcfromtimestamp(record.created)
    data = {}
    data['levelname'] = record.levelname
    data['asctime'] = dtime.strftime(ct)
    data['customer'] = record.customer
    # NOTE: ships the raw record.msg -- %-style args are not interpolated.
    data['message'] = record.msg
    data['name'] = record.name
    data['funcName'] = record.funcName
    data['filename'] = record.filename
    # Fix: the original re-parsed data['asctime'] with strptime only to
    # reformat it; format the datetime we already hold (strftime drops
    # microseconds either way, so the output is identical).
    data['@timestamp'] = dtime.strftime('%Y-%m-%dT%H:%M:%SZ')
    msg = {}
    msg['_id'] = str(uuid.uuid4())
    msg['_index'] = "sqs-river-" + datetime.datetime.now().strftime("%Y.%m")
    msg['_type'] = "mailexpress"
    msg['_data'] = data
    m = RawMessage()
    m.set_body(json.dumps(msg))
    self.q.write(m)
def add_match(users_email, users_name, users_id, match_email, match_name, match_user_id): """called one for each side of match""" try: data = { 'submitdate': strftime("%Y-%m-%dT%H:%M:%S", gmtime()), 'user': { 'email': users_email, 'name': users_name, 'user_id': users_id }, 'match': { 'email': match_email, 'name': match_name, 'user_id': match_user_id } } m = RawMessage() m.set_body(json.dumps(data)) # TODO: Add Logging status = q.write(m) return status except SQSError, e: #TODO ADD Logging return False
def enqueue(self, coord):
    """Serialize *coord* onto the SQS queue unless it is already in flight."""
    if self._inflight(coord):
        return
    msg = RawMessage()
    msg.set_body(serialize_coord(coord))
    self.sqs_queue.write(msg)
    self._add_to_flight(coord)
def sendToCustomsServer(self, ipaddress=None):
    """Send a ban notification for *ipaddress* to the customs SQS queue.

    The body is a JSON envelope whose 'Message' field is itself a
    JSON-encoded string of {'ban': {'ip': <ipaddress>}}.  Errors are
    reported on stderr rather than raised.
    """
    try:
        if ipaddress is None or self.options is None:
            return
        conn = boto.sqs.connect_to_region(
            self.options.region,
            aws_access_key_id=self.options.aws_access_key_id,
            aws_secret_access_key=self.options.aws_secret_access_key)
        queue = conn.get_queue(self.options.aws_queue_name)
        envelope = {'Message': json.dumps({'ban': {'ip': ipaddress}})}
        raw = RawMessage()
        raw.set_body(json.dumps(envelope))
        queue.write(raw)
        sys.stdout.write(
            'Sent {0} to customs server\n'.format(ipaddress))
    except Exception as e:
        sys.stderr.write('Error while sending to customs server %s: %r\n'
                         % (ipaddress, e))
def add_feed_to_queue(json_feed): m = RawMessage() try: m.set_body(json.dumps(json_feed, default=json_util.default)) feed_queue.write(m) except Exception, e: print traceback.format_exc() print json_feed
def main():
    """Queue a single hard-coded IIIF manifest URL onto the 'ocr0' queue in us-west-2."""
    conn = boto.sqs.connect_to_region("us-west-2")
    manifest_queue = conn.get_queue('ocr0')
    payload = {
        'manifest': 'https://tomcrane.github.io/scratch/manifests/ida/m1011-santa-fe-1910-30.json'
    }
    raw = RawMessage()
    raw.set_body(json.dumps(payload, indent=4))
    manifest_queue.write(raw)
def canvases_enqueue(queue, manifest_uri):
    """Write one {manifest, canvas} message to *queue* per canvas in the manifest."""
    manifest = canvas_processor.Manifest(manifest_uri)
    for canvas in manifest.canvases:
        body = {}
        body['manifest'] = manifest.requested.uri
        body['canvas'] = canvas
        raw = RawMessage()
        raw.set_body(json.dumps(body, indent=4))
        queue.write(raw)
def post_to_reprocess(self, message):
    """JSON-encode *message* and push it onto the reprocess queue (attached lazily).

    Bug fix: the original called json.dumps(message, 2), which passes 2 as
    the positional *skipkeys* argument rather than as indent.  indent=2 is
    what was clearly intended.
    """
    m = RawMessage()
    m.set_body(json.dumps(message, indent=2))
    if self.reprocess_queue is None:
        self.reprocess_queue = attach_queue(self.sqs_conn,
                                            self.reprocess_queue_name)
    self.reprocess_queue.write(m)
def insert_to_queue(queue, message_body, message_attributes):
    """Write *message_body* with *message_attributes* to *queue*; always returns None."""
    raw = RawMessage()
    raw.set_body(message_body)
    raw.message_attributes = message_attributes
    queue.write(raw)
    return None
def lambda_handler(event, context):
    """Register event['username'] / event['password'] in Mongo, reporting via SQS.

    Emits a {username: 'TRUE'/'FALSE'} status message on queue q and kicks
    off SES address verification for new users.

    Bug fix: the original passed a dict straight to RawMessage.set_body;
    SQS message bodies must be strings, so the dict is now JSON-encoded.
    """
    import json  # local import: this function previously used none
    users = db.users
    existing_user = users.find_one({'name': event['username']})
    if existing_user:
        m = RawMessage()
        m.set_body(json.dumps({str(event['username']): 'FALSE'}))
        q.write(m)
        return 'That inputEmail already exists!'
    print('creating user for', event['username'])
    # NOTE(review): named 'hashpass' but no hashing happens here -- confirm
    # the password is hashed upstream before relying on this.
    hashpass = event['password']
    users.insert({'name': event['username'], 'password': hashpass})
    m = RawMessage()
    m.set_body(json.dumps({str(event['username']): 'TRUE'}))
    q.write(m)
    verificationconn.verify_email_address(event['username'])
    return " Successful Registration"
def manifest_enqueue(manifest_uri): conn = boto.sqs.connect_to_region("us-west-2") manifest_queue = conn.get_queue('ocr0') print 'Working' msg = {} msg['manifest'] = manifest_uri m = RawMessage() m.set_body(json.dumps(msg, indent=4)) bar = manifest_queue.write(m) print bar
def on_data(self, tweet_data):
    """Streaming callback: geotag the incoming tweet and forward it to the SQS queue.

    Bug fixes: the tweet dict is JSON-encoded before set_body (SQS bodies
    must be strings -- the original passed the dict itself), and the bare
    except is narrowed to Exception so KeyboardInterrupt/SystemExit are no
    longer swallowed.  Failures remain ignored (best-effort streaming).
    """
    try:
        tweet = json.loads(tweet_data)
        tweet["location"] = getGeoCode(tweet)
        m = RawMessage()
        m.set_body(json.dumps(tweet))
        q.write(m)
        print(m)
    except Exception:
        pass
def addMessageToQueue(self, message, queue):
    """Wrap *message* (stamped with a uuid1 key) in JSON and write it to *queue*."""
    # Data required by the API
    payload = {
        'key': str(uuid.uuid1()),
        'date': str(message)
    }
    # Put the message in the queue
    raw = RawMessage()
    raw.set_body(json.dumps(payload))
    queue.write(raw)
def send(self, name):
    """Publish a {name, time} JSON message to the configured SQS queue."""
    target_queue = self.sqs.get_queue(self.queue)
    payload = {
        'name': name,
        'time': self.now,
    }
    if self.debug:
        self.pp.pprint(payload)
    message = RawMessage()
    message.set_body(json.dumps(payload))
    target_queue.write(message)
def write_message(self, connexion_settings, queue, message_data):
    """JSON-encode *message_data* and send it to *queue* on the configured SQS region."""
    body = json.dumps(message_data)
    self.logger.info("Sending message to lax: %s", body)
    conn = boto.sqs.connect_to_region(
        connexion_settings["sqs_region"],
        aws_access_key_id=connexion_settings["aws_access_key_id"],
        aws_secret_access_key=connexion_settings["aws_secret_access_key"])
    raw = RawMessage()
    raw.set_body(body)
    conn.get_queue(queue).write(raw)
def test_job_done_removes_tile_from_in_flight(self):
    """job_done must srem the tile's marshalled int from the in-flight redis set."""
    from tilequeue.tile import CoordMessage
    from tilequeue.tile import coord_marshall_int
    coord = Coordinate(row=1, column=1, zoom=1)
    message = RawMessage()
    message.set_body(serialize_coord(coord))
    self.sqs.job_done(CoordMessage(coord, message))
    expected = coord_marshall_int(coord)
    self.mockRedis.srem.assert_called_once_with(self.sqs.inflight_key,
                                                expected)
def post_parse_error(self, entity_line, traceback_exception):
    """Report a parse failure (offending line + exception text) to the parse-error queue.

    The queue is attached lazily on first use.

    Bug fix: json.dumps(message, 2) passed 2 as the positional *skipkeys*
    argument; indent=2 is what was intended.
    """
    message = {
        'line': entity_line,
        'exception': str(traceback_exception)
    }
    m = RawMessage()
    m.set_body(json.dumps(message, indent=2))
    if self.parse_error_queue is None:
        self.parse_error_queue = attach_queue(self.sqs_conn,
                                              self.parse_error_queue_name)
    self.parse_error_queue.write(m)
def notify_sqs(args, zonename):
    # Emit a hand-crafted autoscaling-style termination notification to SQS so
    # downstream consumers handle a manual termination like a real ASG event.
    sqs = boto.sqs.connect_to_region(args.region)
    instance = args.name + "." + zonename
    sqs_queue = "autoscaling"
    # Mimics an SNS envelope: the outer JSON's "Message" field is itself an
    # escaped JSON string (hence the \\" sequences).  Kept as a literal
    # template because the exact byte layout matters to consumers.
    manual_termination = '{"Type" : "Notification", "Subject" : "Manual: termination for instance ' + instance + '", "Message" : "{\\"Event\\":\\"manual:EC2_INSTANCE_TERMINATE\\",\\"EC2InstanceId\\":\\"' + instance + '\\"}"}'
    q = sqs.get_queue(sqs_queue)
    # RawMessage: send the body verbatim, no base64 encoding.
    q.set_message_class(RawMessage)
    m = RawMessage()
    m.set_body(manual_termination)
    q.write(m)
    print "SQS: Termination event sent for %s." % args.name
def add_message_to_queue(project, sha):
    """Queue a {project, sha} deploy request on the staging chatops-deployer queue."""
    # Data required by the API
    payload = {"project": project, "sha": sha}
    # Connect to SQS and open the queue
    conn = boto.connect_sqs(os.environ["AWS_ACCESS_KEY"],
                            os.environ["AWS_SECRET_KEY"])
    staging_queue = conn.create_queue("chatops-deployer-staging")
    # Put the message in the queue
    raw = RawMessage()
    raw.set_body(json.dumps(payload))
    staging_queue.write(raw)
def addMessageToQueue(message):
    """Timestamp *message*, tag it with a uuid1 key, and write it to the module queue."""
    payload = {
        'submitdate': time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()),
        'key': str(uuid.uuid1()),
        'message': str(message)
    }
    conn = boto.connect_sqs(AWSKey, AWSSecret)
    target = conn.create_queue(queue)
    raw = RawMessage()
    raw.set_body(json.dumps(payload))
    target.write(raw)
def notify_robot(userid, command, devicename, executedate): data = { 'requestdate': time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()), 'executedate': executedate, 'userid': userid, 'command': command, 'devicename': devicename } m = RawMessage() m.set_body(json.dumps(data)) status = queue.write(m) print 'your message sent : %s , status : %s, executedate:%s ' % ( m, status, executedate)
def addMessageToQueue(env, project, sha):
    """Queue a deploy request for *project* at *sha* on the chatops-deployer queue for *env*."""
    # Data required by the API
    payload = {"project": project, "sha": "{0}-{1}".format(env, sha)}
    # Connect to SQS and open the queue
    conn = boto.connect_sqs(os.environ["AWS_ACCESS_KEY"],
                            os.environ["AWS_SECRET_KEY"])
    deploy_queue = conn.create_queue("chatops-deployer-{0}".format(env))
    # Put the message in the queue
    raw = RawMessage()
    raw.set_body(json.dumps(payload))
    deploy_queue.write(raw)
def report_parse_exception(self, line, message):
    """Send a {line, exception} report to the parse-error queue, if one attaches."""
    # Local renamed from 'message' so the parameter is no longer shadowed.
    report = {
        'line': line,
        'exception': message,
    }
    error_queue = attach_queue(self.sqs_conn, self.parse_error_queue_name)
    if error_queue:
        raw = RawMessage()
        raw.set_body(json.dumps(report))
        error_queue.write(raw)
def enqueue(self, coord):
    """Queue *coord* on the zoom-appropriate SQS queue unless invalid or in flight."""
    if not coord_is_valid(coord):
        # TODO log?
        return
    coord_int = coord_marshall_int(coord)
    if self._inflight(coord):
        return
    message = RawMessage()
    message.set_body(serialize_coord(coord))
    queue_name = self.get_queue_name_for_zoom(coord.zoom)
    target_queue = self.sqs_queue_for_name.get(queue_name)
    assert target_queue, 'No queue found for: %s' % queue_name
    target_queue.write(message)
    self._add_to_flight(coord_int)
def enqueue(dstdir, tasks):
    """Queue a processing task for *dstdir* unless it is already pending.

    Drains the currently-visible messages from the work queue (recording
    their directories in the global PREVQUEUEITEMS set) to avoid queueing a
    duplicate, then writes a JSON {directory, tasklist} message.  Oversized
    task lists (>250000 bytes encoded) are replaced with the sentinel
    'too_long' so the worker falls back to a manual lookup.

    Fix over the original: the duplicated get_messages polling call (the
    loop-and-a-half pattern) is folded into a single while/break loop;
    behaviour is otherwise unchanged.
    """
    qconn = boto.sqs.connect_to_region(
        "us-east-1",
        aws_access_key_id=QUEUE_AWS_ACCESS_KEY,
        aws_secret_access_key=QUEUE_AWS_SECRET_KEY)
    logProcQueue = qconn.get_queue(QUEUE_NAME)
    if logProcQueue is None:
        print("Creating SQS Queue: %s with Key %s" %
              (QUEUE_NAME, QUEUE_AWS_ACCESS_KEY))
        logProcQueue = qconn.create_queue(QUEUE_NAME)
    data_out = {}
    data_out['directory'] = "%s/" % dstdir
    data_out['tasklist'] = tasks
    # Record every directory already waiting in the queue so we never add a
    # duplicate.  visibility_timeout=30 only hides each message briefly, so
    # the queue contents remain available to the real workers.
    while True:
        messages = logProcQueue.get_messages(
            visibility_timeout=30, wait_time_seconds=2, num_messages=10)
        if not messages:
            break
        for message in messages:
            data = json.loads(message.get_body())
            if len(data['directory']) > 0:
                PREVQUEUEITEMS.add(data['directory'])
    if data_out['directory'] in PREVQUEUEITEMS:
        print(
            "The directory \"%s\" is already in the processing queue, skipping"
            % data_out['directory'])
    else:
        json_tasks = json.dumps(data_out)
        if len(json_tasks) > 250000:
            # SQS caps message size; signal the worker to look the work up itself.
            print(
                "Task %s has too much data, going to send 'too_long' so the worker does a manual lookup of work to do"
                % (data_out['directory']))
            data_out = {}
            data_out['directory'] = "%s/" % dstdir
            data_out['tasklist'] = "too_long"
            json_tasks = json.dumps(data_out)
        queuemessage = RawMessage()
        queuemessage.set_body(json_tasks)
        print("Enqueing Task %s" % data_out['directory'])
        logProcQueue.write(queuemessage)
    qconn.close()
def enQueueNonCompletedDirectory(directory):
    """Re-queue *directory* (yyyy/mm/dd) for re-processing; no-op in manual-date mode."""
    if DATE_TO_PROCESS is not False:
        # Manual mode: never re-schedule automatically.
        return
    qconn = boto.sqs.connect_to_region(
        "us-east-1",
        aws_access_key_id=QUEUE_AWS_ACCESS_KEY,
        aws_secret_access_key=QUEUE_AWS_SECRET_KEY)
    logProcQueue = qconn.get_queue(INCOMPLETE_TASKS_QUEUE_NAME)
    if logProcQueue is None:
        print("Creating SQS Queue: %s with Key %s" %
              (INCOMPLETE_TASKS_QUEUE_NAME, QUEUE_AWS_ACCESS_KEY))
        logProcQueue = qconn.create_queue(INCOMPLETE_TASKS_QUEUE_NAME)
    payload = {}
    payload['directory'] = directory  # in format of yyyy/mm/dd
    raw = RawMessage()
    raw.set_body(json.dumps(payload))
    print("Enqueing Directory (YYYY/MM/DD) %s for re-scheduling and re-processing due to incomplete processing with me" % payload['directory'])
    logProcQueue.write(raw)