def run():
    """Worker loop: consume 'GredientRequest' messages of the form
    '<food>_<location>_<responseQueueName>' and reply on the caller's
    response queue with the nearest market carrying <food>.

    Runs forever; polls one message at a time.
    """
    conn = boto.sqs.connect_to_region("us-west-2")
    requestQueue = conn.get_queue('GredientRequest')
    requestQueue.set_message_class(RawMessage)
    while (True):
        rs = requestQueue.get_messages(1)
        if (len(rs) != 0):
            m = rs[0]
            body = m.get_body()
            # Body is underscore-separated: foodName_here_responseQueueName
            foodName = body.split('_')[0]
            # NOTE(review): the request is deleted before processing — a crash
            # below silently drops it. Confirm this at-most-once behavior is intended.
            requestQueue.delete_message(m)
            here = body.split('_')[1]
            responseQueue = conn.get_queue(body.split('_')[2])
            if responseQueue is not None:
                responseQueue.set_message_class(RawMessage)
                response = RawMessage()
                markets = database.getMarkets(foodName)
                if len(markets) == 0:
                    response.set_body(foodName+" is not found in our database!!")
                else:
                    nearest = getNearestMarket(here, markets)
                    name = nearest['Name']
                    address = nearest['Address']
                    dis = nearest['Distance']
                    # Reply format: food @ marketName @ address @ distance
                    response.set_body(foodName+" @ "+name+" @ "+address + " @ "+ dis)
                responseQueue.write(response)
def remoteWorker(queueName,worker,WorkFile): connection_ob = boto.connect_sqs(AWS_KEY,AWS_ACCESS_KEY) connection = boto.connect_dynamodb(AWS_KEY,AWS_ACCESS_KEY) myschema=connection.create_schema(hash_key_name='task_id',hash_key_proto_value='S') queue = connection_ob.create_queue(queueName) try: print "Creating Table.." table=connection.create_table(name='task_table', schema=myschema, read_units=100, write_units=100) print "Table Created Successfully...." except: print "Table already exist" msg = RawMessage() f = open(WorkFile) for line in iter(f): rand = random.randrange(0,9999) msg.set_body(line) msg.message_attributes = {"Values": { "data_type":"String", "string_value":str(rand) } } queue.write(msg) f.close() print "Data Inserted into Queue"
def enqueueNotification(phoneNumber, name, messageType = 'text'):
    """Queue a notification for *phoneNumber* as a JSON SQS message.

    AWS region/queue/credentials come from settings.cfg. Numbers longer than
    10 digits have their leading country-code digit stripped.
    """
    config = configparser.RawConfigParser()
    config.read(os.path.join(os.getcwd() + '/django_drf_starter_project/scripts/', 'settings.cfg'))
    region = config.get('AWS', 'region')
    queueName = config.get('AWS', 'queue')
    accessKey = config.get('AWS', 'accessKey')
    secretKey = config.get('AWS', 'secretKey')
    # SECURITY fix: removed pprint(accessKey)/pprint(secretKey) — printing AWS
    # credentials to stdout leaks them into logs.
    conn = boto.sqs.connect_to_region(region,
                                      aws_access_key_id=accessKey,
                                      aws_secret_access_key=secretKey)  ##change before final
    q = conn.create_queue(queueName)  # 10-second message visibility
    if len(phoneNumber) > 10:
        phoneNumberNoCountryCode = phoneNumber[1:]
    else:
        phoneNumberNoCountryCode = phoneNumber
    data = {
        'phoneNumber': str(phoneNumberNoCountryCode),
        'name': str(name),
        'type': str(messageType)
    }
    m = RawMessage()
    m.set_body(json.dumps(data))
    q.write(m)
def sendToCustomsServer(self, ipaddress=None):
    """Send a ban request for *ipaddress* to the customs-server SQS queue.

    The message body is double-encoded JSON:
        '{"Message": {"ban": {"ip": "192.168.0.2"}}}'
    encoded like this:
        {"Message":"{\"ban\":{\"ip\":\"192.168.0.2\"}}"}
    Any failure is reported to stderr instead of being raised.
    """
    try:
        # Both the target IP and the AWS options must be present.
        if ipaddress is not None and self.options is not None:
            conn = boto.sqs.connect_to_region(
                self.options.region,
                aws_access_key_id=self.options.aws_access_key_id,
                aws_secret_access_key=self.options.aws_secret_access_key)
            queue = conn.get_queue(self.options.aws_queue_name)
            # Inner payload is serialized separately so the consumer sees
            # Message as a JSON *string*, not a nested object.
            banMessage = dict(
                Message=json.dumps(dict(ban=dict(ip=ipaddress))))
            m = RawMessage()
            m.set_body(json.dumps(banMessage))
            queue.write(m)
            sys.stdout.write(
                'Sent {0} to customs server\n'.format(ipaddress))
    except Exception as e:
        sys.stderr.write('Error while sending to customs server %s: %r\n' % (ipaddress, e))
def queue(request, queue_name):
    """Django view for a single SQS queue.

    GET renders the queue's attributes plus an add-message form.
    POST handles three actions: '_clear' purges the queue, '_delete'
    removes it, otherwise the submitted form writes `count` copies of
    the message to the queue.
    """
    conn = connector()
    # Prefix lookup; assumes the first match is the intended queue.
    queue = conn.get_all_queues(prefix=queue_name)[0]
    if request.POST:
        if '_clear' in request.POST:
            queue.clear()
            return redirect('/sqs/queues/' + queue_name)
        # will have to wait for s3 integration
        #if '_dump' in request.POST:
        #    queue.save_to_s3('sqs_dump')
        elif '_delete' in request.POST:
            queue.delete()
            return redirect('/sqs/queues/')
        form = AddMessageForm(request.POST)
        if form.is_valid():
            m = RawMessage()
            m.set_body(form.cleaned_data['message'])
            count = form.cleaned_data['count']
            # Write the same message `count` times.
            while count:
                queue.write(m)
                count -= 1
            return redirect('/sqs/queues/' + queue_name)
    else:
        form = AddMessageForm()
    item = queue.get_attributes()
    item['name'] = queue.name
    return render(request, 'sqs/queue.html', {
        'queue': item,
        'form': form,
    })
def connectAndWriteToSQS(queue, data):
    """Create (or reuse) SQS queue *queue* in us-west-1 and write *data*
    to it as a JSON raw message."""
    # Fix: removed dead `sqs = boto.connect_sqs()` — it was immediately
    # overwritten by the regional connection and only wasted a round-trip.
    sqs = boto.sqs.connect_to_region('us-west-1')
    q = sqs.create_queue(queue)
    m = RawMessage()
    m.set_body(json.dumps(data))
    q.write(m)
def commentsubmit(req):
    """Persist a submitted comment to SimpleDB and queue it for processing.

    Writes the comment (status 'processing') to the 'comment' domain under a
    fresh UUID, then pushes a JSON request onto the 'commentprocess' SQS
    queue. Returns a JSON response with the comment key on success.
    """
    sdb = boto.connect_sdb(AWSKey, AWSSecret)
    domain = sdb.get_domain('comment')
    form = req.form
    imagekey = form['imagekey']
    user = form['commentuser']
    cmt = form['comment']
    import uuid
    from time import strftime
    guid = str(uuid.uuid1())
    item = domain.new_item(guid)
    item['submituser'] = user
    item['imagekey'] = imagekey
    item['comment'] = cmt
    item['status'] = "processing"
    item['submitdate'] = strftime("%Y-%m-%dT%H:%M:%S")
    item.save()
    sqsconn = SQSConnection(AWSKey, AWSSecret)
    q = sqsconn.get_queue('commentprocess')
    request = {}
    request['commentkey'] = guid
    request['submitdate'] = strftime("%Y-%m-%dT%H:%M:%S")
    request['comment'] = str(cmt)
    request['submituser'] = str(user)
    m = RawMessage()
    # NOTE(review): json.write is presumably the legacy python-json API — confirm.
    m.set_body(json.write(request))
    status = q.write(m)
    response = {}
    # boto's Queue.write returns the message object on success, so a
    # successful write compares equal (identical) to m.
    if status==m:
        response['complete'] = True
        response['commentkey'] = guid
    else:
        response['complete'] = False
    return json.write(response)
def emit(self, record):
    """Logging handler hook: wrap *record* in an elasticsearch-style JSON
    envelope and push it to this handler's SQS queue (self.q)."""
    # A dict in record.args carries structured context (the customer name);
    # anything else falls back to the literal 'Sender'.
    if isinstance(record.args, dict):
        record.customer = record.args['customer']
    else:
        record.customer = 'Sender'
    #formatted_record = self.format(record)
    ct = '%Y/%m/%d %H:%M:%S'
    dtime = datetime.datetime.utcfromtimestamp(record.created)
    data = {}
    data['levelname'] = record.levelname
    data['asctime'] = dtime.strftime(ct)
    data['customer'] = record.customer
    data['message'] = record.msg
    data['name'] = record.name
    data['funcName'] = record.funcName
    data['filename'] = record.filename
    # Re-parse the formatted time to produce an ISO-8601 '@timestamp'.
    data['@timestamp'] = datetime.datetime.strptime(
        data['asctime'], '%Y/%m/%d %H:%M:%S').strftime('%Y-%m-%dT%H:%M:%SZ')
    msg = {}
    msg['_id'] = str(uuid.uuid4())
    # Index name rotates monthly, e.g. "sqs-river-2024.01".
    msg['_index'] = "sqs-river-" + datetime.datetime.now().strftime(
        "%Y.%m")
    msg['_type'] = "mailexpress"
    msg['_data'] = data
    #print json.dumps(msg)
    m = RawMessage()
    m.set_body(json.dumps(msg))
    self.q.write(m)
def enqueue(self, coord):
    """Queue *coord* for processing unless it is already in flight."""
    if self._inflight(coord):
        return
    msg = RawMessage()
    msg.set_body(serialize_coord(coord))
    self.sqs_queue.write(msg)
    self._add_to_flight(coord)
def WriteToWriteQueue(self, action, endpoint, body, headers, message_id, output_queue): writeQ = self.conn.get_queue(str(output_queue)) m = RawMessage() response ='' if (action == 'GET'): response = requests.get(endpoint, headers=headers) elif (action == 'PUT'): response = requests.put(endpoint, data = json.dumps(body), headers=headers) elif(action == 'POST'): response = requests.post(endpoint, data = json.dumps(body), headers=headers) elif (action == 'DELETE'): response = requests.delete(endpoint, headers=headers) print response.text response_final = { 'messageId': str(message_id), 'response': str(response.text) } m.set_body(json.dumps(response_final)) writeQ.write(m)
def write_to_que(data):
    """Serialize *data* as JSON and drop it on the 'email_queue' SQS queue
    in us-east-1."""
    connection = boto.sqs.connect_to_region("us-east-1")
    email_queue = connection.get_queue('email_queue')
    message = RawMessage()
    message.set_body(json.dumps(data))
    email_queue.write(message)
def add_match(users_email, users_name, users_id, match_email, match_name, match_user_id):
    """called one for each side of match"""
    try:
        payload = {
            'submitdate': strftime("%Y-%m-%dT%H:%M:%S", gmtime()),
            'user': {
                'email': users_email,
                'name': users_name,
                'user_id': users_id,
            },
            'match': {
                'email': match_email,
                'name': match_name,
                'user_id': match_user_id,
            },
        }
        message = RawMessage()
        message.set_body(json.dumps(payload))
        # TODO: Add Logging
        return q.write(message)
    except SQSError:
        #TODO ADD Logging
        return False
def append_to_queue(content, queue, raw=False):
    '''Queues a message to SQS with a specific message'''
    # raw=True sends a RawMessage (no base64 wrapping); otherwise a Message.
    outgoing = RawMessage() if raw else Message()
    outgoing.set_body(json.dumps(content))
    return queue.write(outgoing)
def sendMessageToInputQueue(q, anim_name, frame_file, type, userid):
    """Write a 'Frame' or 'killCommand' work item to input queue *q* as JSON.

    Each payload carries a submit timestamp and a fresh UUID key.
    """
    # Data required by the API
    if type == 'Frame':
        data = {
            'msgtype': str(type),
            'submitdate': time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()),
            'key': str(uuid.uuid1()),  #secret key/anwer to s3 instead of boto config file
            'userid': str(userid),  #or bucketname attribute
            'anim_name': str(anim_name),
            'frame_file': str(frame_file)
        }
    elif type == 'killCommand':
        data = {
            'msgtype': str(type),
            'submitdate': time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()),
            'key': str(uuid.uuid1()),  #secret key/anwer to s3 instead of boto config file
            'userid': str(userid),  #or bucketname attribute
            # NOTE(review): `message` is not defined in this function — this
            # branch raises NameError unless a module-level `message` exists.
            'command': str(message)
        }
    # NOTE(review): if `type` is neither value, `data` is unbound below.
    # Connect to SQS and open the queue
    # Put the message in the queue
    m = RawMessage()
    m.set_body(json.dumps(data))
    status = q.write(m)
def write(self, message):
    """
    Add a raw message to the queue
    """
    self.setup_queue()
    outgoing = RawMessage()
    outgoing.set_body(message)
    self.queue.write(outgoing)
def write_message(self, q, msg="If you're sleepy and you know it; clap your hands!" ):
    """Wrap *msg* in a RawMessage and push it onto queue *q*."""
    outgoing = RawMessage()
    outgoing.set_body(msg)
    q.write(outgoing)
def ApprovalProcess(msg): data = dict() # The messages received from the imageresult queue # ???? format ??? str or int data['imagekey'] = str(msg['imagekey']) data['imageheight'] = str(msg['imageheight']) data['imagewidth'] = str(msg['imagewidth']) # retrive rest data from simpleDB domain = _getImageDomain() item = domain.get_item(msg['imagekey']) data['imageURL'] = item['imageurl'] data['tag'] = item['tag'] data['description'] = item['description'] data['submituser'] = item['submituser'] data['submitdate'] = item['submitdate'] # Connect to SQS and create the approval process queue sqs = boto.connect_sqs(AWSKey, AWSSecret) q = sqs.create_queue(approvalprocessQueue) # Put the message in the queue # m is raw message object # queue only accept message object m = RawMessage() m.set_body(json.dumps(data)) # add comment to Queue status = q.write(m)
def emit(self, record):
    """Logging handler hook: wrap *record* in an elasticsearch-style JSON
    envelope and push it to this handler's SQS queue (self.q)."""
    # A dict in record.args carries structured context (the customer name);
    # anything else falls back to the literal 'Sender'.
    if isinstance(record.args, dict):
        record.customer = record.args['customer']
    else:
        record.customer = 'Sender'
    #formatted_record = self.format(record)
    ct = '%Y/%m/%d %H:%M:%S'
    dtime = datetime.datetime.utcfromtimestamp(record.created)
    data = {}
    data['levelname'] = record.levelname
    data['asctime'] = dtime.strftime(ct)
    data['customer'] = record.customer
    data['message'] = record.msg
    data['name'] = record.name
    data['funcName'] = record.funcName
    data['filename'] = record.filename
    # Re-parse the formatted time to produce an ISO-8601 '@timestamp'.
    data['@timestamp'] = datetime.datetime.strptime(data['asctime'], '%Y/%m/%d %H:%M:%S').strftime('%Y-%m-%dT%H:%M:%SZ')
    msg = {}
    msg['_id'] = str(uuid.uuid4())
    # Index name rotates monthly, e.g. "sqs-river-2024.01".
    msg['_index'] = "sqs-river-" + datetime.datetime.now().strftime("%Y.%m")
    msg['_type'] = "mailexpress"
    msg['_data'] = data
    #print json.dumps(msg)
    m = RawMessage()
    m.set_body(json.dumps(msg))
    self.q.write(m)
def write_msg(self, queue, msg):
    """
    Write SQS message as base64-encoded JSON.
    """
    encoded = base64.b64encode(json.dumps(msg))
    raw = RawMessage()
    raw.set_body(encoded)
    target = self.conn.lookup(queue)
    msg_sent = target.write(raw)
    print("Sent Msg; queue={0}, msg={1.id}".format(queue, msg_sent))
    return msg_sent
def add_feed_to_queue(json_feed): m = RawMessage() try: m.set_body(json.dumps(json_feed, default=json_util.default)) feed_queue.write(m) except Exception, e: print traceback.format_exc() print json_feed
def setSQS(data, queue):
    """JSON-encode *data* and write it to *queue* as a raw SQS message
    (queues only accept message objects)."""
    outgoing = RawMessage()
    outgoing.set_body(json.dumps(data))
    queue.write(outgoing)
def main():
    """Seed the 'ocr0' queue in us-west-2 with a single manifest URI."""
    conn = boto.sqs.connect_to_region("us-west-2")
    ocr_queue = conn.get_queue('ocr0')
    payload = {
        'manifest': 'https://tomcrane.github.io/scratch/manifests/ida/m1011-santa-fe-1910-30.json',
    }
    message = RawMessage()
    message.set_body(json.dumps(payload, indent=4))
    ocr_queue.write(message)
def to_queue(obj, queue_name):
    """Send *obj* as JSON to the named SQS queue; in a local environment
    just log the queue name instead."""
    if environment.local:
        print('[queue] {}'.format(queue_name))
        return
    target = sqs.get_queue(queue_name)
    message = RawMessage()
    message.set_body(json.dumps(obj))
    target.write(message)
def delete(self, queue_name, msg_id):
    """
    Delete the message identified by *msg_id* from *queue_name*.

    No longer used — frontier clients now delete their own messages;
    kept only to satisfy the queue interface.
    """
    stub = SqsMessage()
    stub.receipt_handle = msg_id
    stub.queue = self._queues_by_name[queue_name].queue
    stub.delete()
def post_to_reprocess(self, message):
    """Serialize *message* and post it to the reprocess queue, attaching
    the queue lazily on first use."""
    m = RawMessage()
    # Fix: json.dumps(message, 2) passed 2 as the positional `skipkeys`
    # argument; `indent=2` was clearly the intent.
    m.set_body(json.dumps(message, indent=2))
    if self.reprocess_queue is None:
        self.reprocess_queue = attach_queue(self.sqs_conn, self.reprocess_queue_name)
    self.reprocess_queue.write(m)
def sendCompleteToClientQueue(q, bucketName, status):
    """Notify the client queue *q* that processing of *bucketName*
    finished with the given *status*."""
    message = RawMessage()
    message.set_body(json.dumps({
        'submitdate': time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()),
        'bucketname': str(bucketName),
        'status': status,
    }))
    q.write(message)
def insert_to_queue(queue, message_body, message_attributes):
    """Write *message_body* to *queue* with the given SQS message
    attributes attached."""
    outgoing = RawMessage()
    outgoing.set_body(message_body)
    outgoing.message_attributes = message_attributes
    queue.write(outgoing)
    return None
def sendMessageToClientQueue(q, bucketName, uploadProgress):
    """Report *uploadProgress* for *bucketName* to the client queue *q*."""
    message = RawMessage()
    message.set_body(json.dumps({
        'submitdate': time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()),
        'bucketname': str(bucketName),
        'uploadprogress': uploadProgress,
    }))
    q.write(message)
def AddUrl(self, url):
    """
    Adds the given URL to the pending queue

    url - The url to add to the queue
    """
    pending = RawMessage()
    pending.set_body(url)
    self.URLQueue.write(pending)
def AddData(self, data):
    """
    Adds the given scraped data to the queue

    data - A dictionary of scrape data to add to the queue
    """
    outgoing = RawMessage()
    outgoing.set_body(json.dumps(data))
    self.DataQueue.write(outgoing)
def add_to_que(data):
    """Publish *data* as JSON to the 'chompy' queue in us-east-1,
    creating the queue if it does not yet exist."""
    conn = boto.sqs.connect_to_region("us-east-1")
    # create_queue returns the existing queue when it is already there
    chompy = conn.create_queue('chompy')
    message = RawMessage()
    message.set_body(json.dumps(data))
    chompy.write(message)
def canvases_enqueue(queue, manifest_uri):
    """Queue one message per canvas of the IIIF manifest at *manifest_uri*,
    each carrying the manifest URI and the canvas."""
    manifest = canvas_processor.Manifest(manifest_uri)
    for canvas in manifest.canvases:
        message = RawMessage()
        message.set_body(json.dumps({
            'manifest': manifest.requested.uri,
            'canvas': canvas,
        }, indent=4))
        queue.write(message)
def do_activity(self, data=None):
    """ Do the work: prepare a 'publish' action message for the article in
    *data* and send it to Lax via the configured SQS queue.

    Returns True on success or when Lax is not considered; False (after an
    'error' monitor event) if preparing/sending the message fails.
    """
    if self.logger:
        self.logger.info('data: %s' % json.dumps(data, sort_keys=True, indent=4))
    ###########
    # Feature flag: skip entirely when Lax 2.0 is not being considered.
    if not self.settings.consider_Lax_elife_2_0:
        if self.logger:
            self.logger.info('PublishToLax. Lax is not being considered. Skipping activity.')
        return True
    ###########
    article_id = data['article_id']
    version = data['version']
    run = data['run']
    workflow_data = self.get_workflow_data(data)
    status = workflow_data['status']
    eif_location = workflow_data['eif_location']
    expanded_folder = workflow_data['expanded_folder']
    self.emit_monitor_event(self.settings, article_id, version, run,
                            "Publish To Lax", "start",
                            "Starting preparation of article for Lax " + article_id)
    try:
        # Optional 'force' flag in the incoming data forces the publish.
        force = True if ("force" in data and data["force"] == True) else False
        message = lax_provider.prepare_action_message(self.settings,
                                                      article_id, run, expanded_folder, version,
                                                      status, eif_location, 'publish', force)
        message_body = json.dumps(message)
        self.logger.info("Sending message to lax: %s", message_body)
        sqs_conn = boto.sqs.connect_to_region(
            self.settings.sqs_region,
            aws_access_key_id=self.settings.aws_access_key_id,
            aws_secret_access_key=self.settings.aws_secret_access_key)
        out_queue = sqs_conn.get_queue(self.settings.xml_info_queue)
        m = RawMessage()
        m.set_body(message_body)
        out_queue.write(m)
        #########
    except Exception as e:
        # Any failure is reported via a monitor 'error' event, not re-raised.
        self.logger.exception("Exception when Preparing Publish action for Lax")
        self.emit_monitor_event(self.settings, article_id, version, run,
                                "Publish To Lax", "error",
                                "Error preparing or sending message to lax" + article_id +
                                " message:" + str(e.message))
        return False
    self.emit_monitor_event(self.settings, article_id, version, run,
                            "Publish To Lax", "end",
                            "Finished preparation of article for Lax " + article_id)
    return True
def manifest_enqueue(manifest_uri): conn = boto.sqs.connect_to_region("us-west-2") manifest_queue = conn.get_queue('ocr0') print 'Working' msg = {} msg['manifest'] = manifest_uri m = RawMessage() m.set_body(json.dumps(msg, indent=4)) bar = manifest_queue.write(m) print bar
def on_data(self, tweet_data):
    """Stream callback: geocode the incoming tweet JSON and forward it to
    the module-level queue `q`. Malformed tweets are dropped silently."""
    try:
        tweet = json.loads(tweet_data)
        tweet["location"] = getGeoCode(tweet)
        m = RawMessage()
        # Fix: set_body was given the dict itself; SQS bodies must be
        # strings, so serialize the tweet back to JSON.
        m.set_body(json.dumps(tweet))
        q.write(m)
        print(m)
    except Exception:
        # Deliberate best-effort: skip tweets that fail to parse/geocode/send.
        # (Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt escape.)
        pass
def connectAndWriteToSQS(queue, data):
    """Create (or reuse) SQS queue *queue* in us-west-1 and write *data*
    to it as a JSON raw message."""
    #print('In AWS Commons..writng to q: %s' % queue)
    #print('In AWS Commons..Data: %s' % data)
    # Fix: removed dead `sqs = boto.connect_sqs()` — it was immediately
    # overwritten by the regional connection and only wasted a round-trip.
    sqs = boto.sqs.connect_to_region('us-west-1')
    q = sqs.create_queue(queue)
    m = RawMessage()
    # time.sleep(10)
    m.set_body(json.dumps(data))
    q.write(m)
def submitimage(req):
    """Handle an image submission: save the upload locally, copy it to S3,
    record metadata in SimpleDB (status 'processing'), and queue the key
    on the 'imageprocess' SQS queue. Returns a JSON status response."""
    sdb = boto.connect_sdb(AWSKey, AWSSecret)
    domain = sdb.get_domain('picture')
    form = req.form
    tags = str(form['tags'])
    user = str(form['submituser'])
    description = str(form['description'])
    fileitem = form['image']
    import uuid
    from time import strftime
    guid = str(uuid.uuid1())
    item = domain.new_item(guid)
    try:
        # Windows needs stdio set for binary mode.
        import msvcrt
        msvcrt.setmode (0, os.O_BINARY) # stdin = 0
        msvcrt.setmode (1, os.O_BINARY) # stdout = 1
    except ImportError:
        pass
    # strip leading path from file name to avoid directory traversal attacks
    fname = os.path.basename(fileitem.filename)
    # build absolute path to files directory
    dir_path = os.path.join(os.path.dirname(req.filename), 'files')
    # NOTE(review): file handle is never closed explicitly; relies on refcounting.
    open(os.path.join(dir_path, fname), 'wb').write(fileitem.file.read())
    from boto.s3.connection import S3Connection
    conn = S3Connection(AWSKey, AWSSecret)
    bucket = conn.get_bucket('theimageproject')
    from boto.s3.key import Key
    k = Key(bucket)
    # S3 object is keyed by the generated GUID, assumed JPEG.
    k.key = guid + ".jpg"
    k.set_contents_from_filename(os.path.join(dir_path, fname))
    curtime = strftime("%Y-%m-%dT%H:%M:%S")
    item['description'] = description
    item['submituser'] = user
    item['submitdate'] = curtime
    item['rating'] = 0
    item['ratingcount'] = 0
    # Composite sort key: rating prefix + submit time.
    item['ratesort'] = "%s%s" % (0, curtime)
    item['status'] = "processing"
    item['tag'] = tags.split(',')
    item.save()
    sqsconn = SQSConnection(AWSKey, AWSSecret)
    q = sqsconn.get_queue('imageprocess')
    request = {}
    request['imagekey'] = guid
    request['submitdate'] = curtime
    m = RawMessage()
    # NOTE(review): json.write is presumably the legacy python-json API — confirm.
    m.set_body(json.write(request))
    status = q.write(m)
    response = {}
    # boto's Queue.write returns the message object on success.
    if status==m:
        response['success'] = True
        response['imagekey'] = guid
    else:
        response['complete'] = False
    return json.write(response)
def handle_error(e, message):
    """Log exception *e*, annotate the failed *message* with the exception
    text, and forward it to the module-level errorQueue."""
    console_log("exception: %s" % str(e))
    body = message.get_effective_message()
    body['exception'] = str(e)
    outgoing = RawMessage()
    outgoing.set_body(str(json.dumps(body)))
    errorQueue.write(outgoing)
def addMessageToQueue(self, message, queue):
    """Wrap *message* with a fresh UUID key and enqueue it as JSON on
    *queue* (data shape required by the API)."""
    payload = {
        'key': str(uuid.uuid1()),
        'date': str(message),
    }
    outgoing = RawMessage()
    outgoing.set_body(json.dumps(payload))
    queue.write(outgoing)
def addMessage(message):
    """Stamp *message* with a submit date and UUID key and write it to the
    module-level queue `q` as JSON."""
    data = {
        'submitdate': time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()),
        'key': str(uuid.uuid1()),
        'message': str(message)
    }
    # Fix: removed dead `conn = SQSConnection()` — it was immediately
    # overwritten by boto.connect_sqs() and only opened an extra connection.
    conn = boto.connect_sqs()
    # NOTE(review): `conn` is unused below; `q` must be a module-level queue.
    m = RawMessage()
    m.set_body(json.dumps(data))
    status = q.write(m)
def send(self, name):
    """Publish a {name, time} JSON message to the configured SQS queue,
    pretty-printing it first when debug is enabled."""
    target = self.sqs.get_queue(self.queue)
    payload = {
        'name': name,
        'time': self.now,
    }
    if self.debug:
        self.pp.pprint(payload)
    outgoing = RawMessage()
    outgoing.set_body(json.dumps(payload))
    target.write(outgoing)
def notify_worker(id, sizes): data = { 'submitdate' : time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()), 'id' : id, 'sizes' :sizes } m = RawMessage() m.set_body(json.dumps(data)) status = queue.write(m) print 'your message sent : %s , status : %s ' % ( m,status)
def write_message(self, connexion_settings, queue, message_data):
    """JSON-encode *message_data* and send it to the SQS *queue* using the
    region and credentials from *connexion_settings*."""
    body = json.dumps(message_data)
    self.logger.info("Sending message to lax: %s", body)
    connection = boto.sqs.connect_to_region(
        connexion_settings["sqs_region"],
        aws_access_key_id=connexion_settings["aws_access_key_id"],
        aws_secret_access_key=connexion_settings["aws_secret_access_key"])
    outgoing = RawMessage()
    outgoing.set_body(body)
    target = connection.get_queue(queue)
    target.write(outgoing)
def test_job_done_removes_tile_from_in_flight(self):
    """job_done(coord_message) must remove the coord's marshalled int from
    the redis in-flight set (verified via the mocked srem call)."""
    from tilequeue.tile import CoordMessage
    coord = Coordinate(row=1, column=1, zoom=1)
    payload = serialize_coord(coord)
    message = RawMessage()
    message.set_body(payload)
    coord_message = CoordMessage(coord, message)
    self.sqs.job_done(coord_message)
    from tilequeue.tile import coord_marshall_int
    # The in-flight set stores coords as marshalled ints, not strings.
    exp_value = coord_marshall_int(coord)
    self.mockRedis.srem.assert_called_once_with(self.sqs.inflight_key, exp_value)
def notify_robot(userid, command,devicename,executedate): data = { 'requestdate' : time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()), 'executedate' : executedate, 'userid' : userid, 'command' :command, 'devicename' : devicename } m = RawMessage() m.set_body(json.dumps(data)) status = queue.write(m) print 'your message sent : %s , status : %s, executedate:%s ' % ( m,status, executedate)
def addMessageToQueue(message):
    """Stamp *message* with a submit date and UUID key and push it, as
    JSON, to the module-configured SQS queue."""
    payload = {
        'submitdate': time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()),
        'key': str(uuid.uuid1()),
        'message': str(message),
    }
    sqs = boto.connect_sqs(AWSKey, AWSSecret)
    target = sqs.create_queue(queue)
    outgoing = RawMessage()
    outgoing.set_body(json.dumps(payload))
    target.write(outgoing)
def post_parse_error(self, entity_line, traceback_exception):
    """Report a parse failure (the offending line plus the exception text)
    to the parse-error queue, attaching the queue lazily on first use."""
    message = {
        'line': entity_line,
        'exception': str(traceback_exception)
    }
    m = RawMessage()
    # Fix: json.dumps(message, 2) passed 2 as the positional `skipkeys`
    # argument; `indent=2` was clearly the intent.
    m.set_body(json.dumps(message, indent=2))
    if self.parse_error_queue is None:
        self.parse_error_queue = attach_queue(self.sqs_conn, self.parse_error_queue_name)
    self.parse_error_queue.write(m)
def add_message_to_queue(project, sha):
    """Ask the staging deployer queue to deploy commit *sha* of *project*.

    Credentials come from the AWS_ACCESS_KEY / AWS_SECRET_KEY env vars.
    """
    payload = {"project": project, "sha": sha}
    sqs = boto.connect_sqs(os.environ["AWS_ACCESS_KEY"], os.environ["AWS_SECRET_KEY"])
    deploy_queue = sqs.create_queue("chatops-deployer-staging")
    message = RawMessage()
    message.set_body(json.dumps(payload))
    deploy_queue.write(message)
def report_parse_exception(self, line, message):
    """Publish a parse failure (*line* plus the error *message*) to the
    parse-error queue, if one can be attached."""
    # Use a distinct local name instead of rebinding the `message` parameter.
    payload = {
        'line': line,
        'exception': message,
    }
    target = attach_queue(self.sqs_conn, self.parse_error_queue_name)
    if target:
        raw = RawMessage()
        raw.set_body(json.dumps(payload))
        target.write(raw)
def notify_sqs(args, zonename):
    """Send a hand-built autoscaling-style termination notification for
    instance <args.name>.<zonename> to the 'autoscaling' SQS queue, so
    downstream consumers treat it like an EC2 terminate event."""
    sqs = boto.sqs.connect_to_region(args.region)
    instance = args.name + "." + zonename
    sqs_queue = "autoscaling"
    # Mimics an SNS-wrapped autoscaling notification: the inner Message is a
    # JSON string, hence the escaped quotes.
    manual_termination = '{"Type" : "Notification", "Subject" : "Manual: termination for instance ' + instance + '", "Message" : "{\\"Event\\":\\"manual:EC2_INSTANCE_TERMINATE\\",\\"EC2InstanceId\\":\\"' + instance + '\\"}"}'
    q = sqs.get_queue(sqs_queue)
    q.set_message_class(RawMessage)
    m = RawMessage()
    m.set_body(manual_termination)
    q.write(m)
    print "SQS: Termination event sent for %s." % args.name
def notify_robot(userid, command, devicename, executedate): data = { 'requestdate': time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()), 'executedate': executedate, 'userid': userid, 'command': command, 'devicename': devicename } m = RawMessage() m.set_body(json.dumps(data)) status = queue.write(m) print 'your message sent : %s , status : %s, executedate:%s ' % ( m, status, executedate)
def addMessageToQueue(env, project, sha):
    """Ask the deployer queue for environment *env* to deploy commit *sha*
    of *project* (sha is prefixed with the environment name).

    Credentials come from the AWS_ACCESS_KEY / AWS_SECRET_KEY env vars.
    """
    payload = {"project": project, "sha": "{0}-{1}".format(env, sha)}
    sqs = boto.connect_sqs(os.environ["AWS_ACCESS_KEY"], os.environ["AWS_SECRET_KEY"])
    deploy_queue = sqs.create_queue("chatops-deployer-{0}".format(env))
    message = RawMessage()
    message.set_body(json.dumps(payload))
    deploy_queue.write(message)
def enqueue(self, coord):
    """Queue *coord* on the zoom-appropriate SQS queue unless it is
    invalid or already in flight."""
    if not coord_is_valid(coord):
        # TODO log?
        return
    coord_int = coord_marshall_int(coord)
    if self._inflight(coord):
        return
    message = RawMessage()
    message.set_body(serialize_coord(coord))
    queue_name = self.get_queue_name_for_zoom(coord.zoom)
    target = self.sqs_queue_for_name.get(queue_name)
    assert target, 'No queue found for: %s' % queue_name
    target.write(message)
    self._add_to_flight(coord_int)
def enqueue(dstdir, tasks):
    """Enqueue *tasks* for directory *dstdir* on the processing queue,
    skipping the write if the directory is already queued.

    Drains the queue first (re-reading messages to build PREVQUEUEITEMS for
    dedup); oversized payloads are replaced with a 'too_long' sentinel so
    the worker performs its own lookup.
    """
    qconn = boto.sqs.connect_to_region(
        "us-east-1",
        aws_access_key_id=QUEUE_AWS_ACCESS_KEY,
        aws_secret_access_key=QUEUE_AWS_SECRET_KEY)
    logProcQueue = qconn.get_queue(QUEUE_NAME)
    if logProcQueue is None:
        print("Creating SQS Queue: %s with Key %s" % (QUEUE_NAME, QUEUE_AWS_ACCESS_KEY))
        logProcQueue = qconn.create_queue(QUEUE_NAME)
    data_out = {}
    data_out['directory'] = "%s/" % dstdir
    data_out['tasklist'] = tasks
    # Get all the previous tasks already in the queue to ensure no duplicates,
    # then re-add them. NOTE(review): messages are only peeked (visibility
    # timeout 30s), not re-written here — confirm they reappear as intended.
    messages = logProcQueue.get_messages(visibility_timeout=30,
                                         wait_time_seconds=2,
                                         num_messages=10)
    while len(messages) > 0:
        for message in messages:
            raw_json = message.get_body()
            data = json.loads(raw_json)
            if len(data['directory']) > 0:
                PREVQUEUEITEMS.add(data['directory'])
        messages = logProcQueue.get_messages(
            visibility_timeout=30, wait_time_seconds=2,
            num_messages=10)  #continue reading
    if data_out['directory'] in PREVQUEUEITEMS:
        print(
            "The directory \"%s\" is already in the processing queue, skipping"
            % data_out['directory'])
    else:
        json_tasks = json.dumps(data_out)
        # SQS bodies max out at 256KB; stay safely under with a sentinel.
        if len(json_tasks) > 250000:
            print(
                "Task %s has too much data, going to send 'too_long' so the worker does a manual lookup of work to do"
                % (data_out['directory']))
            data_out = {}
            data_out['directory'] = "%s/" % dstdir
            data_out['tasklist'] = "too_long"
            json_tasks = json.dumps(data_out)
        queuemessage = RawMessage()
        queuemessage.set_body(json_tasks)
        print("Enqueing Task %s" % data_out['directory'])
        logProcQueue.write(queuemessage)
    qconn.close()
def enQueueNonCompletedDirectory(directory):
    """Re-queue a day directory (YYYY/MM/DD) whose processing did not
    complete, so it gets rescheduled. No-op when running in manual mode."""
    if DATE_TO_PROCESS is not False:
        return  #don't queue when running in manual mode
    qconn = boto.sqs.connect_to_region("us-east-1",
                                       aws_access_key_id=QUEUE_AWS_ACCESS_KEY,
                                       aws_secret_access_key=QUEUE_AWS_SECRET_KEY)
    logProcQueue = qconn.get_queue(INCOMPLETE_TASKS_QUEUE_NAME)
    if logProcQueue is None:
        print ("Creating SQS Queue: %s with Key %s" % (INCOMPLETE_TASKS_QUEUE_NAME,QUEUE_AWS_ACCESS_KEY))
        logProcQueue = qconn.create_queue(INCOMPLETE_TASKS_QUEUE_NAME)
    data_out = {}
    data_out['directory'] = directory  #in format of yyyy/mm/dd
    json_encoded_message = json.dumps(data_out)
    queuemessage = RawMessage()
    queuemessage.set_body(json_encoded_message)
    print("Enqueing Directory (YYYY/MM/DD) %s for re-scheduling and re-processing due to incomplete processing with me" % data_out['directory'])
    logProcQueue.write(queuemessage)