Example #1
def test_worker_fills_internal_queue_from_celery_task():
    """
    Test read workers fill internal queue with celery tasks
    """
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")

    message = Message()
    body = '{"body": "KGRwMApTJ3Rhc2snCnAxClMndGVzdHMudGFza3MuaW5kZXhfaW5jcmVtZW50ZXInCnAyCnNTJ2Fy\\nZ3MnCnAzCihscDQKc1Mna3dhcmdzJwpwNQooZHA2ClMnbWVzc2FnZScKcDcKUydUZXN0IG1lc3Nh\\nZ2UyJwpwOApzcy4=\\n", "some stuff": "asdfasf"}'
    message.set_body(body)
    queue.write(message)

    internal_queue = Queue()
    worker = ReadWorker(queue, internal_queue)
    worker.read_message()

    packed_message = internal_queue.get(timeout=1)
    found_message_body = decode_message(packed_message['message'])
    found_message_body.should.equal({
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message2',
        },
    })
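The opaque "body" value above is a celery-style envelope: a protocol-0 pickle of the task dict, base64-encoded with embedded newlines (Python 2's base64.encodestring produces exactly that wrapping). A hedged sketch of how such a payload could be built, assuming decode_message() simply reverses those two steps; the decoded dict is pinned down by the test's assertion:

import json
import pickle
from base64 import encodestring  # Python 2; wraps output in newlines, as seen in the body above

task = {
    'task': 'tests.tasks.index_incrementer',
    'args': [],
    'kwargs': {'message': 'Test message2'},
}
body = json.dumps({
    'body': encodestring(pickle.dumps(task)),  # protocol-0 pickle, then base64
    'some stuff': 'asdfasf',
})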
Example #2
def test_worker_fills_internal_queue():
    """
    Test read workers fill internal queue
    """

    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")

    message = Message()
    body = json.dumps({
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message',
        },
    })
    message.set_body(body)
    queue.write(message)

    internal_queue = Queue()
    worker = ReadWorker(queue, internal_queue)
    worker.read_message()

    packed_message = internal_queue.get(timeout=1)
    found_message_body = decode_message(packed_message['message'])
    found_message_body.should.equal({
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message',
        },
    })
Example #3
def test_worker_fills_internal_queue_only_until_maximum_queue_size():
    """
    Test read workers fill internal queue only to maximum size
    """
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")
    queue.set_timeout(1)  # Set visibility timeout low to improve test speed

    message = Message()
    body = json.dumps({
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message',
        },
    })
    message.set_body(body)
    for i in range(3):
        queue.write(message)

    internal_queue = Queue(maxsize=2)
    worker = ReadWorker(queue, internal_queue)
    worker.read_message()

    # The internal queue should only have two messages on it
    internal_queue.get(timeout=1)
    internal_queue.get(timeout=1)

    try:
        internal_queue.get(timeout=1)
    except Empty:
        pass
    else:
        raise AssertionError("The internal queue should be empty")
Example #4
def test_worker_fills_internal_queue_and_respects_visibility_timeouts():
    """
    Test read workers respect visibility timeouts
    """
    # Setup logging
    logger = logging.getLogger("pyqs")
    logger.handlers.append(MockLoggingHandler())

    # Setup SQS Queue
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")
    queue.set_timeout(1)

    # Add Messages
    message = Message()
    body = '{"body": "KGRwMApTJ3Rhc2snCnAxClMndGVzdHMudGFza3MuaW5kZXhfaW5jcmVtZW50ZXInCnAyCnNTJ2Fy\\nZ3MnCnAzCihscDQKc1Mna3dhcmdzJwpwNQooZHA2ClMnbWVzc2FnZScKcDcKUydUZXN0IG1lc3Nh\\nZ2UyJwpwOApzcy4=\\n", "some stuff": "asdfasf"}'
    message.set_body(body)
    queue.write(message)
    queue.write(message)
    queue.write(message)

    # Run Reader
    internal_queue = Queue(maxsize=1)
    worker = ReadWorker(queue, internal_queue)
    worker.read_message()

    # Check log messages
    logger.handlers[0].messages['warning'][0].should.contain("Timed out trying to add the following message to the internal queue")
    logger.handlers[0].messages['warning'][1].should.contain("Clearing Local messages since we exceeded their visibility_timeout")
Example #5
    def post_count_tweets_result(self, job_id, coordinate_box, count):
        '''
        Submits the results of the tweet count.

        @param job_id Tracking id of the job
        @paramType uuid/string
        @param coordinate_box Box in which the tweets were counted
        @paramType dictionary
        @param count # of tweets in the coordinate_box
        @paramType int
        @returns n/a
        '''
        assert job_id is not None
        assert coordinate_box is not None
        assert count is not None

        message = Message() # Set up the message
        message.set_body(json.dumps({
            'job_id' : job_id,
            'task' : 'count_tweets',
            'result' : count,
            'coordinate_box' : coordinate_box
        }))

        result = self.queue.write(message) # Write out the request
        assert result is not None, 'Failed to push results to queue!'
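A hypothetical call of the method above, purely for illustration (the worker instance and every value are made up):

# Hypothetical usage; `worker` is assumed to be an instance of the enclosing class.
worker.post_count_tweets_result(
    job_id='5f0f7a3c-2b9e-4d26-8cf1-73c2a1f0b9aa',        # made-up tracking uuid
    coordinate_box={'lat_min': 40.5, 'lat_max': 40.9,     # made-up box
                    'lon_min': -74.3, 'lon_max': -73.7},
    count=42,
)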
Example #6
 def submit_task(self, task, **kwargs):
     queue = self._choose_queue(task, **kwargs)
     logger.debug("Sending task '%s' to queue '%s'.", task.id,
                  queue.name)
     m = Message()
     m.set_body(json.dumps(task.to_primitive()))
     return queue.write(m)
Example #7
 def resubmit_task(self, task, delay, **kwargs):
     task.increment_attempt()
     logger.debug("Resubmitting task %s with %d second delay.", task.id,
                  delay)
     m = Message()
     m.set_body(json.dumps(task.serialize()))
     return self.queue.write(m, delay_seconds=delay)
Example #8
def queue_msgwrite(queue):
	"""
	Writes a message to the queue
	
	curl -X POST -H 'Content-Type: application/json' http://localhost:5000/queue/<mytestqueue>/msqs -d '{"content":"message"}'
	"""
	conn = get_conn()
	body = request.get_json(force=True)

	q = conn.get_queue("C13765235-%s" % queue)

	resp = {}

	if q is not None:
		m = Message()
		m.set_body(body["content"])
		q.write(m)
		resp["id"] = q.id
		resp["message"] = m.get_body()
	else:
		resp["status"] = "Not Found"

	return Response(
		response=json.dumps(resp),
		mimetype="application/json"
	)
Example #9
	def run(self):
		conn = boto.sqs.connect_to_region(self.mysqsZone)
		if not conn:
			print "NotificationManager.py: error while connecting to " + self.mysqsZone + " zone"
		fakelist = range(int(self.rEnd) - int(self.rStart))
		while True:
			now = time.time()
			currentID = int(self.rStart)
			for item in fakelist:
				aRequestId = int(time.time())
				JsonRequest = jm.createOverviewRequest(aRequestId, "_APPosto_SDCC_notification_poller", currentID)
				destinationQueueName = "_APPosto_requests_queue"
				dest_queue = conn.get_queue(destinationQueueName)
				while dest_queue is None:
					dest_queue = conn.create_queue(str(destinationQueueName))
					if dest_queue is None:
						print "queue creation failed"
				m = Message()
				m.set_body(str(JsonRequest))
				dest_queue.write(m)
				currentID = currentID + 1
			duration = time.time() - now
			slack = int(self.frequency) - int(duration)
			if slack > 0:
				print "NotificationManager: next poll in at least " + str(slack) + " seconds"
				time.sleep(float(slack))
Example #10
def sqs_enque(data):
    '''
    Send (string) data to an Amazon SQS queue.

    Good uses:
    * Send a list of filenames of tracking files stored on S3.
      ~50k requests for around 3 pennies.

    Bad uses:
    * Send all events to Amazon SQS as part of a pipeline. Each
      billion events runs us $500.

    TODO: Batch operations. Would improve performance and cut cost
    10x.

    (Untested)
    '''
    sqs_conn = boto.sqs.connect_to_region(
        "us-east-1",
        aws_access_key_id=settings['edx-aws-access-key-id'],
        aws_secret_access_key=settings['edx-aws-secret-key']
    )
    q = sqs_conn.get_queue(settings["tracking-logs-queue"])

    for item in data:
        m = Message()
        m.set_body(item)
        q.write(m)
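The TODO in the docstring above can be sketched with boto's Queue.write_batch, which accepts up to 10 (id, body, delay_seconds) tuples per request. A hedged, untested variant of the loop:

import uuid

def sqs_enque_batched(q, data):
    # Hypothetical batched version of the loop above; q is the boto queue.
    batch = []
    for item in data:
        batch.append((str(uuid.uuid4()), item, 0))  # (id, body, delay_seconds)
        if len(batch) == 10:  # SQS caps batch sends at 10 messages
            q.write_batch(batch)
            batch = []
    if batch:
        q.write_batch(batch)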
Example #11
	def test_count(self):
		q = SQSQueueMock('test')
		m = Message()
		m.set_body('this is a test')
		status = q.write(m)
		self.assertTrue(status)
		self.assertTrue(q.count() == 1)
		
		status = q.write(m)
		self.assertTrue(status)
		self.assertTrue(q.count() == 2)
		
		status = q.write(m)
		self.assertTrue(status)
		self.assertTrue(q.count() == 3)
		
		messages = q.get_messages(num_messages=1)
		self.assertTrue(q.delete_message(messages[0]))
		self.assertTrue(q.count() == 2)
		
		messages = q.get_messages(num_messages=1)
		self.assertTrue(q.delete_message(messages[0]))
		self.assertTrue(q.count() == 1)
		
		messages = q.get_messages(num_messages=1)
		self.assertTrue(q.delete_message(messages[0]))
		self.assertTrue(q.count() == 0)
Example #12
def processLogContent(filename, logContent):
	for line in logContent.split('\n'):
		# line should start with a timestamp: 2014
		if not line.startswith("20"):
			continue
		# line should have an exception in it
		if "Exception" not in line:
			continue
		
		parsedLine = line.split(" ")
		
		exceptionDate    = parsedLine[0]
		exceptionTime    = parsedLine[1].split(",")[0] # no milliseconds
		exceptionType    = parsedLine[2]
		exceptionTask    = parsedLine[3].strip("[]")
		exceptionClass   = parsedLine[4].strip("[]")
		exceptionMessage = " ".join(parsedLine[5::])

		message = {"file": filename,
		           "date": exceptionDate,
		           "time": exceptionTime,
		           "type": exceptionType,
		           "task": exceptionTask,
		           "class": exceptionClass,
		           "message": exceptionMessage}
		
		sqsMessage = Message()
		sqsMessage.set_body(json.dumps(message))
		sendMessageToQueue(sqsMessage)
Example #13
def readtaskSQS(filename, queue_name, process_queue):
    aws_conn = boto.sqs.connect_to_region("us-east-1", aws_access_key_id='{aws_access_key_id}', aws_secret_access_key='{aws_secret_access_key}')
    dynamo_conn = boto.dynamodb.connect_to_region("us-east-1", aws_access_key_id='{aws_access_key_id}', aws_secret_access_key='{aws_secret_access_key}')
    SQS_queue = aws_conn.get_queue(queue_name)
    SQS_process_queue = aws_conn.get_queue(process_queue)
    task_id = 0

    try:
        task_table_schema = dynamo_conn.create_schema(hash_key_name='task_id', hash_key_proto_value=str)
        table = dynamo_conn.create_table(name='Dynamo_Table', schema=task_table_schema, read_units=10, write_units=10)
        print 'Table Dynamo_Table has been created'
    except Exception as e:
        print 'Dynamo_Table already exists.'

    with open(filename) as f:
        task_list = f.readlines()

    for i in task_list:
        msg = Message()
        json_msg = {}
        json_msg["task_id"] = task_id
        json_msg["task"] = i
        msg.set_body(json.dumps(json_msg))
        SQS_queue.write(msg)
        task_id = task_id + 1

    return SQS_queue, SQS_process_queue, task_id
Example #14
def enqueue(queue, files):
    '''
    loops through an array of CSV file paths in order to convert each row
    to a JSON object, and then add it to the SQS Queue
    '''
    counter = 0

    # loop through files
    for f in files:

        # create a reader for the CSV object that will provide each row as
        # a dictionary
        reader = unicodecsv.DictReader(open(f))

        # loop through rows of the CSV
        for data in reader:

            # create a json version of the csv data
            package = json.dumps(data)

            # create and submit a new message to the queue
            m = Message()
            m.set_body(package)
            queue.write(m)

            # increment counter and draw a '.' to screen to show progress
            counter += 1
            ping()

    print "\n\nExiting - End of data files.  %d rows queued.\n" % counter
Example #15
File: Main.py Project: zapree/346
def ProcessDelta():
    if request.headers.get('X-AppEngine-QueueName') is None:
        # Ignore if not from AppEngine
        abort(403)

    client = dropbox.client.DropboxClient(token)
    try:
        #see if we can grab the cursor file from dropbox
        f = client.get_file('/.cursor')
        cursor = f.read()
    except:
        #if we can't, set cursor to none and it will
        #get all of the changes, then we write to it
        cursor = None

    delta = client.delta(cursor)

    for filepath, data in delta['entries']:
        #check that there is an entry, and that it isn't a cursor
        if (data is not None) and (filepath != '/.cursor'):
            #check for directories and compressed files
            if not data['is_dir'] and not filepath.endswith('.gz'):
                #we have something we want to process
                q = aws_sqs.create_queue(queue_name, 30)
                message = Message()
                message.set_body(filepath)
                q.write(message)

                #write the changes to the cursor file in dropbox
                cursor = delta['cursor']
                client.put_file('/.cursor', cursor, overwrite=True)
Example #16
    def post(self):
        if request.content_type != 'application/json':
            abort(400, message="Invalid Content-Type")

        data = request.get_json()
        if not isinstance(data, dict):
            data = {}

        url = data.get('video_url', '')
        owner = data.get('owner', '')
        s = Session(url)
        s.owner = owner

        db.session.add(s)
        db.session.commit()

        if app.config['SEND_MESSAGES']:
            msg = Message()
            msg.set_body(json.dumps(marshal(s, SessionAPI.fields)))
            app.session_queue.write(msg)

        increment_asg('video-processing-group')


        return {"message": "Success", "id": s.id}
Example #17
def create_queue_order(pair, side, units):

	# Create connection to SQS queue.
	conn = boto.sqs.connect_to_region('us-east-1')
	queue = conn.get_queue('forex_moving_average_orders')

	# Set queue message.
	message = Message()
	message.set_body(pair + ' ' + side + ' ' + str(units))

	# Set queue message attributes.
	message.message_attributes = {
		"pair": {
			"data_type": "String",
			"string_value": pair
		},
		"side": {
			"data_type": "String",
			"string_value": side
		},
		"units": {
			"data_type": "Number",
			"string_value": str(units)
		}
	}

	# Write message to queue.
	queue.write(message)
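To read those attributes back, boto's get_messages accepts a message_attributes filter; a hedged sketch against the same queue:

# Hedged sketch: fetch an order back together with its attributes.
messages = queue.get_messages(num_messages=1, message_attributes=['All'])
for received in messages:
	print received.get_body()
	print received.message_attributes['pair']['string_value']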
Example #18
def parse_add_activities():
    json_api_spec = {}
    rq_id = request.query.id
    rq_name = request.query.name
    rq_activities = request.query.activities
    global request_count
    print '\n'
    request_count += 1
    dict_sqs = {
        'id': rq_id,
        'name': rq_name,
        'activities': rq_activities,
        'request_type': 'add_activities',
        'rq_count': request_count,
        }
    m = Message()
    js = json.dumps(dict_sqs)
    m.set_body(js)
    sqs_in.write(m)
    json_full_spec = {}
    json_full_spec['type'] = 'Notification'
    json_full_spec['msg'] = 'Accepted'
    json_api_spec['data'] = json_full_spec
    return json_api_spec
Example #19
 def POST(self):
     global q
     data = web.data()
     print data
     m = Message() 
     m.set_body(data)
     q.write(m)
Example #20
def run(message):
   try_count = 0
   while True:
      if try_count != MAX_TRY:
         launch_result = launch_vm(message)
         #print launch_result
         result = json.loads(launch_result)
         if result['rescode'] == '1':
            sqs_conn = boto.connect_sqs()
            request_queue = sqs_conn.create_queue(REQUEST_QUEUE)
            request_queue.delete_message(message)
            print "Delete message from dev queue :("
            return 1
         elif result['rescode'] == '2':
            print 'job fail'
            try_count = try_count + 1
      else:
         print try_count
         print "last job fail"
         sqs_conn = boto.connect_sqs()
         request_queue = sqs_conn.create_queue(REQUEST_QUEUE)
         request_queue.delete_message(message)
         new_m = Message()
         new_m.set_body('new')
         status = request_queue.write(new_m)
         print "Re-insert queue message (fail) :("
         return 2
Example #21
def queue_article_publication(article_id, version, run):
    if version is None or run is None:
        return {
                    'publication-status': 'error',
                    'id': article_id,
                    'version': str(version),
                    'run': run
                }
    queue_provider = QueueProvider()
    out_queue = queue_provider.get_queue(settings.workflow_starter_queue)
    article = get_article(article_id)
    article_versions = article.get('versions')
    result = {}

    if isinstance(article_versions, dict):
        version_data = article_versions.get(int(version))
        if isinstance(version_data, dict):
            version_properties = version_data.get('properties')
            if isinstance(version_properties, dict):
                # TODO : get publication status and check still valid
                # also see http://jira.elifesciences.org:8080/browse/ELPP-613
                status = 'queued'
                status_message = 'article queued'

                publication_data = version_properties.get('_publication-data')

                # This string is a base64-encoded message which allows initiation of the PostPerfectPublication
                # workflow.

                # This class now constructs a workflow starter message which initiates the ApprovePerfectArticle
                # workflow. That workflow needs data to tell Drupal to publish version <version> of article
                # <article_id>, and also requires the data in this encoded string to initiate PostPerfectPublication
                # upon successful publication, so pass it article_id, version and the base64-encoded string via the
                # starter mechanism.

                follow_on_data = {
                    'article_id': article_id,
                    'version': version,
                    'run': run,
                    'publication_data': publication_data.get('value')
                }

                message = {
                    'workflow_name': 'ApproveArticlePublication',
                    'workflow_data': follow_on_data
                }

                m = Message()
                m.set_body(json.dumps(message))
                out_queue.write(m)

                result = {
                    'publication-status': status,
                    'id': article_id,
                    'version': str(version),
                    'run': run

                }

    return result
Example #22
def add_ids_to_queue(args, and_expression):
    engine = create_engine(DB_LOGIN)
    connection = engine.connect()

    # noinspection PyBroadException
    try:
        queue = None
        if not args.test:
            sqs_helper = SqsHelper('us-east-1')
            queue = sqs_helper.get_queue(args.queue_name)

        select_galaxy = select([GALAXY.c.galaxy_id]).where(and_expression).order_by(func.RAND())

        counter = 1
        for galaxy in connection.execute(select_galaxy):
            if counter % 100 == 0:
                LOG.info('Added {0}'.format(counter))

            counter += 1
            message_text = '{0}'.format(galaxy[GALAXY.c.galaxy_id])
            if args.test:
                LOG.info(message_text)
            else:
                message = Message()
                message.set_body(message_text)
                queue.write(message)

    except Exception:
        LOG.exception('Error adding ids')
Example #23
	def on_data(self, data):
		print data
		msg = cPickle.dumps(data)
		m = Message()
		m.set_body(msg)
		status = q[0].write(m)
		return True
Example #24
File: sqs.py Project: e-loue/queues
 def write(self, message):
     try:
         m = Message()
         m.set_body(message)
         return self._queue.write(m)
     except SQSError, e:
         raise QueueException, "%s" % e.code
Example #25
 def poll_queue_test(self, mock_process_message):
     example_message_body = "---\ntest_job_name:\n  request_id: testrequestid\n"
     message = Message()
     message.set_body(example_message_body)
     self.sqslistener.queue.write(message)
     self.sqslistener.poll_queue()
     assert mock_process_message.called
Example #26
    def send_to_sqs(self, msg):
        conn = boto.sqs.connect_to_region(settings.AWS_CONF['region'],
                                          aws_access_key_id=settings.AWS_CONF['awskey'],
                                          aws_secret_access_key=settings.AWS_CONF['awssecret'],
                                          is_secure=False)
        try:
            parsed_json = simplejson.loads(msg)
        except Exception as e:
            logger.critical("Invalid message:%s Reason:%s" % (msg, e))
            return

        # wrap with timestamp
        if 'timestamp' not in parsed_json:
            msg = '{"event":%s , "timestamp":"%d"}' % (msg, int(time()))

        logger.info('Message %s' % msg)

        # Send to SQS
        if conn is None:
            logger.critical("Could not connect to AWS: region=%s" % settings.AWS_CONF['region'])
        else:
            q = conn.get_queue(settings.AWS_CONF['sqsqueue'])
            if q is None:
                logger.critical("Error connecting to SQS queue: %s" %
                        settings.AWS_CONF['sqsqueue'])
                return
            m = Message()
            m.set_body(msg)
            q.write(m)
            if m.id is None:
                logger.critical("Could not send this msg to queue: %s" % m.get_body())
            else:
                logger.info('Wrote %s' % m.id)
Example #27
def wake_up_message_send():
   sqs_conn = boto.connect_sqs()
   request_queue = sqs_conn.create_queue(master_queue_name)
   rvm_host = get_rvm_hostname()
   for i in range(0, 5):
      try:
         log.debug("Attempted to get hostname")
         fp = urllib.urlopen('http://%s/latest/meta-data/local-hostname' % rvm_host)
         local_hostname = fp.read()
         fp.close()
         if local_hostname:
            break
      except IOError:
         pass

   for i in range(0, 5):
      try:
         log.debug("Attempted to get hostip")
         fp = urllib.urlopen('http://%s/latest/meta-data/local-ipv4' % rvm_host)
         local_ipaddress = fp.read()
         fp.close()
         if local_ipaddress:
            break
      except IOError:
         pass

   new_message = Message()
   msg = "MASTER|%s|%s" % (local_hostname, local_ipaddress)
   new_message.set_body(msg)
   status = request_queue.write(new_message)
   log.debug("sending message '%s'" % msg)
Example #28
    def on_status(self, status):
        if status:
            json_data = json.loads(status)
            id_str=json_data['id_str']
            location=re.escape(json_data['user']['location'])
           
            coordinates=json_data['coordinates']
            text=json_data['text']
            if coordinates:
                # Twitter's coordinates field is a GeoJSON point: [longitude, latitude]
                longitude = coordinates['coordinates'][0]
                latitude = coordinates['coordinates'][1]
            if not location:
                location=""
            location_ascii =  ''.join([i if ord(i) < 128 else ' ' for i in location])
            text_ascii =  ''.join([j if ord(j) < 128 else ' ' for j in text])
            category = ""
            for substring in track:
                if substring in status.lower():
                    category = substring
                    break
            if id_str and coordinates and category:
                sql='insert ignore into twittmapapp_tweetdata (id_str,location,category,longitude,latitude) values ("'+id_str+'","'+location_ascii+'","'+category+'",'+str(longitude)+','+str(latitude)+');'
                t=self.cur.execute(sql)  
                m = Message()
                m.set_body("This message contains text,id,latitude and longitude")
                m.message_attributes = {"text":{"data_type": "String","string_value":text_ascii}, "id":{"data_type": "String","string_value":id_str},"category":{"data_type": "String","string_value":category},"longitude":{"data_type": "String","string_value":str(longitude)},"latitude":{"data_type": "String","string_value":str(latitude)}}
                self.q.write(m)  
#                 print "hi"
#                 response = self.alchemyapi.sentiment("text", text_ascii) 
#                 if response['status']!="ERROR":
#                     print "Sentiment: ", response["docSentiment"]["type"]
#                     self.sns.publish(self.topic, response["docSentiment"]["type"], self.subject);
        return
Example #29
 def publish_node_stats(self):

     try:

         # create a connection to SQS
         conn = SQSConnection()

         # ask for the QUEUE
         q = conn.get_queue(NODE_STATS_QUEUE)

         # create a new message
         m = Message()

         # populate the message with stats
         m.set_body(self.node_stats())

         # publish the message to SQS
         q.write(m)

         # schedule another publish
         self.schedule_node_stats()

     except Exception as e:

         # blab about the err on std err
         os.sys.stderr.write(str(e) + '\n')

         # log error message to the error queue
         self.publish_error('publish_node_stats: ' + str(e))
Example #30
    def callback(self, filename, lines, **kwargs):
        timestamp = self.get_timestamp(**kwargs)
        if kwargs.get('timestamp', False):
            del kwargs['timestamp']

        message_batch = []
        message_batch_size = 0
        message_batch_size_max = 250000 # Max 256KiB but leave some headroom

        for line in lines:
            m = Message()
            m.set_body(self.format(filename, line, timestamp, **kwargs))
            message_size = len(m)

            if (message_size > message_batch_size_max):
                self._logger.debug('Dropping the message as it is too large to send ({0} bytes)'.format(message_size))
                continue

            # SQS can only handle up to 10 messages in batch send and it can not exceed 256KiB (see above)
            # Check the new total size before adding a new message and don't try to send an empty batch
            if (len(message_batch) > 0) and (((message_batch_size + message_size) >= message_batch_size_max) or (len(message_batch) == 10)):
                self._logger.debug('Flushing {0} messages to SQS queue {1} bytes'.format(len(message_batch), message_batch_size))
                self._send_message_batch(message_batch)
                message_batch = []
                message_batch_size = 0

            message_batch_size = message_batch_size + message_size
            message_batch.append((uuid.uuid4(), self.format(filename, line, timestamp, **kwargs), 0))

        if len(message_batch) > 0:
            self._logger.debug('Flushing the last {0} messages to SQS queue {1} bytes'.format(len(message_batch), message_batch_size))
            self._send_message_batch(message_batch)

        return True
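_send_message_batch is not shown; a minimal sketch, assuming it simply forwards to boto's Queue.write_batch (the (id, body, delay) tuples built above already have the shape write_batch expects):

    def _send_message_batch(self, message_batch):
        # Hypothetical implementation: at most 10 (id, body, delay_seconds)
        # tuples per call, matching the flush conditions in callback() above.
        self._queue.write_batch(message_batch)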
Example #31
 def load_from_file(self, fp, sep='\n'):
     """Utility function to load messages from a file-like object to a queue"""
     n = 0
     body = ''
     l = fp.readline()
     while l:
         if l == sep:
             m = Message(self, body)
             self.write(m)
             n += 1
             print 'writing message %d' % n
             body = ''
         else:
             body = body + l
         l = fp.readline()
     return n
Example #32
 def test_send_receive_message(self):
     q = self.conn.create_queue('message_test')
     msg_created = Message()
     msg_created.set_body('hello world')
     msg_sent = q.write(msg_created)
     self.assertEquals(msg_created, msg_sent)
     read_msg = q.read()
     self.assertIsNotNone(read_msg)
     print 'written', msg_created.get_body()
     print 'read', read_msg.get_body()
     self.assertEquals(msg_created.get_body(), read_msg.get_body())
Example #33
 def load(self, file_name, sep='\n'):
     """Utility function to load messages from a file to a queue"""
     fp = open(file_name, 'rb')
     n = 0
     body = ''
     l = fp.readline()
     while l:
         if l == sep:
             m = Message(self, body)
             self.write(m)
             n += 1
             print 'writing message %d' % n
             body = ''
         else:
             body = body + l
         l = fp.readline()
     fp.close()
     return n
Example #34
    def __init__(self, callable, *args, **kwargs):
        """
        Create a new Job.

        :param obj callable: [optional] A callable to run.
        """
        self.start_time = None
        self.stop_time = None
        self.run_time = None
        self.exception = None
        self.result = None
        self.callable = callable
        self.args = args
        self.kwargs = kwargs
        self.message = Message(body=dumps({
            'callable': self.callable,
            'args': self.args,
            'kwargs': self.kwargs,
        }))
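For this message body to round-trip, dumps must be a serializer that can handle callables (pickle.dumps qualifies; json.dumps does not), and pickle stores functions by qualified name, so only importable module-level callables survive. A hedged sketch of the matching consumer side:

from pickle import loads

def run_job(message):
    # Hypothetical consumer for the Job message above: unpickle and invoke.
    payload = loads(message.get_body())
    return payload['callable'](*payload['args'], **payload['kwargs'])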
Example #35
    def test_get_messages_attributes(self):
        conn = SQSConnection()
        current_timestamp = int(time.time())
        queue_name = 'test%d' % int(time.time())
        test = conn.create_queue(queue_name)
        self.addCleanup(conn.delete_queue, test)
        time.sleep(65)

        # Put a message in the queue.
        m1 = Message()
        m1.set_body('This is a test message.')
        test.write(m1)
        self.assertEqual(test.count(), 1)

        # Check all attributes.
        msgs = test.get_messages(
            num_messages=1,
            attributes='All'
        )
        for msg in msgs:
            self.assertEqual(msg.attributes['ApproximateReceiveCount'], '1')
            first_rec = msg.attributes['ApproximateFirstReceiveTimestamp']
            first_rec = int(first_rec) / 1000
            self.assertTrue(first_rec >= current_timestamp)

        # Put another message in the queue.
        m2 = Message()
        m2.set_body('This is another test message.')
        test.write(m2)
        self.assertEqual(test.count(), 1)

        # Check a specific attribute.
        msgs = test.get_messages(
            num_messages=1,
            attributes='ApproximateReceiveCount'
        )
        for msg in msgs:
            self.assertEqual(msg.attributes['ApproximateReceiveCount'], '1')
            with self.assertRaises(KeyError):
                msg.attributes['ApproximateFirstReceiveTimestamp']
Example #36
def process_links(article_title):
	print "processing links for " + article_title
	links_tree = lxml.etree.parse(LINKS_ROOT + article_title)
	link_nodes = links_tree.xpath("//pl[@ns='0' and @exists='']")
	links = [l.text for l in link_nodes]

	#add links to the queue if they haven't been seen before
	if(LOCAL_TEST):
		for title in links:
			if (not status.has_key(title)):
				status[title] = (0, 0.0)
				message = Message()
				message.set_body(title.encode('utf-8'))
				q.write(message)
				print "adding article to queue: " + title
	else:
		request_list = []
		for t in links:
			request_list = request_list + [dict(title=t)]
	
		#returns list of dicts - each dict is one item from the database
		present_links = database.batch_get(keys=request_list)
	
		with database.batch_write() as batch:
			for r in request_list:
				#find titles in the returned list that match requested titles
				# if none exist, add the item to the database and queue
				matches = [item for item in present_links if item['title'] == r['title']]
				if len(matches) == 0:
					add_time = str(datetime.datetime.now())
					batch.put_item(data={
						'title': r['title'],
						'status': 'queued',
						'difficulty': Decimal(0.0),
						'time': add_time,
					})
					message = Message()
					message.set_body(r['title'].encode('utf-8'))
					q.write(message)
					print "adding article to queue: " + r['title']
Example #37
def createQ():
    conn = boto.sqs.connect_to_region(
        "eu-west-1",
        aws_access_key_id='AKIAIR7EH3TNSTDUCWKA',
        aws_secret_access_key='t2FZT5mrLYy8gX7kS1q0p4ObQYXTwGnaiUm+rxHZ')
    q = conn.create_queue("queue_jabba")
    #write 1 message to queue
    m = Message()
    m.set_body('first message')
    q.write(m)
    #write 99 more messages to queue
    for x in range(1, 100):
        mx = Message()
        mx.set_body('Message' + str(x))
        q.write(mx)
    #read message from queue
    rs = q.get_messages()
    mread = rs[0]
    mread.get_body()
    #delete the read message from queue
    q.delete_message(mread)
    return mread
Example #38
 def timeout(self):
     from boto.sqs.message import Message
     if not self._queue and self._connected:
         try:
             from boto.sqs.connection import SQSConnection
             sqs_connection = SQSConnection(self._access_key,
                                            self._secret_access_key,
                                            **self._connection_kwargs)
             queues = sqs_connection.get_all_queues(prefix=self._queue_name)
             if len(queues) == 0:
                 raise Exception('Queue %s does not exist' %
                                 self._queue_name)
             self._queue = queues[0]
         except Exception as e:
             LOGGER.error('Could not connect to logging queue %s' %
                          self._queue_name,
                          exc_info=e)
             self._connected = False
     while len(self._records_to_emit) and self._connected:
         record = self._records_to_emit.pop()
         self._queue.write(Message(body=record))
Example #39
    def on_data(self, data):

        try:
            status_wrapper = TextWrapper(width=60,
                                         initial_indent='    ',
                                         subsequent_indent='    ')
            twitter_data = json.loads(data)
            m = Message()
            if ('coordinates' in twitter_data.keys()):
                if (twitter_data['coordinates'] is not None):
                    tweet = {
                        'id':
                        twitter_data['id'],
                        'time':
                        twitter_data['timestamp_ms'],
                        'text':
                        twitter_data['text'].lower().encode(
                            'ascii', 'ignore').decode('ascii'),
                        'coordinates':
                        twitter_data['coordinates'],
                        'place':
                        twitter_data['place'],
                        'handle':
                        twitter_data['user']['screen_name'],
                        'sentiment':
                        ""
                    }

                    global count
                    count += 1
                    print(count)
                    print(tweet)
                    sqs.send_message(
                        QueueUrl='',
                        MessageBody=json.dumps(tweet))  #Adding data to Queue1
                    return True
        except BaseException as e:
            print("Error on_data: %s" % str(e))
        return True
Example #40
def taskProcess(msg, jObj):
    print jObj["task"]
    job = jObj["task"].split(' ')
    try:
        exe = 'time.' + job[0] + '(' + str(float(job[1]) / 1000) + ')'
        print 'Executing ... ' + exe
        exec(exe)
        print 'successful'
        m1 = Message()
        j = responseMessage(jObj["clientID"], jObj["jobID"], "1")
        m1.set_body(j)
        processed_queue.write(m1)
    except Exception as e:
        m = Message()
        m.set_body(json.dumps(jObj))
        task_queue.write(m)
        key = jObj["clientID"] + ";" + jObj["jobID"]
        item = table.get_item(hash_key=key)
        item['Body'] = 'False'
        item.put()
        print 'Interrupted' + str(e)

    global threadCount
    threadCount = threadCount - 1
Example #41
parser.add_argument("qname")

args = parser.parse_args()

conn = boto.sqs.connect_to_region("us-west-2")  # "us-west-2a" is an availability zone, not a region

q = conn.get_queue(args.qname)

try:
	m = q.read(60)
	str1 = m.get_body()
	print "Message read = ", str1
except:
	print "Could not read message"
Example #42
    def do_activity(self, data=None):
        """
        Do the work
        """
        if self.logger:
            self.logger.info('data: %s' %
                             json.dumps(data, sort_keys=True, indent=4))

        run = data['run']
        session = Session(self.settings)
        version = session.get_value(run, 'version')
        article_id = session.get_value(run, 'article_id')

        self.emit_monitor_event(
            self.settings, article_id, version, run, self.pretty_name, "start",
            "Starting preparation of article for EIF " + article_id)

        try:
            eif_location = session.get_value(run, 'eif_location')
            eif_bucket = self.settings.publishing_buckets_prefix + self.settings.eif_bucket

            article_path = session.get_value(run, 'article_path')
            self.set_monitor_property(self.settings,
                                      article_id,
                                      'path',
                                      article_path,
                                      'text',
                                      version=version)

            expanded_folder = session.get_value(run, 'expanded_folder')
            status = session.get_value(run, 'status')

            update_date = session.get_value(run, 'update_date')

            carry_over_data = {
                'eif_location': eif_location,
                'eif_bucket': eif_bucket,
                'passthrough': {
                    'article_id': article_id,
                    'version': version,
                    'run': run,
                    'article_path': article_path,
                    'expanded_folder': expanded_folder,
                    'status': status,
                    'update_date': update_date,
                }
            }

            message = carry_over_data

            sqs_conn = boto.sqs.connect_to_region(
                self.settings.sqs_region,
                aws_access_key_id=self.settings.aws_access_key_id,
                aws_secret_access_key=self.settings.aws_secret_access_key)

            out_queue = sqs_conn.get_queue(self.settings.website_ingest_queue)
            m = Message()
            m.set_body(json.dumps(message))
            out_queue.write(m)

            #########

        except Exception as e:
            self.logger.exception("Exception when Preparing for PostEIF")
            self.emit_monitor_event(
                self.settings, article_id, version, run, self.pretty_name,
                "error", "Error submitting EIF for article " + article_id +
                " message: " + str(e.message))
            return False

        self.emit_monitor_event(
            self.settings, article_id, version, run, self.pretty_name, "end",
            "Finished preparation of article for EIF " + article_id)
        return True
Example #43
def sqsput(filename):
    print filename
    m = Message()
    m.set_body(filename)
    q.write(m)
Example #44
def write_message_to_queue(queue):
    m = Message()
    m.set_body('request_ip')
    queue.write(m)
    print m
Example #45
# this script expects 3 environment variables
#    1. SQS_KEY_ID (preferably an IAM user with limited rights)
#    2. SQS_ACCESS_KEY (the accompanying secret key)
#    3. SQS_TASK_QUEUE (the queue to use)

import os
import sys

from boto.sqs.connection import SQSConnection
from boto.sqs.message import Message

import utils

# your amazon keys
key = os.environ['SQS_KEY_ID']
access = os.environ['SQS_ACCESS_KEY']
queue = os.environ['SQS_TASK_QUEUE']

if __name__ == '__main__':
    region_info = utils.get_region_info()
    sqs = SQSConnection(key, access, region=region_info)

    tasks = sqs.create_queue(queue)

    m = Message()
    m.set_body(sys.argv[1])
    tasks.write(m)
Example #46
import argparse
import sys
import signal
import time

import boto.sqs
from boto.sqs.message import Message
from subprocess import call

parser = argparse.ArgumentParser()
parser.add_argument("echo")
args = parser.parse_args()

conn = boto.sqs.connect_to_region(
    "us-east-1",
    aws_access_key_id='AKIAINWVSI3MIXIB5N3Q',
    aws_secret_access_key='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')
my_queue = conn.get_queue(args.echo)
m = Message()

print "Reading Queue " + args.echo + " Messages read are deleted"

while True:
    try:

        try:
            rs = my_queue.get_messages(num_messages=10,
                                       visibility_timeout=12000,
                                       wait_time_seconds=5)
            if (len(rs) == 0):
                print args.echo + " len = 0 now empty"
                sys.exit(0)
            for msgcounter in range(len(rs)):
                m = rs[msgcounter - 1]
Example #47
 def _put(self, queue, message, **kwargs):
     """Put message onto queue."""
     q = self._new_queue(queue)
     m = Message()
     m.set_body(dumps(message))
     q.write(m)
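A hedged sketch of the counterpart _get, under the same helper assumptions (this is not kombu's actual implementation):

 def _get(self, queue, **kwargs):
     """Hypothetical counterpart: pop one message off the queue."""
     q = self._new_queue(queue)
     m = q.read()
     if m is None:
         raise Empty()  # assumes Queue.Empty is imported alongside dumps/loads
     q.delete_message(m)
     return loads(m.get_body())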
Example #48
while True:
    '''
      EXTEND:
      Replace the following line with code to read a message off the
      input queue, convert from JSON to a Python dict, and assign to
      `req`.
    '''
    #rs is queue from front end, going to read from it
    rs = q1.get_messages()
    if (len(rs) > 0):
        m = rs[0]
        msg = m.get_body()
        #Convert json object from sqs and convert to dict so we can code..
        ds = json.loads(msg)
        actual_s = random.randint(0, ds['seconds'])
        time.sleep(actual_s)
        ds['actual_s'] = actual_s
        q1.delete_message(m)
    '''
      EXTEND:
      Replace the following line with code to put the response on the
      output queue, in JSON representation.
    '''
    if (len(rs) > 0):
        m = Message()
        js = json.dumps(ds)
        m.set_body(js)
        q2.write(
            m)  #output queue from worker so we can get backend.py to read it.
        print "Output Queue Written"
Example #49
JC_key = bucket.get_key(folder + '/' + JC_fname)
if JC_key is None:
    JC_key = bucket.new_key(folder + '/' + JC_fname)
    try:
        JC_key.set_contents_from_filename(JC_fname)
    except S3ResponseError as e:
        sys.exit(-1)
    bucket.set_acl('public-read', JC_key.name)

reskey = bucket.get_key(folder + '/' + res_fname)
if reskey is not None:
    reskey.delete()

mtxt = json.dumps({'bucket': bucket.name, 'folder': folder, 'argv': argv})
m = Message()
m.set_body(mtxt)
status = q.write(m)

reskey = bucket.get_key(folder + '/' + res_fname)
while reskey is None:
    time.sleep(polling_wait_time)
    reskey = bucket.get_key(folder + '/' + res_fname)

reskey.get_contents_to_filename(res_fname)
res_f = open(res_fname, 'r')
exitcode = int(res_f.readline())
res_f.close()

logkey = bucket.get_key(folder + '/' + log_fname)
if logkey is not None:
Example #50
def writeToSQS(messageBody):
    # Note that messages are base64 encoded.
    m1 = Message()
    m1.set_body(messageBody)
    q.write(m1)
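The note refers to boto's default Message class, which base64-encodes bodies on the wire. If a verbatim body is wanted instead, boto also ships RawMessage; a hedged sketch:

from boto.sqs.message import RawMessage

def writeRawToSQS(messageBody):
    # RawMessage sends the body as-is, with no base64 step.
    q.set_message_class(RawMessage)
    m2 = RawMessage()
    m2.set_body(messageBody)
    q.write(m2)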
Example #51
 def add(self, message):
     message = to_data(message)
     m = Message()
     m.set_body(value2json(message))
     self.queue.write(m)
Example #52
 def on_data(self, data):
     msg = pickle.dumps(data)
     m = Message()
     m.set_body(msg)
     status = q[0].write(m)
     return True
Example #53
 def send_message(self, queue_name=None, msg=None):
     q = self.conn.lookup(queue_name)
     m = Message()
     m.set_body(msg)
     q.write(m)
Example #54
 def add(self, name, **spider_args):
     d = spider_args.copy()
     d['name'] = name
     msg = Message(body=json.dumps(d))
     return threads.deferToThread(self._queue_method, 'write', msg)
Example #55
def write_message_to_queue(queue):
    m = Message()
    m.set_body(ipgetter.myip())
    queue.write(m)
    print m
Example #56
def send(message):
    global q
    m = Message()
    m.set_body(message)
    q.write(m)
Example #57
import boto.sqs
from boto.sqs.message import Message
import os
import config

conn = boto.sqs.connect_to_region("us-west-2",
                                  aws_access_key_id=config.sqs_access_key,
                                  aws_secret_access_key=config.sqs_access_secret)

queue = conn.get_queue("Image")
queuepkg = conn.get_queue("Package")


while (True):
    messages = queue.get_messages()
    for mes in messages:
        dname = mes.get_body()
        print dname
        res = os.popen('./script.sh ' + dname)
        for i in range(4):
            res.readline()
        pkglist = res.read()
        # print pkglist
        # txt = open("file.txt", "r")
        # pkglist = txt.read()
        mes = Message()
        mes.set_body(pkglist)
        queuepkg.write(mes)
    if (len(messages) != 0):
        queue.delete_message_batch(messages)
Example #58
def test_single_path_document_zip(registry_mock, connection_mock,
                                  document_mock, job_mock, parent_url_mock,
                                  get_document_mock):
    conn_s3 = boto.connect_s3()
    bucket = conn_s3.create_bucket('storage-bucket')
    conn_s3.create_bucket('files-bucket')

    registry = MagicMock()
    registry.get = MagicMock(side_effect=side_effect)

    registry_mock.return_value = registry

    message = Message()
    message.set_body(
        json.dumps({
            'Message': '9bd96ca7-3d0a-4e74-b523-b3bd38e9862e',
            'Subject': 'Test Subject'
        }))

    job = MagicMock(
        **{
            'uuid': '9bd96ca7-3d0a-4e74-b523-b3bd38e9862e',
            'name': 'Migration Download',
            'status': 'pending',
            'message': {
                'documents': [{
                    'parent_id':
                    '56d3c182-f72f-4216-9e94-1756bf67564d'
                }]
            }
        })
    job.set = MagicMock()

    document_mock.query.return_value = [
        MagicMock(
            **{
                'uuid': '56d3c182-f72f-4216-9e94-1756bf67564d',
                'created': datetime(2015, 5, 17),
                'url': '/'
            }),
        MagicMock(
            **{
                'uuid': '79254d0b-0902-4697-89d1-4be8ff3acd69',
                'created': datetime(2015, 5, 17),
                'url': 'test',
                'type': 'File'
            })
    ]

    key1 = Key(bucket, '17/5/2015/56d3c182-f72f-4216-9e94-1756bf67564d')
    key1.set_contents_from_string(
        json.dumps({
            'document': {
                'id': 1,
                'uuid': '56d3c182-f72f-4216-9e94-1756bf67564d',
                'created': str(datetime(2015, 5, 17)),
                'url': '/',
                'parent': 0,
                'path': '1/',
            }
        }))

    key2 = Key(bucket, '17/5/2015/79254d0b-0902-4697-89d1-4be8ff3acd69')
    key2.set_contents_from_string(
        json.dumps({
            'document': {
                'id': 2,
                'uuid': '79254d0b-0902-4697-89d1-4be8ff3acd69',
                'created': str(datetime(2015, 5, 17)),
                'url': 'test',
                'parent': 1,
                'path': '1/2',
                'type': 'File'
            },
            'file': {
                "bucket": "storage-bucket",
                'key': '17/5/2015/a984dea7-8140-44cb-80a0-7e832ff1ff19'
            }
        }))

    key3 = Key(bucket, '17/5/2015/a984dea7-8140-44cb-80a0-7e832ff1ff19')
    key3.set_contents_from_string('Hello World')

    job_mock.selectBy.return_value.getOne.return_value = job

    service = MigrationDownloadJob()

    get_document_mock.return_value = IN('uuid', [
        '56d3c182-f72f-4216-9e94-1756bf67564d',
        '79254d0b-0902-4697-89d1-4be8ff3acd69'
    ])

    def parent_side_effects(parent_id):
        return MagicMock(
            url='/',
            uuid='56d3c182-f72f-4216-9e94-1756bf67564d') if parent_id else None

    parent_url_mock.side_effect = parent_side_effects

    service.do_work(message)

    key = Key(bucket, '9bd96ca7-3d0a-4e74-b523-b3bd38e9862e')
    contents = StringIO(key.get_contents_as_string())
    handle = zipfile.ZipFile(contents, 'r', compression=zipfile.ZIP_DEFLATED)
    assert key.exists()
    assert handle.namelist() == [
        '56d3c182-f72f-4216-9e94-1756bf67564d',
        '79254d0b-0902-4697-89d1-4be8ff3acd69',
        '17/5/2015/a984dea7-8140-44cb-80a0-7e832ff1ff19', 'manifest'
    ]
    assert job.set.call_args_list == [
        call(status='running'),
        call(message={
            'documents': [{
                'parent_id': '56d3c182-f72f-4216-9e94-1756bf67564d'
            }],
            'download': {
                'key': '9bd96ca7-3d0a-4e74-b523-b3bd38e9862e',
                'bucket': 'storage-bucket'
            }
        },
             status='complete')
    ]
Example #59
 def emit(self, record):
     m = Message()
     m.set_body(record.msg)
     self.q.write(m)
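The emit above is a logging.Handler method; a hedged sketch of a class it could live in (the constructor and attribute wiring are assumptions):

import logging
from boto.sqs.message import Message

class SQSHandler(logging.Handler):
    # Hypothetical handler around the emit() above; queue is a boto.sqs Queue.
    def __init__(self, queue):
        logging.Handler.__init__(self)
        self.q = queue

    def emit(self, record):
        m = Message()
        m.set_body(record.msg)
        self.q.write(m)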
Example #60
def publish_to_sqs(data): 
    m = Message() 
    m.set_body(data) 
    status = q[0].write(m) 
    return status