Example 1
class TaskSQS:

    TASK_QUEUE = "tasks"
    RESULT_QUEUE = "results" 
    VISIBILITY_TIMEOUT = 120
    
    def __init__(self):
        self.conn = None
        self.taskq = None
        self.resultq = None

    def connect(self):
        # open connection
        self.conn = SQSConnection()
        
        # initialize queues
        self.taskq = self.conn.create_queue(self.TASK_QUEUE, self.VISIBILITY_TIMEOUT)
        self.taskq.set_message_class(JSONMessage)
        self.resultq = self.conn.create_queue(self.RESULT_QUEUE, self.VISIBILITY_TIMEOUT)
        self.resultq.set_message_class(JSONMessage)
    
    def clear(self):
        self.taskq.clear()
        self.resultq.clear()
#        self.conn.delete_queue(self.taskq)
#        self.conn.delete_queue(self.resultq)
        self.taskq = None
        self.resultq = None
    
    def new_task(self, task):
        return JSONMessage(self.taskq, task)
    
    def new_result(self, result):
        return JSONMessage(self.resultq, result)

    def put_task(self, task):
        self.taskq.write(task)
        
    def next_task(self):
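        # Queue.read() returns None when the queue is empty, so poll until a task message arrives.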
        next = None
        while next is None:
            next = self.taskq.read()
        return next
    
    def complete(self, task, result=None):
        self.taskq.delete_message(task)
        if result is not None:
            self.put_result(result)
    
    def put_result(self, result):
        self.resultq.write(result)

    def get_result(self):
        result = self.resultq.read()
        if result is not None:
            self.resultq.delete_message(result)
        return result
Example 2
def main():
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'hcq:o:t:',
                                   ['help', 'clear', 'queue=',
                                    'output=', 'timeout='])
    except:
        usage()
        sys.exit(2)
    queue_name = ''
    output_file = ''
    timeout = 30
    clear = False
    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit()
        if o in ('-q', '--queue'):
            queue_name = a
        if o in ('-o', '--output'):
            output_file = a
        if o in ('-c', '--clear'):
            clear = True
        if o in ('-t', '--timeout'):
            timeout = int(a)
    c = SQSConnection()
    if queue_name:
        try:
            rs = [c.create_queue(queue_name)]
        except SQSError, e:
            print 'An Error Occurred:'
            print '%s: %s' % (e.status, e.reason)
            print e.body
            sys.exit()
Example 3
 def __init__(self, *args, **kwargs):
     Queue.__init__(self, *args, **kwargs)
     c = SQSConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
     self.queue = c.lookup(self.name)
     if not self.queue:
         self.queue = c.create_queue(self.name, 1)
     self.connection = c
Example 4
class SqsApi(object):
    def __init__(self):
        self.conn = SQSConnection(aws_access_key_id=AWS_ACCESS_KEY,
                                  aws_secret_access_key=AWS_SECRET_KEY)

    def create_queue(self, name="MuscleOpsQ"):
        return self.conn.create_queue(name)

    def list_queues(self):
        return self.conn.get_all_queues()

    def write_message(self,
                      q,
                      msg="If you're sleepy and you know it; clap your hands!"
                      ):
        m = RawMessage()
        m.set_body(msg)
        q.write(m)

    def read_message(self,
                     q,
                     msg="If you're sleepy and you know it; clap your hands!"):
        rs = q.get_messages()
        m = rs[0].get_body() if len(rs) else msg
        return m
Example 5
    def test_sqs_timeout(self):
        c = SQSConnection()
        queue_name = 'test_sqs_timeout_%s' % int(time.time())
        queue = c.create_queue(queue_name)
        self.addCleanup(c.delete_queue, queue, True)
        start = time.time()
        poll_seconds = 2
        response = queue.read(visibility_timeout=None,
                              wait_time_seconds=poll_seconds)
        total_time = time.time() - start
        self.assertTrue(
            total_time > poll_seconds,
            "SQS queue did not block for at least %s seconds: %s" %
            (poll_seconds, total_time))
        self.assertIsNone(response)

        # Now that there's an element in the queue, we should not block for 2
        # seconds.
        c.send_message(queue, 'test message')
        start = time.time()
        poll_seconds = 2
        message = c.receive_message(queue,
                                    number_messages=1,
                                    visibility_timeout=None,
                                    attributes=None,
                                    wait_time_seconds=poll_seconds)[0]
        total_time = time.time() - start
        self.assertTrue(
            total_time < poll_seconds,
            "SQS queue blocked longer than %s seconds: %s" %
            (poll_seconds, total_time))
        self.assertEqual(message.get_body(), 'test message')

        attrs = c.get_queue_attributes(queue, 'ReceiveMessageWaitTimeSeconds')
        self.assertEqual(attrs['ReceiveMessageWaitTimeSeconds'], '0')
Example 6
    def test_worker_consumes_queue(self):
        sqs = SQSConnection(region=get_sqs_region('us-east-1'))

        self.assertEqual(len(sqs.get_all_queues()), 0)

        queue = sqs.create_queue('test_events')

        queue.write(make_message({'test': '1'}))

        self.assertEqual(queue.count(), 1)

        worker = Worker()
        worker.ctrl.wait_time_seconds = 0.1
        worker.idle_time_seconds = 0.1
        worker.add_consumer('test_events', dummy_consumer)

        with spawn_worker(worker):
            time.sleep(.2)
            self.assertEqual(queue.count(), 0)

            queue.write(make_message({'test': '2'}))

            self.assertEqual(queue.count(), 1)

            time.sleep(.2)

            self.assertEqual(queue.count(), 0)
Example 7
    def test_sqs_timeout(self):
        c = SQSConnection()
        queue_name = "test_sqs_timeout_%s" % int(time.time())
        queue = c.create_queue(queue_name)
        self.addCleanup(c.delete_queue, queue, True)
        start = time.time()
        poll_seconds = 2
        response = queue.read(visibility_timeout=None, wait_time_seconds=poll_seconds)
        total_time = time.time() - start
        self.assertTrue(
            total_time > poll_seconds,
            "SQS queue did not block for at least %s seconds: %s" % (poll_seconds, total_time),
        )
        self.assertIsNone(response)

        # Now that there's an element in the queue, we should not block for 2
        # seconds.
        c.send_message(queue, "test message")
        start = time.time()
        poll_seconds = 2
        message = c.receive_message(
            queue, number_messages=1, visibility_timeout=None, attributes=None, wait_time_seconds=poll_seconds
        )[0]
        total_time = time.time() - start
        self.assertTrue(
            total_time < poll_seconds, "SQS queue blocked longer than %s seconds: %s" % (poll_seconds, total_time)
        )
        self.assertEqual(message.get_body(), "test message")

        attrs = c.get_queue_attributes(queue, "ReceiveMessageWaitTimeSeconds")
        self.assertEqual(attrs["ReceiveMessageWaitTimeSeconds"], "0")
Example 8
 def request_images(self, params):
     if len(params)==0: return
     
     scene = params[0]['scene']
     scene = scene.replace('.3dm','')
     lowpriority = ""
     #print "low_priority"
     #print params[0]['low_priority']
     if params[0]['low_priority']==True:
         lowpriority = "_lowpriority"
     q_name = "%s%s_%s_%s" % (self.site_name, lowpriority, scene, 'request')
     conn = SQSConnection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
     q = conn.create_queue(q_name)
     q.set_message_class(Message)
     params = self.trunc_params(params)
     messages = []
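     # Build (id, body, delay) tuples, then send them in batches of at most 10 (the SQS batch-size limit).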
     for i in range(len(params)):
         #params[i]['params']['textParam'] = 'sunsun'
         body = simplejson.dumps(params[i])
         
         sys.stderr.write(body + "\n")
         messages.append((i,base64.b64encode(body),0))
        
     for j in range(int(math.ceil(float(len(params))/10.0))):
         conn.send_message_batch(q, messages[j*10:(j+1)*10])
     sys.stderr.write("\n\n\nSent messages\n\n\n")
     return
Example 9
    def __init__(self, conf, name):
        super(MessageQueue, self).__init__(conf, name)

        conn = SQSConnection(conf['access_key'], conf['secret_access_key'])
        self._queue = conn.create_queue(self.name)
        self._queue.set_attribute('MessageRetentionPeriod', base.FOURTEEN_DAYS)
        self._vtime = base.get_vtime(conf)
Example 10
class UploadCompleteHandler(tornado.web.RequestHandler):

    def prepare(self):
        from boto.sqs.connection import SQSConnection
        self.conn = SQSConnection(AWS_ACCESS_KEY, AWS_SECRET_KEY)
        self._q = self.conn.create_queue('FZconvertQueue', 120)

        from boto.s3.connection import S3Connection
        self.s3conn = S3Connection(AWS_ACCESS_KEY, AWS_SECRET_KEY)

    def get(self): 

        completeDebugDict = {}
        # completeDebugDict['bucket'] = self.get_arguments('bucket')[0]
        completeDebugDict['key'] = self.get_arguments('key')[0]
        completeDebugDict['parseID'] = self.get_arguments('id')[0]
        self._bucket = self.s3conn.create_bucket('figurezero')
        k = self._bucket.get_key(completeDebugDict['key'])

        rootID = completeDebugDict['key'].split('/')[1]
        completeDebugDict['tag'] = k.get_metadata('tag')
        completeDebugDict['rootID'] = rootID
        # completeDebugDict['parseID'] = k.get_metadata('parse')
        completeDebugDict['emailAddress'] = k.get_metadata('email')
        completeDebugDict['size'] = k.size

        from boto.sqs.message import Message
        m = Message()
        m.set_body(json.dumps(completeDebugDict))
        status = self._q.write(m)

        self.redirect('/approve/%s' % (completeDebugDict['parseID']))
Example 11
 def queue(self):
     if self._queue:
         return self._queue
     conn = SQSConnection(self._aws_access_key_id, self._aws_secret_access_key, self._is_secure, self._port, region=self._region)
     self._queue = conn.create_queue(self._queue_name)
     self._queue.set_message_class(self._message_class)
     return self._queue
Example 12
    def test_sqs_longpoll(self):
        c = SQSConnection()
        queue_name = "test_sqs_longpoll_%s" % int(time.time())
        queue = c.create_queue(queue_name)
        self.addCleanup(c.delete_queue, queue, True)
        messages = []

        # The basic idea is to spawn a timer thread that will put something
        # on the queue in 5 seconds and verify that our long polling client
        # sees the message after waiting for approximately that long.
        def send_message():
            messages.append(queue.write(queue.new_message("this is a test message")))

        t = Timer(5.0, send_message)
        t.start()
        self.addCleanup(t.join)

        start = time.time()
        response = queue.read(wait_time_seconds=10)
        end = time.time()

        t.join()
        self.assertEqual(response.id, messages[0].id)
        self.assertEqual(response.get_body(), messages[0].get_body())
        # The timer thread should send the message in 5 seconds, so
        # we're giving +- .5 seconds for the total time the queue
        # was blocked on the read call.
        self.assertTrue(4.5 <= (end - start) <= 5.5)
Example 13
    def test_get_messages_attributes(self):
        conn = SQSConnection()
        current_timestamp = int(time.time())
        queue_name = 'test%d' % int(time.time())
        test = conn.create_queue(queue_name)
        self.addCleanup(conn.delete_queue, test)
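        # Give the newly created queue a moment to become available before using it.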
        time.sleep(65)

        # Put a message in the queue.
        m1 = Message()
        m1.set_body('This is a test message.')
        test.write(m1)
        self.assertEqual(test.count(), 1)

        # Check all attributes.
        msgs = test.get_messages(num_messages=1, attributes='All')
        for msg in msgs:
            self.assertEqual(msg.attributes['ApproximateReceiveCount'], '1')
            first_rec = msg.attributes['ApproximateFirstReceiveTimestamp']
            first_rec = int(first_rec) / 1000
            self.assertTrue(first_rec >= current_timestamp)

        # Put another message in the queue.
        m2 = Message()
        m2.set_body('This is another test message.')
        test.write(m2)
        self.assertEqual(test.count(), 1)

        # Check a specific attribute.
        msgs = test.get_messages(num_messages=1,
                                 attributes='ApproximateReceiveCount')
        for msg in msgs:
            self.assertEqual(msg.attributes['ApproximateReceiveCount'], '1')
            with self.assertRaises(KeyError):
                msg.attributes['ApproximateFirstReceiveTimestamp']
Example 14
def main():
    try:
        opts, args = getopt.getopt(
            sys.argv[1:], 'hcq:o:t:',
            ['help', 'clear', 'queue=', 'output=', 'timeout='])
    except:
        usage()
        sys.exit(2)
    queue_name = ''
    output_file = ''
    timeout = 30
    clear = False
    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit()
        if o in ('-q', '--queue'):
            queue_name = a
        if o in ('-o', '--output'):
            output_file = a
        if o in ('-c', '--clear'):
            clear = True
        if o in ('-t', '--timeout'):
            timeout = int(a)
    c = SQSConnection()
    if queue_name:
        try:
            rs = [c.create_queue(queue_name)]
        except SQSError, e:
            print 'An Error Occurred:'
            print '%s: %s' % (e.status, e.reason)
            print e.body
            sys.exit()
Example 15
File: models.py Project: tml/norc
 def __init__(self, *args, **kwargs):
     Queue.__init__(self, *args, **kwargs)
     c = SQSConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
     self.queue = c.lookup(self.name)
     if not self.queue:
         self.queue = c.create_queue(self.name, 1)
     self.connection = c
Example 16
    def test_sqs_longpoll(self):
        c = SQSConnection()
        queue_name = 'test_sqs_longpoll_%s' % int(time.time())
        queue = c.create_queue(queue_name)
        self.addCleanup(c.delete_queue, queue, True)
        messages = []

        # The basic idea is to spawn a timer thread that will put something
        # on the queue in 5 seconds and verify that our long polling client
        # sees the message after waiting for approximately that long.
        def send_message():
            messages.append(
                queue.write(queue.new_message('this is a test message')))

        t = Timer(5.0, send_message)
        t.start()
        self.addCleanup(t.join)

        start = time.time()
        response = queue.read(wait_time_seconds=10)
        end = time.time()

        t.join()
        self.assertEqual(response.id, messages[0].id)
        self.assertEqual(response.get_body(), messages[0].get_body())
        # The timer thread should send the message in 5 seconds, so
        # we're giving +- .5 seconds for the total time the queue
        # was blocked on the read call.
        self.assertTrue(4.5 <= (end - start) <= 5.5)
Example 17
 def test_credentialled_SQS_access(self):
     "check that the credentials can access the SQS service"
     creds = aws_credentials.get_credentials()
     region_name = 'eu-west-1'
     region = [r for r in regions() if r.name == region_name][0]
     conn = SQSConnection(aws_access_key_id=creds[0], aws_secret_access_key=creds[1], region=region)
     q = conn.create_queue("PLEASE_KEEP_FOR_TESTING", 30)
     assert q
Example 18
 def get_lowpriority_wait_count(self, scenes): 
     conn = SQSConnection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY) 
     count = 0
     for scene in scenes:
         q_name = "%s%s_%s_%s" % (self.site_name, '_lowpriority', scene, 'request')
         q = conn.create_queue(q_name)
         count += q.count()
     return count
Example 19
def main():
    try:
        opts, args = getopt.getopt(
            sys.argv[1:], 'hcq:o:t:r:',
            ['help', 'clear', 'queue=', 'output=', 'timeout=', 'region='])
    except:
        usage()
        sys.exit(2)
    queue_name = ''
    output_file = ''
    timeout = 30
    region = ''
    clear = False
    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit()
        if o in ('-q', '--queue'):
            queue_name = a
        if o in ('-o', '--output'):
            output_file = a
        if o in ('-c', '--clear'):
            clear = True
        if o in ('-t', '--timeout'):
            timeout = int(a)
        if o in ('-r', '--region'):
            region = a
    if region:
        c = boto.sqs.connect_to_region(region)
        if c is None:
            print 'Invalid region (%s)' % region
            sys.exit(1)
    else:
        c = SQSConnection()
    if queue_name:
        try:
            rs = [c.create_queue(queue_name)]
        except SQSError as e:
            print 'An Error Occurred:'
            print '%s: %s' % (e.status, e.reason)
            print e.body
            sys.exit()
    else:
        try:
            rs = c.get_all_queues()
        except SQSError as e:
            print 'An Error Occurred:'
            print '%s: %s' % (e.status, e.reason)
            print e.body
            sys.exit()
    for q in rs:
        if clear:
            n = q.clear()
            print 'clearing %d messages from %s' % (n, q.id)
        elif output_file:
            q.dump(output_file)
        else:
            print q.id, q.count(vtimeout=timeout)
Example 20
    def setup_sqs_queue(self):
        conn = SQSConnection()
        q = conn.create_queue('some-queue')

        m = Message()
        m.set_body('This is my first message.')
        q.write(m)

        self.assertEqual(q.count(), 1)
Example 21
 def adjust_ghx(self, file_name, scene):
     scene = scene.replace('.3dm','')
     q_name = "%s_%s_%s" % (self.site_name, scene, 'request')
     conn = SQSConnection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
     q = conn.create_queue(q_name)
     q.set_message_class(Message)
     body = simplejson.dumps({'operation': 'adjust_ghx', 'gh_file': file_name})
     sys.stderr.write(body + "\n")
     conn.send_message_batch(q, [(0,base64.b64encode(body),0)])
Example 22
File: aws.py Project: aerwin3/qork
    def __init__(self, conf, name):
        super(MessageQueue, self).__init__(conf, name)

        conn = SQSConnection(
            conf['access_key'], conf['secret_access_key'])
        self._queue = conn.create_queue(self.name)
        self._queue.set_attribute(
            'MessageRetentionPeriod', base.FOURTEEN_DAYS)
        self._vtime = base.get_vtime(conf)
Example 23
File: sqs.py Project: imclab/pi-eye
def push(body, queue='pi-status'):
    '''
    Create a JSON-encoded boto-style Message object and write it to the queue.
    '''
    sqs = SQSConnection()
    sqs_queue = sqs.create_queue(queue)

    message = JSONMessage(body=body)
    sqs_queue.write(message)
Example 24
    def setup_sqs_queue(self):
        conn = SQSConnection()
        q = conn.create_queue('some-queue')

        m = Message()
        m.set_body('This is my first message.')
        q.write(m)

        self.assertEqual(q.count(), 1)
Example 25
class TweetQueue:
    def __init__(self,config):
        self.conn = SQSConnection(config["aws-s3"]["access-key-id"], config["aws-s3"]["secret-access-key"])
        self.q = self.conn.create_queue('condor-tweets')

    def enqueueTweets(self,tweets):
        if len(tweets) > 0:
            m = Message()
            m.set_body(json.dumps(map(slimTweet,tweets)))
            self.q.write(m)
Example 26
class UserQueue:
    def __init__(self,config):
        self.conn = SQSConnection(config["aws-s3"]["access-key-id"], config["aws-s3"]["secret-access-key"])
        self.q = self.conn.create_queue('condor-users')

    def enqueueUsers(self,users):
        if len(users) > 0:
            m = Message()
            m.set_body(json.dumps(users))
            self.q.write(m)
Example 27
def queue(q_name, msg):

    conn_logger.debug('queue')
    sqs_conn = SQSConnection(AWS_U, AWS_K)
    q = sqs_conn.create_queue(q_name)
    m = Message()
    m.set_body(msg)
    rs = q.write(m)
    
    conn_logger.debug('put "%s" to %s queue: ' % (msg, q_name))
Example 28
def queue(q_name, msg):

    conn_logger.debug('queue')
    sqs_conn = SQSConnection(AWS_U, AWS_K)
    q = sqs_conn.create_queue(q_name)
    m = Message()
    m.set_body(msg)
    rs = q.write(m)

    conn_logger.debug('put "%s" to %s queue: ' % (msg, q_name))
Example 29
class islandoraEncodingDaemon(Daemon):
    def run(self, config_file):
        self.init_config(config_file)
        self.init_logger()
        self.init_sqs()
        self.max_workers = self.config.getint('Threading', 'number_workers')
        self.sleep_time = self.config.getint('Threading', 'sleep_time')
        while True:
            self.logger.info('Daemon looking for jobs for workers.')
            for worker_id in range(self.max_workers):
                self.logger.info('Daemon reports queue length is currently %s.', self.queue.count())
                next_queue_message = self.queue.read()
                if next_queue_message is not None:
                    self.logger.info('Daemon found a job - assigning to worker %s.', worker_id)
                    worker = islandoraEncodingWorker(
                                                   worker_id,
                                                   next_queue_message,
                                                   self.queue,
                                                   self.logger,
                                                   self.config
                                                   )
                    worker.start()
            for thread in threading.enumerate():
                if thread is not threading.currentThread():
                    thread.join()
            self.logger.info('All workers retired, daemon sleeping for %s seconds.', self.sleep_time)
            time.sleep(self.sleep_time)

    def init_config(self, config_filepath):
        self.config = ConfigParser.SafeConfigParser()
        self.config.read(config_filepath)

    def init_logger(self):
        self.logger = logging.getLogger('newspaper_encoding')
        self.hdlr = logging.FileHandler(self.config.get('Logging', 'log_file'))
        logLevelValue = getattr(
                              logging,
                              self.config.get('Logging', 'log_level')
                              )
        self.logger.setLevel(logLevelValue)
        self.hdlr.setLevel(logLevelValue)
        self.formatter = logging.Formatter(self.config.get('Logging', 'log_format'))
        self.hdlr.setFormatter(self.formatter)
        self.logger.addHandler(self.hdlr)

    def init_sqs(self):
        try:
            self.sqsConn = SQSConnection(
                                         self.config.get('SQS', 'aws_access_key'),
                                         self.config.get('SQS', 'aws_secret_key')
                                         )
            self.queue = self.sqsConn.create_queue(self.config.get('SQS', 'sqs_queue_name'))
        except:
            self.logger.error('Cannot initialize SQS queue (check credentials?)')
Example 30
 def sendMessageToSQSQueue(self, message_body, queue_name):
     try:
         sqsConn = SQSConnection(self.access_key, self.secret_key)
         queue = sqsConn.create_queue(queue_name)
         new_item_message = Message()
         new_item_message.set_body(message_body)
         if queue.write(new_item_message) :
             self.logger.info('Message added to processing queue.')
         else :
             self.logger.error('SQS service did not accept object into the queue.')
     except Exception, e:
         self.logger.error('Message could not be added to processing queue. ' + str(e))
Example 31
File: sqs.py Project: imclab/pi-eye
def pop_loop(queue='pi-status', wait=5):
    # like pop(), but iterate
    sqs = SQSConnection()
    sqs_queue = sqs.create_queue(queue)
    sqs_queue.set_message_class(JSONMessage)

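    # read() long-polls for up to `wait` seconds and returns None when nothing arrives.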
    while True:
        message = sqs_queue.read(wait_time_seconds=wait)
        if message is not None:
            body = message.get_body()
            message.delete()
            yield body
Example 32
def create_queue(name):
    if settings.OPENSOCIAL_DEBUG:  # do nothing when running in a local environment
        return None

    if not name:
        raise JobQueueError('Not specified queue name')

    # Connect to SQS queue
    sri = SQSRegionInfo(name='ap-northeast-1', endpoint='ap-northeast-1.queue.amazonaws.com')
    conn = SQSConnection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, region=sri)
    q = conn.create_queue(name)
    return q
Example 33
def process_config():
    config = ConfigParser.ConfigParser()
    config.read(config_file)

    try:
        tmp_dir = config.get("global", "tmp_dir")
        log_file = config.get("global", "log_file")
        log_level = config.get("global", "log_level")
    except:
        pass

    s3logger.setLevel(LEVELS.get(log_level, logging.INFO))
    file_handler = logging.handlers.WatchedFileHandler(log_file)
    s3logger.addHandler(file_handler)

    log_label("starting ..." )
    log_msg( "Temp dir: %s" % tmp_dir)

    try:
        # AWS auth details
        aws_access_key = config.get("AWSauth", "aws_access_key")
        aws_secret_key = config.get("AWSauth", "aws_secret_key")

        # S3 configuration details
        s3_bucket = config.get("S3config", "s3_bucket")

        # SQS job queue configuration details
        queue_name = config.get("SQSconfig", "queue_name")
        conn = SQSConnection(aws_access_key, aws_secret_key)
        queue = conn.create_queue(queue_name)
        queue.set_message_class(MHMessage)

        sleep_time = float(config.get("SQSconfig", "sleep_time"))
        log_msg("sleep time: %s" % sleep_time)

        for section in config.sections():
            if re.search("job", section):
                suffix = config.get(section, "match")
                log_msg("processing section [%s]" % section)
               
                job_match[suffix] = []
                for option in config.options(section):
                    if re.search("exec", option):
                        job_match[suffix].append( config.get(section, option) )
                       
                log_debug("CMDs for suffix [png]") 
                for cmd in job_match[suffix]:
                    log_debug("   %s" % cmd)

    except Exception, e:
        log_err("Error reading config file [%s]: %s" % (config_file, e))
        sys.exit(1)
Example 34
    def __init__(self, queue="sqs_handler_debug", aws_key=None, secret_key=None):
        ''' Sends log messages to SQS. Parameters:
        * queue is the SQS queue. This will be created if it does not exist. 
        * Optional: aws_key and secret_key. If these don't exist, it will look 
          at the appropriate environment variables. 
        '''

        logging.Handler.__init__(self)
        if aws_key and secret_key:
            conn = SQSConnection(aws_key, secret_key)
        else:
            conn = SQSConnection()
        self.q = conn.create_queue(queue)
Example 35
def send_ds_encode_sqs(pid, dsid):
    config = ConfigParser.SafeConfigParser()
    config.read("awsMiscTools.conf")
    message_body = json.dumps({"pid": pid, "dsid": dsid, "language": "eng"})
    try:
        sqsConn = SQSConnection(config.get("SQS", "access_key"), config.get("SQS", "secret_key"))
        queue = sqsConn.create_queue(config.get("SQS", "queue_name"))
    except:
        sys.exit("Cannot initialize SQS queue (check credentials?)")
    message = Message()
    message.set_body(message_body)
    print message_body
    queue.write(message)
Example 36
class SQS(object):

    def __init__(self, config="config.ini"):
        if isinstance(config, basestring):
            config = credentials.ConfigFileCredentials(config)
        elif not isinstance(config, credentials.Credentials):
            raise TypeError("Unsupported config parameter type")

        aws_access_key_id, aws_secret_access_key, aws_queue = config.get_data()

        try:
            self.conn = SQSConnection(aws_access_key_id, aws_secret_access_key)
            self.set_queue(aws_queue)
        except:
            print 'Error connection'

    def get_all_queues(self):
        return self.conn.get_all_queues()

    def get_queue_attributes(self):
        return self.conn.get_queue_attributes(self.queue, attribute='All')

    def create_queue(self, queue, timeout):
        return self.conn.create_queue(queue, timeout)

    def set_queue(self, queue):
        self.queue = self.conn.get_queue(queue)
        return True

    def get_messages(self, limit=10):
        return self.queue.get_messages(limit)

    def count(self):
        #print "Count: %s" % self.queue.count()
        return self.queue.count()

    def write(self, data):
        m = Message()
        m.set_body(json.dumps(data))
        return self.queue.write(m)

    def delete(self, id):
        #print "Eliminando %s" % id
        self.queue.delete_message(id)

    def clear(self):
        return self.queue.clear()

    def delete_queue(self):
        return self.conn.delete_queue(self.queue)
Example 37
    def test_queue_deletion_affects_full_queues(self):
        conn = SQSConnection()
        initial_count = len(conn.get_all_queues())

        empty = conn.create_queue("empty%d" % int(time.time()))
        full = conn.create_queue("full%d" % int(time.time()))
        time.sleep(60)
        # Make sure they're both around.
        self.assertEqual(len(conn.get_all_queues()), initial_count + 2)

        # Put a message in the full queue.
        m1 = Message()
        m1.set_body("This is a test message.")
        full.write(m1)
        self.assertEqual(full.count(), 1)

        self.assertTrue(conn.delete_queue(empty))
        # Here's the regression for the docs. SQS will delete a queue with
        # messages in it, no ``force_deletion`` needed.
        self.assertTrue(conn.delete_queue(full))
        # Wait long enough for SQS to finally remove the queues.
        time.sleep(90)
        self.assertEqual(len(conn.get_all_queues()), initial_count)
Example 38
    def test_queue_deletion_affects_full_queues(self):
        conn = SQSConnection()
        initial_count = len(conn.get_all_queues())

        empty = conn.create_queue('empty%d' % int(time.time()))
        full = conn.create_queue('full%d' % int(time.time()))
        time.sleep(60)
        # Make sure they're both around.
        self.assertEqual(len(conn.get_all_queues()), initial_count + 2)

        # Put a message in the full queue.
        m1 = Message()
        m1.set_body('This is a test message.')
        full.write(m1)
        self.assertEqual(full.count(), 1)

        self.assertTrue(conn.delete_queue(empty))
        # Here's the regression for the docs. SQS will delete a queue with
        # messages in it, no ``force_deletion`` needed.
        self.assertTrue(conn.delete_queue(full))
        # Wait long enough for SQS to finally remove the queues.
        time.sleep(90)
        self.assertEqual(len(conn.get_all_queues()), initial_count)
Example 39
 def setUp(self):
     # Ensure the queue is clear before we start, or we'll lose more hair
     creds = aws_credentials.get_credentials()
     region_name = 'eu-west-1'
     region = [r for r in regions() if r.name == region_name][0]
     conn = SQSConnection(aws_access_key_id=creds[0],
       aws_secret_access_key=creds[1],
       region=region)
     q = conn.create_queue(self.qname, 30)
     cruft = q.get_messages(10)
     while cruft:
         for c in cruft:
             print 'deleting old message %s' % c.__dict__
             q.delete_message(c)
         cruft = q.get_messages(10)
Example 40
def dequeue(q_name, func, conn=None):

    conn_logger.debug('dequeuing %s' % q_name)
    sqs_conn = SQSConnection(AWS_U, AWS_K)
    q = sqs_conn.create_queue(q_name)
    #q.clear()
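    # Fetch up to five messages; each one is deleted only after func() handles it successfully.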
    rs = q.get_messages(5)
    if len(rs) == 0:
        conn_logger.info('%s queue is empty' % q_name)
    else:
        for m in rs:
            msg = m.get_body()
            if func(msg, conn):
                q.delete_message(m)
                conn_logger.info('dequeue done: %s from %s' % (msg, q_name))
Example 41
 def setUp(self):
     # Ensure the queue is clear before we start, or we'll lose more hair
     creds = aws_credentials.get_credentials()
     region_name = 'eu-west-1'
     region = [r for r in regions() if r.name == region_name][0]
     conn = SQSConnection(aws_access_key_id=creds[0],
                          aws_secret_access_key=creds[1],
                          region=region)
     q = conn.create_queue(self.qname, 30)
     cruft = q.get_messages(10)
     while cruft:
         for c in cruft:
             print 'deleting old message %s' % c.__dict__
             q.delete_message(c)
         cruft = q.get_messages(10)
Example 42
def dequeue(q_name, func, conn=None):

    conn_logger.debug('dequeuing %s' % q_name)
    sqs_conn = SQSConnection(AWS_U, AWS_K)
    q = sqs_conn.create_queue(q_name)
    #q.clear()
    rs = q.get_messages(5)
    if len(rs) == 0:
        conn_logger.info('%s queue is empty' % q_name)
    else:    
        for m in rs:
            msg = m.get_body()
            if func(msg, conn):
                q.delete_message(m)
                conn_logger.info('dequeue done: %s from %s' % (msg, q_name))
Example 43
class awssqs:
    def __init__(self, name, visibility_timeout=60):
        self.visibility_timeout = visibility_timeout
        self.conn = SQSConnection(region=boto.sqs.regions()[1])  # eu-west1
        self.q = self.conn.create_queue(name)

        if self.q is None:
            raise Exception("Could not get that queue " + name)
        self.name = name

    def write(self, message):
        if self.q is None:
            raise Exception("Queue is none " + self.name)
        m = Message()
        m.set_body(message)
        success = self.q.write(m)
        failed = 0
        while not success:
            time.sleep(5)
            success = self.q.write(m)  # Keep trying until success
            failed += 1
            if failed > 10:
                raise Exception("Failed over 10 times to write to queue %s!" %
                                self.name)

    # Return a Message, use m.get_body() to get the text
    def read(self):
        if self.q is None:
            raise Exception("Queue is none " + self.name)
        rs = self.q.get_messages(visibility_timeout=self.visibility_timeout)
        if len(rs) > 0:
            m = rs[0]
            return m
        return None

    def length(self):
        if self.q is None:
            raise Exception("Queue is none " + self.name)
        rs = self.q.get_messages(visibility_timeout=self.visibility_timeout)
        return len(rs)

    def delete(self, m):
        self.q.delete_message(m)

    def deleteQueue(self):
        self.conn.delete_queue(self.q, force_deletion=True)
        self.q = None
        self.conn.close()
Example 45
File: sqs.py Project: imclab/pi-eye
def pop(queue='pi-status'):
    '''
    Pop the next Message off the queue, immediately delete it
    (mark it as consumed), and return the body string.

    Presumably, these will be a JSONMessage instances, and get_body() will return a dict/list/etc.
    '''
    sqs = SQSConnection()
    sqs_queue = sqs.create_queue(queue)
    sqs_queue.set_message_class(JSONMessage)

    message = sqs_queue.read()
    if message is not None:
        body = message.get_body()
        message.delete()
        return body
Example 46
class SQSQueue(object):

    implements(interface.IQueue)

    def __init__(self, name, acc_sec_pair=None, visibility_timeout=30):
        self.name = name
        if acc_sec_pair is None:
            acc_sec_pair = aws_credentials.get_credentials()
        self.region_name = 'eu-west-1'
        self.region = [r for r in regions() if r.name == self.region_name][0]
        self.conn = SQSConnection(aws_access_key_id=acc_sec_pair[0],
                                  aws_secret_access_key=acc_sec_pair[1],
                                  region=self.region)
        self.q = self.conn.create_queue(name, visibility_timeout)
        self.q.set_message_class(event.SQSEvent)

    def put_event(self, event_in):
        assert isinstance(event_in, event.SQSEvent)
        self.q.write(event_in)

    def get_event(self, block=True, timeout=None):
        ev = self.q.read()
        return ev

    def event_factory(self, event_id, data={}):
        return event.SQSEvent(event_id=event_id, data=data)

    def _expand_decimal_values(self, data):
        """expand Decimals which otherwise cause msgpack exception"""
        for k, v in data.items():
            if type(v) is decimal.Decimal:
                data[k] = unicode(v)

    def create_and_send(self, event_id, data={}):
        self._expand_decimal_values(data)
        ev = self.event_factory(event_id=event_id, data=data)
        self.put_event(ev)
Example 47
    def test_1_basic(self):
        print '--- running SQSConnection tests ---'
        c = SQSConnection()
        rs = c.get_all_queues()
        num_queues = 0
        for q in rs:
            num_queues += 1
    
        # try illegal name
        try:
            queue = c.create_queue('bad*queue*name')
            self.fail('queue name should have been bad')
        except SQSError:
            pass
        
        # now create one that should work and should be unique (i.e. a new one)
        queue_name = 'test%d' % int(time.time())
        timeout = 60
        queue = c.create_queue(queue_name, timeout)
        time.sleep(60)
        rs  = c.get_all_queues()
        i = 0
        for q in rs:
            i += 1
        assert i == num_queues+1
        assert queue.count_slow() == 0

        # check the visibility timeout
        t = queue.get_timeout()
        assert t == timeout, '%d != %d' % (t, timeout)

        # now try to get queue attributes
        a = q.get_attributes()
        assert a.has_key('ApproximateNumberOfMessages')
        assert a.has_key('VisibilityTimeout')
        a = q.get_attributes('ApproximateNumberOfMessages')
        assert a.has_key('ApproximateNumberOfMessages')
        assert not a.has_key('VisibilityTimeout')
        a = q.get_attributes('VisibilityTimeout')
        assert not a.has_key('ApproximateNumberOfMessages')
        assert a.has_key('VisibilityTimeout')

        # now change the visibility timeout
        timeout = 45
        queue.set_timeout(timeout)
        time.sleep(60)
        t = queue.get_timeout()
        assert t == timeout, '%d != %d' % (t, timeout)
    
        # now add a message
        message_body = 'This is a test\n'
        message = queue.new_message(message_body)
        queue.write(message)
        time.sleep(60)
        assert queue.count_slow() == 1
        time.sleep(90)

        # now read the message from the queue with a 10 second timeout
        message = queue.read(visibility_timeout=10)
        assert message
        assert message.get_body() == message_body

        # now immediately try another read, shouldn't find anything
        message = queue.read()
        assert message == None

        # now wait 30 seconds and try again
        time.sleep(30)
        message = queue.read()
        assert message

        # now delete the message
        queue.delete_message(message)
        time.sleep(30)
        assert queue.count_slow() == 0

        # try a batch write
        num_msgs = 10
        msgs = [(i, 'This is message %d' % i, 0) for i in range(num_msgs)]
        queue.write_batch(msgs)

        # try to delete all of the messages using batch delete
        deleted = 0
        while deleted < num_msgs:
            time.sleep(5)
            msgs = queue.get_messages(num_msgs)
            if msgs:
                br = queue.delete_message_batch(msgs)
                deleted += len(br.results)

        # create another queue so we can test force deletion
        # we will also test MHMessage with this queue
        queue_name = 'test%d' % int(time.time())
        timeout = 60
        queue = c.create_queue(queue_name, timeout)
        queue.set_message_class(MHMessage)
        time.sleep(30)
        
        # now add a couple of messages
        message = queue.new_message()
        message['foo'] = 'bar'
        queue.write(message)
        message_body = {'fie' : 'baz', 'foo' : 'bar'}
        message = queue.new_message(body=message_body)
        queue.write(message)
        time.sleep(30)

        m = queue.read()
        assert m['foo'] == 'bar'

        # now delete that queue and messages
        c.delete_queue(queue, True)

        print '--- tests completed ---'
Example 48
class AmazonSQS(StorageService):
    def __init__(self):
        self.last_msg = -1
        self.buffered = []
        self.msg_num = 0
        self.access_key_id = ""
        self.secret_access_key = ""
        self.conn = SQSConnection(self.access_key_id, self.secret_access_key,
                                  True, None, None, None, None, None)

    def new_connection(self, conn_id, conn_name):
        conns = self.conn.create_queue("connections")
        self.put(conns, conn_name)
        return (self.get_requests_loc(conn_id),
                self.get_responses_loc(conn_id))

    def get_place(self, place_name):
        return self.create_queue(place_name)

    def get_connection(self):
        conns = self.conn.create_queue("connections")
        return self.get(conns)

    def get_connections_loc(self):
        return self.conn.create_queue("connections")

    def get_responses_loc(self, conn_id):
        return self.conn.create_queue("%s_response" % conn_id)

    def get_requests_loc(self, conn_id):
        return self.conn.create_queue("%s_request" % conn_id)

    def put(self, q, data, use_seq_num=False):
        m = Message()
        if (use_seq_num):
            data = str(self.msg_num) + " " + data
            self.msg_num += 1
        m.set_body(data)
        #        print "Putting data: %s"%data
        status = q.write(m)
        if (status == False):
            print "Put failed"
            return False
        return True

    def get(self, q, use_seq_num=False):
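        # Serve a buffered out-of-order message first when it matches the next expected sequence number.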
        if (len(self.buffered) > 0
                and self.buffered[0][0] == self.last_msg + 1):
            data = self.buffered[0][1]
            self.buffered = self.buffered[1:]
            self.last_msg += 1
            return data
        m = q.read()
        if (m == None):
            return False
        else:
            q.delete_message(m)
            if (not use_seq_num):
                return m.get_body()
            else:
                msg_num, sep, msg = m.get_body().partition(" ")
                msg_num = int(msg_num)
                if (msg_num == self.last_msg + 1):
                    #                    print "Using msg %d"%msg_num
                    self.last_msg = msg_num
                    return msg
                else:
                    #                    print "Buffering msg %d, last_msg: %d"%(msg_num, self.last_msg)
                    self.buffered.append((msg_num, msg))
                    self.buffered.sort()

    def create_queue(self, name):
        return self.conn.create_queue(name)

    def delete(self, place, data):
        msgs = place.get_messages()
        for m in msgs:
            if (m.get_body() == data):
                place.delete_message(m)
Example 49
    def test_1_basic(self):
        print '--- running SQSConnection tests ---'
        c = SQSConnection()
        rs = c.get_all_queues()
        num_queues = 0
        for q in rs:
            num_queues += 1

        # try illegal name
        try:
            queue = c.create_queue('bad_queue_name')
        except SQSError:
            pass

        # now create one that should work and should be unique (i.e. a new one)
        queue_name = 'test%d' % int(time.time())
        timeout = 60
        queue = c.create_queue(queue_name, timeout)
        time.sleep(60)
        rs = c.get_all_queues()
        i = 0
        for q in rs:
            i += 1
        assert i == num_queues + 1
        assert queue.count_slow() == 0

        # check the visibility timeout
        t = queue.get_timeout()
        assert t == timeout, '%d != %d' % (t, timeout)

        # now try to get queue attributes
        a = q.get_attributes()
        assert a.has_key('ApproximateNumberOfMessages')
        assert a.has_key('VisibilityTimeout')
        a = q.get_attributes('ApproximateNumberOfMessages')
        assert a.has_key('ApproximateNumberOfMessages')
        assert not a.has_key('VisibilityTimeout')
        a = q.get_attributes('VisibilityTimeout')
        assert not a.has_key('ApproximateNumberOfMessages')
        assert a.has_key('VisibilityTimeout')

        # now change the visibility timeout
        timeout = 45
        queue.set_timeout(timeout)
        time.sleep(60)
        t = queue.get_timeout()
        assert t == timeout, '%d != %d' % (t, timeout)

        # now add a message
        message_body = 'This is a test\n'
        message = queue.new_message(message_body)
        queue.write(message)
        time.sleep(30)
        assert queue.count_slow() == 1
        time.sleep(30)

        # now read the message from the queue with a 10 second timeout
        message = queue.read(visibility_timeout=10)
        assert message
        assert message.get_body() == message_body

        # now immediately try another read, shouldn't find anything
        message = queue.read()
        assert message == None

        # now wait 30 seconds and try again
        time.sleep(30)
        message = queue.read()
        assert message

        if c.APIVersion == '2007-05-01':
            # now terminate the visibility timeout for this message
            message.change_visibility(0)
            # now see if we can read it in the queue
            message = queue.read()
            assert message

        # now delete the message
        queue.delete_message(message)
        time.sleep(30)
        assert queue.count_slow() == 0

        # create another queue so we can test force deletion
        # we will also test MHMessage with this queue
        queue_name = 'test%d' % int(time.time())
        timeout = 60
        queue = c.create_queue(queue_name, timeout)
        queue.set_message_class(MHMessage)
        time.sleep(30)

        # now add a couple of messages
        message = queue.new_message()
        message['foo'] = 'bar'
        queue.write(message)
        message_body = {'fie': 'baz', 'foo': 'bar'}
        message = queue.new_message(body=message_body)
        queue.write(message)
        time.sleep(30)

        m = queue.read()
        assert m['foo'] == 'bar'

        # now delete that queue and messages
        c.delete_queue(queue, True)

        print '--- tests completed ---'
Example 50
config = ConfigParser.RawConfigParser()
config.read('/home/ubuntu/fz/conf/app.cfg')

AWS_ACCESS_KEY = config.get('aws', 'accesskey')
AWS_SECRET_KEY = config.get('aws', 'secretkey')
ParsePy.APPLICATION_ID = config.get('parse', 'P_APP_ID')
ParsePy.MASTER_KEY = config.get('parse', 'P_MASTER_KEY')
_convert = config.get('fz', 'convertQueueName')
_upload = config.get('fz', 'uploadQueueName')

activedomain = 'http://%s' % (config.get('fz', 'activedomain'))

conn = SQSConnection(AWS_ACCESS_KEY, AWS_SECRET_KEY)

convert_q = conn.create_queue(_convert, 120)
upload_q = conn.create_queue(_upload, 120)


def processMessageFromSQS(temp_message):

    message_dict = json.loads(temp_message.get_body())
    convert_q.delete_message(temp_message)
    pprint.pprint(message_dict)

    figureObject = ParsePy.ParseQuery("UploadObject").get(
        message_dict['parseID'])

    if figureObject == None:
        print 'object not found'
Example 51
class SQSAuthParams(AWSMockServiceTestCase):
    connection_class = SQSConnection

    def setUp(self):
        super(SQSAuthParams, self).setUp()

    def default_body(self):
        return """<?xml version="1.0"?>
            <CreateQueueResponse>
              <CreateQueueResult>
                <QueueUrl>
                  https://queue.amazonaws.com/599169622985/myqueue1
                </QueueUrl>
              </CreateQueueResult>
              <ResponseMetadata>
                <RequestId>54d4c94d-2307-54a8-bb27-806a682a5abd</RequestId>
              </ResponseMetadata>
            </CreateQueueResponse>"""

    def test_auth_service_name_override(self):
        self.set_http_response(status_code=200)
        # We can use the auth_service_name to change what service
        # name to use for the credential scope for sigv4.
        self.service_connection.auth_service_name = 'service_override'

        self.service_connection.create_queue('my_queue')
        # Note the service_override value instead.
        self.assertIn('us-east-1/service_override/aws4_request',
                      self.actual_request.headers['Authorization'])

    def test_class_attribute_can_set_service_name(self):
        self.set_http_response(status_code=200)
        # The SQS class has an 'AuthServiceName' param of 'sqs':
        self.assertEqual(self.service_connection.AuthServiceName, 'sqs')

        self.service_connection.create_queue('my_queue')
        # And because of this, the value of 'sqs' will be used instead of
        # 'queue' for the credential scope:
        self.assertIn('us-east-1/sqs/aws4_request',
                      self.actual_request.headers['Authorization'])

    def test_auth_region_name_is_automatically_updated(self):
        region = SQSRegionInfo(name='us-west-2',
                               endpoint='us-west-2.queue.amazonaws.com')
        self.service_connection = SQSConnection(
            https_connection_factory=self.https_connection_factory,
            aws_access_key_id='aws_access_key_id',
            aws_secret_access_key='aws_secret_access_key',
            region=region)
        self.initialize_service_connection()
        self.set_http_response(status_code=200)

        self.service_connection.create_queue('my_queue')

        # Note the region name below is 'us-west-2'.
        self.assertIn('us-west-2/sqs/aws4_request',
                      self.actual_request.headers['Authorization'])

    def test_set_get_auth_service_and_region_names(self):
        self.service_connection.auth_service_name = 'service_name'
        self.service_connection.auth_region_name = 'region_name'

        self.assertEqual(self.service_connection.auth_service_name,
                         'service_name')
        self.assertEqual(self.service_connection.auth_region_name,
                         'region_name')

    def test_get_queue_with_owner_account_id_returns_queue(self):

        self.set_http_response(status_code=200)
        self.service_connection.create_queue('my_queue')

        self.service_connection.get_queue('my_queue', '599169622985')

        assert 'QueueOwnerAWSAccountId' in self.actual_request.params.keys()
        self.assertEquals(self.actual_request.params['QueueOwnerAWSAccountId'],
                          '599169622985')
Example 52
class SNSSubcribeSQSTest(unittest.TestCase):

    sqs = True
    sns = True

    def setUp(self):
        self.sqsc = SQSConnection()
        self.snsc = SNSConnection()

    def get_policy_statements(self, queue):
        attrs = queue.get_attributes('Policy')
        policy = json.loads(attrs.get('Policy', "{}"))
        return policy.get('Statement', {})

    def test_correct_sid(self):
        now = time.time()
        topic_name = queue_name = "test_correct_sid%d" % (now)

        timeout = 60
        queue = self.sqsc.create_queue(queue_name, timeout)
        self.addCleanup(self.sqsc.delete_queue, queue, True)
        queue_arn = queue.arn

        topic = self.snsc.create_topic(topic_name)
        topic_arn = topic['CreateTopicResponse']['CreateTopicResult']\
                ['TopicArn']
        self.addCleanup(self.snsc.delete_topic, topic_arn)

        expected_sid = hashlib.md5(
            (topic_arn + queue_arn).encode('utf-8')).hexdigest()
        resp = self.snsc.subscribe_sqs_queue(topic_arn, queue)

        found_expected_sid = False
        statements = self.get_policy_statements(queue)
        for statement in statements:
            if statement['Sid'] == expected_sid:
                found_expected_sid = True
                break
        self.assertTrue(found_expected_sid)

    def test_idempotent_subscribe(self):
        now = time.time()
        topic_name = queue_name = "test_idempotent_subscribe%d" % (now)

        timeout = 60
        queue = self.sqsc.create_queue(queue_name, timeout)
        self.addCleanup(self.sqsc.delete_queue, queue, True)
        initial_statements = self.get_policy_statements(queue)
        queue_arn = queue.arn

        topic = self.snsc.create_topic(topic_name)
        topic_arn = topic['CreateTopicResponse']['CreateTopicResult']\
                ['TopicArn']
        self.addCleanup(self.snsc.delete_topic, topic_arn)

        resp = self.snsc.subscribe_sqs_queue(topic_arn, queue)
        time.sleep(3)
        first_subscribe_statements = self.get_policy_statements(queue)
        self.assertEqual(len(first_subscribe_statements),
                         len(initial_statements) + 1)

        resp2 = self.snsc.subscribe_sqs_queue(topic_arn, queue)
        time.sleep(3)
        second_subscribe_statements = self.get_policy_statements(queue)
        self.assertEqual(len(second_subscribe_statements),
                         len(first_subscribe_statements))
Example 53
import ParsePy

from boto.s3.key import Key
from boto.sqs.message import Message
from boto.sqs.connection import SQSConnection
from boto.s3.connection import S3Connection

AWS_ACCESS_KEY = ''
AWS_SECRET_KEY = ''

ParsePy.APPLICATION_ID = ""
ParsePy.MASTER_KEY = ""

conn = SQSConnection(AWS_ACCESS_KEY, AWS_SECRET_KEY)

convert_q = conn.create_queue('FZconvertQueue', 120)
upload_q = conn.create_queue('FZuploadQueue', 120)

convert_q.clear()
upload_q.clear()

query = ParsePy.ParseQuery("UploadObject")
fzobjects = query.fetch()

for fzobj in fzobjects:
    print fzobj.objectId()
    fzobj.delete()

print convert_q.count()
print upload_q.count()
print len(fzobjects)
Example 54
import conf
from time import time
import json
import random
from boto.sqs.connection import SQSConnection, Message
from flask import Flask, render_template, request

VERSION = "0.1"

APP = Flask(__name__)

SQS_CONN = SQSConnection(conf.AWS_KEY, conf.AWS_SECRET)
SQS_QUEUE = SQS_CONN.create_queue(conf.SQS_QUEUE_NAME)

SURVEY = json.load(open(conf.SURVEY_FILE, 'rt'))

QUESTIONS_BY_ID = {q['id']: q for q in SURVEY['questions']}
ORDERS_BY_ID = {o['id']: o for o in SURVEY['question_orders']}


def build_sampling_map():
    m = {}
    i = 0
    for o in SURVEY['question_orders']:
        for j in range(o['frequency']):
            m[i + j] = o['id']
        i += o['frequency']
    return m


SAMPLING_MAP = build_sampling_map()
Example 55
# this script expects 3 environment variables
#    1. SQS_KEY_ID (preferably an IAM user with limited rights)
#    2. SQS_ACCESS_KEY (the accompanying secret key)
#    3. SQS_TASK_QUEUE (the queue to use)

import os
import sys

from boto.sqs.connection import SQSConnection
from boto.sqs.message import Message

import utils

# your amazon keys
key = os.environ['SQS_KEY_ID']
access = os.environ['SQS_ACCESS_KEY']
queue = os.environ['SQS_TASK_QUEUE']

if __name__ == '__main__':
    region_info = utils.get_region_info()
    sqs = SQSConnection(key, access, region=region_info)

    tasks = sqs.create_queue(queue)

    m = Message()
    m.set_body(sys.argv[1])
    tasks.write(m)