Example #1
 def request_images(self, params):
     if len(params)==0: return
     
     scene = params[0]['scene']
     scene = scene.replace('.3dm','')
     lowpriority = ""
     #print "low_priority"
     #print params[0]['low_priority']
     if params[0]['low_priority']==True:
         lowpriority = "_lowpriority"
     q_name = "%s%s_%s_%s" % (self.site_name, lowpriority, scene, 'request')
     conn = SQSConnection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
     q = conn.create_queue(q_name)
     q.set_message_class(Message)
     params = self.trunc_params(params)
     messages = []
     for i in range(len(params)):
         #params[i]['params']['textParam'] = 'sunsun'
         body = simplejson.dumps(params[i])
         
         sys.stderr.write(body + "\n")
         messages.append((i,base64.b64encode(body),0))
        
     for j in range(int(math.ceil(float(len(params))/10.0))):
         conn.send_message_batch(q, messages[j*10:(j+1)*10])
     sys.stderr.write("\n\n\nSent messages\n\n\n")
     return
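
SQS's SendMessageBatch call accepts at most 10 entries, which is why the loop above sends the prepared messages in slices of 10; each entry is an (id, body, delay_seconds) tuple, the format boto's send_message_batch expects. The same chunking, sketched as a standalone helper rather than part of the original method:

def batches(entries, size=10):
    # yield successive slices of at most `size` entries (SendMessageBatch limit is 10)
    for start in range(0, len(entries), size):
        yield entries[start:start + size]

# usage, with conn, q and messages as built in request_images above:
#     for batch in batches(messages):
#         conn.send_message_batch(q, batch)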
Example #2
def _process_message():
    if not g.sitemap_sqs_queue:
        return

    sqs = SQSConnection()
    sqs_q = sqs.get_queue(g.sitemap_sqs_queue)

    messages = sqs.receive_message(sqs_q, number_messages=1)

    if not messages:
        return

    message, = messages

    js = json.loads(message.get_body())
    s3path = parse_s3_path(js['location'])

    # There are some error cases that allow us to get messages
    # for sitemap creation that are now out of date.
    timestamp = js.get('timestamp')
    if timestamp is not None and _before_last_sitemap(timestamp):
        sqs_q.delete_message(message)
        return

    g.log.info("Got import job %r", js)

    subreddits = find_all_subreddits(s3path)
    store_sitemaps_in_s3(subreddits)

    sqs_q.delete_message(message)
Example #3
    def test_get_messages_attributes(self):
        conn = SQSConnection()
        current_timestamp = int(time.time())
        queue_name = 'test%d' % int(time.time())
        test = conn.create_queue(queue_name)
        self.addCleanup(conn.delete_queue, test)
        time.sleep(65)

        # Put a message in the queue.
        m1 = Message()
        m1.set_body('This is a test message.')
        test.write(m1)
        self.assertEqual(test.count(), 1)

        # Check all attributes.
        msgs = test.get_messages(num_messages=1, attributes='All')
        for msg in msgs:
            self.assertEqual(msg.attributes['ApproximateReceiveCount'], '1')
            first_rec = msg.attributes['ApproximateFirstReceiveTimestamp']
            first_rec = int(first_rec) / 1000
            self.assertTrue(first_rec >= current_timestamp)

        # Put another message in the queue.
        m2 = Message()
        m2.set_body('This is another test message.')
        test.write(m2)
        self.assertEqual(test.count(), 1)

        # Check a specific attribute.
        msgs = test.get_messages(num_messages=1,
                                 attributes='ApproximateReceiveCount')
        for msg in msgs:
            self.assertEqual(msg.attributes['ApproximateReceiveCount'], '1')
            with self.assertRaises(KeyError):
                msg.attributes['ApproximateFirstReceiveTimestamp']
Example #4
    def test_sqs_longpoll(self):
        c = SQSConnection()
        queue_name = "test_sqs_longpoll_%s" % int(time.time())
        queue = c.create_queue(queue_name)
        self.addCleanup(c.delete_queue, queue, True)
        messages = []

        # The basic idea is to spawn a timer thread that will put something
        # on the queue in 5 seconds and verify that our long polling client
        # sees the message after waiting for approximately that long.
        def send_message():
            messages.append(queue.write(queue.new_message("this is a test message")))

        t = Timer(5.0, send_message)
        t.start()
        self.addCleanup(t.join)

        start = time.time()
        response = queue.read(wait_time_seconds=10)
        end = time.time()

        t.join()
        self.assertEqual(response.id, messages[0].id)
        self.assertEqual(response.get_body(), messages[0].get_body())
        # The timer thread should send the message in 5 seconds, so
        # we're giving +- .5 seconds for the total time the queue
        # was blocked on the read call.
        self.assertTrue(4.5 <= (end - start) <= 5.5)
Example #5
class SqsApi(object):
    def __init__(self):
        self.conn = SQSConnection(aws_access_key_id=AWS_ACCESS_KEY,
                                  aws_secret_access_key=AWS_SECRET_KEY)

    def create_queue(self, name="MuscleOpsQ"):
        return self.conn.create_queue(name)

    def list_queues(self):
        return self.conn.get_all_queues()

    def write_message(self,
                      q,
                      msg="If you're sleepy and you know it; clap your hands!"
                      ):
        m = RawMessage()
        m.set_body(msg)
        q.write(m)

    def read_message(self,
                     q,
                     msg="If you're sleepy and you know it; clap your hands!"):
        rs = q.get_messages()
        m = rs[0].get_body() if len(rs) else msg
        return m
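
A minimal usage sketch for the SqsApi wrapper above; it assumes AWS_ACCESS_KEY and AWS_SECRET_KEY are defined, as in the snippet:

api = SqsApi()
q = api.create_queue('MuscleOpsQ')   # returns the existing queue if it is already there
api.write_message(q, 'wake up')      # body is sent as a RawMessage, no base64 encoding
print(api.list_queues())             # every queue visible to these credentials
print(api.read_message(q))           # first pending body, or the default message text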
Example #6
    def test_worker_consumes_queue(self):
        sqs = SQSConnection(region=get_sqs_region('us-east-1'))

        self.assertEqual(len(sqs.get_all_queues()), 0)

        queue = sqs.create_queue('test_events')

        queue.write(make_message({'test': '1'}))

        self.assertEqual(queue.count(), 1)

        worker = Worker()
        worker.ctrl.wait_time_seconds = 0.1
        worker.idle_time_seconds = 0.1
        worker.add_consumer('test_events', dummy_consumer)

        with spawn_worker(worker):
            time.sleep(.2)
            self.assertEqual(queue.count(), 0)

            queue.write(make_message({'test': '2'}))

            self.assertEqual(queue.count(), 1)

            time.sleep(.2)

            self.assertEqual(queue.count(), 0)
Example #7
def commentsubmit(req):
	sdb = boto.connect_sdb(AWSKey, AWSSecret)
	domain = sdb.get_domain('comment')
	form = req.form
	imagekey = form['imagekey']
	user = form['commentuser']
	cmt = form['comment']	
	import uuid
	from time import strftime
	guid = str(uuid.uuid1())
	item = domain.new_item(guid)
	item['submituser'] = user
	item['imagekey'] = imagekey
	item['comment'] = cmt
	item['status'] = "processing"
	item['submitdate'] = strftime("%Y-%m-%dT%H:%M:%S")
	item.save()
	sqsconn = SQSConnection(AWSKey, AWSSecret)
	q = sqsconn.get_queue('commentprocess')
	request = {}
	request['commentkey'] = guid
	request['submitdate'] = strftime("%Y-%m-%dT%H:%M:%S")
	request['comment'] = str(cmt)
	request['submituser'] = str(user)
	m = RawMessage()
	m.set_body(json.write(request))
	status = q.write(m)
	response = {}
	if status==m:
		response['complete'] = True
		response['commentkey'] = guid
	else:
		response['complete'] = False
	return json.write(response)
Example #9
 def op_consume(self, args):
     sqs = SQSConnection()
     q = sqs.get_queue('test')
     q.set_message_class(RawMessage)
     for message in q.get_messages():
         print message.get_body()
         q.delete_message(message)
Example #10
File: models.py Project: tml/norc
 def __init__(self, *args, **kwargs):
     Queue.__init__(self, *args, **kwargs)
     c = SQSConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
     self.queue = c.lookup(self.name)
     if not self.queue:
         self.queue = c.create_queue(self.name, 1)
     self.connection = c
Example #11
    def __init__(self,num_threads):
        
        # figure out who(m) we are
        self.hostname = os.uname()[1];
        
        # make a note of when we launched
        self.start_time = datetime.datetime.now();
        
        # create a connection to SQS
        conn = SQSConnection();
        
        # ask for the JOB_QUEUE
        self.jobQueue = conn.get_queue(JOB_QUEUE);
        
        # ask for the ERR_QUEUE
        self.errQueue = conn.get_queue(ERR_QUEUE);
        
        # setup reflection for error logging etc
        self.reflection = Reflection.Reflect();
        
        # init empty list of threads
        self.threads = [];
        
        # make note of the number of requested threads
        self.num_threads = num_threads;

        # empty node states timer until we get job
        self.node_stats_timer = None;
Example #12
    def publish_node_stats(self):

        try:

            # create a connection to SQS
            conn = SQSConnection()

            # ask for the QUEUE
            q = conn.get_queue(NODE_STATS_QUEUE)

            # create a new message
            m = Message()

            # populate the message with stats
            m.set_body(self.node_stats())

            # publish the message to SQS
            q.write(m)

            # schedule another publish
            self.schedule_node_stats()

        except Exception as e:

            # blab about the err on std err
            os.sys.stderr.write(str(e) + '\n')

            # log error message to the error queue
            self.publish_error('publish_node_stats: ' + str(e))
Example #13
def main():
    global task
    parser = OptionParser(
        "%prog --daemon_status_id <id> --queue_name <queue_name> \
[--nice <0>] [--stdout <file_name|DEFAULT>] [--stderr <file_name|STDOUT>] [--debug]"
    )
    parser.add_option(
        "--daemon_status_id", action="store", type="int", help="The id of the daemon status that launched this Task"
    )
    parser.add_option("--queue_name", action="store", type="string", help="The name of the queue from which to read")
    parser.add_option("--nice", action="store", type="int", default=0, help="nice this process. defaults to 5.")
    parser.add_option(
        "--stdout",
        action="store",
        type="string",
        help="Send stdout to this file, or special value 'DEFAULT' \
sends it a the stream unique to this Task request",
    )
    parser.add_option(
        "--stderr",
        action="store",
        type="string",
        help="Send stderr to this file, or special value 'STDOUT' sends it to stdout",
    )
    parser.add_option("--debug", action="store_true", help="more messages")
    (options, args) = parser.parse_args()

    # option parsing
    if not options.daemon_status_id or not options.queue_name:
        sys.exit(parser.get_usage())
    log.set_logging_debug(options.debug)

    if not options.nice == 0:
        os.nice(options.nice)

    console_stderr = None
    try:
        c = SQSConnection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        q = c.get_queue(options.queue_name)
        boto_message = q.read()
        task = __get_task__(boto_message, options.queue_name)
        if task == None:
            log.debug("No task in queue '%s' pid:%s" % (options.queue_name, os.getpid()))
            sys.exit(133)
        else:
            log.debug("Starting SQS Queue '%s' Task:%s pid:%s" % (options.queue_name, task.get_id(), os.getpid()))
            q.delete_message(boto_message)
            console_stderr = __redirect_outputs__(task, options.stdout, options.stderr)
            daemon_status = __get_daemon_status__(options.daemon_status_id)
            __run_task__(task, daemon_status)
            ending_status = task.get_current_run_status()
            if ending_status == None:
                sys.exit(134)
            if not ending_status.was_successful():
                sys.exit(1)
    except SystemExit, se:
        # in python 2.4, SystemExit extends Exception, this is changed in 2.5 to
        # extend BaseException, specifically so this check isn't necessary. But
        # we're using 2.4; upon upgrade, this check will be unnecessary but ignorable.
        sys.exit(se.code)
Example #14
    def __init__(self, num_threads):

        # figure out who(m) we are
        self.hostname = os.uname()[1]

        # make a note of when we launched
        self.start_time = datetime.datetime.now()

        # create a connection to SQS
        conn = SQSConnection()

        # ask for the JOB_QUEUE
        self.jobQueue = conn.get_queue(JOB_QUEUE)

        # ask for the ERR_QUEUE
        self.errQueue = conn.get_queue(ERR_QUEUE)

        # setup reflection for error logging etc
        self.reflection = Reflection.Reflect()

        # init empty list of threads
        self.threads = []

        # make note of the number of requested threads
        self.num_threads = num_threads

        # empty node states timer until we get job
        self.node_stats_timer = None
Example #15
 def publish_node_stats(self):
     
     try:
     
         # create a connection to SQS
         conn = SQSConnection();
     
         # ask for the QUEUE
         q = conn.get_queue(NODE_STATS_QUEUE);
         
         # create a new message
         m = Message();
         
         # populate the message with stats
         m.set_body(self.node_stats());
         
         # publish the message to SQS
         q.write(m);
         
         # schedule another publish
         self.schedule_node_stats();
         
     except Exception as e:
         
         # blab about the err on std err
         os.sys.stderr.write(str(e)+'\n');
         
         # log error message to the error queue
         self.publish_error('publish_node_stats: '+str(e));
Example #16
 def queue(self):
     if self._queue:
         return self._queue
     conn = SQSConnection(self._aws_access_key_id, self._aws_secret_access_key, self._is_secure, self._port, region=self._region)
     self._queue = conn.create_queue(self._queue_name)
     self._queue.set_message_class(self._message_class)
     return self._queue
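
The lazy accessor above relies on attributes initialised elsewhere in its class; a hedged sketch of what the enclosing __init__ plausibly looks like (attribute names follow the accessor, the Message default and other defaults are assumptions):

 def __init__(self, queue_name, aws_access_key_id, aws_secret_access_key,
              message_class=Message, is_secure=True, port=None, region=None):
     self._queue_name = queue_name
     self._aws_access_key_id = aws_access_key_id
     self._aws_secret_access_key = aws_secret_access_key
     self._message_class = message_class
     self._is_secure = is_secure
     self._port = port
     self._region = region
     self._queue = None  # created lazily by queue() on first use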
Example #17
def main():
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'hcq:o:t:',
                                   ['help', 'clear', 'queue=',
                                    'output=', 'timeout='])
    except:
        usage()
        sys.exit(2)
    queue_name = ''
    output_file = ''
    timeout = 30
    clear = False
    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit()
        if o in ('-q', '--queue'):
            queue_name = a
        if o in ('-o', '--output'):
            output_file = a
        if o in ('-c', '--clear'):
            clear = True
        if o in ('-t', '--timeout'):
            timeout = int(a)
    c = SQSConnection()
    if queue_name:
        try:
            rs = [c.create_queue(queue_name)]
        except SQSError, e:
            print 'An Error Occurred:'
            print '%s: %s' % (e.status, e.reason)
            print e.body
            sys.exit()
Example #19
    def __init__(self, conf, name):
        super(MessageQueue, self).__init__(conf, name)

        conn = SQSConnection(conf['access_key'], conf['secret_access_key'])
        self._queue = conn.create_queue(self.name)
        self._queue.set_attribute('MessageRetentionPeriod', base.FOURTEEN_DAYS)
        self._vtime = base.get_vtime(conf)
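
base.FOURTEEN_DAYS and base.get_vtime come from elsewhere in the project; the retention constant presumably mirrors the 14-day maximum MessageRetentionPeriod that SQS allows, i.e. something like:

FOURTEEN_DAYS = 14 * 24 * 60 * 60  # 1209600 seconds, the SQS maximum retention period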
Example #21
 def test_credentialled_SQS_access(self):
     "check that the credentials can access the SQS service"
     creds = aws_credentials.get_credentials()
     region_name = 'eu-west-1'
     region = [r for r in regions() if r.name == region_name][0]
     conn = SQSConnection(aws_access_key_id=creds[0], aws_secret_access_key=creds[1], region=region)
     q = conn.create_queue("PLEASE_KEEP_FOR_TESTING", 30)
     assert q
Example #22
    def __init__(self, name, visibility_timeout=60):
        self.visibility_timeout = visibility_timeout
        self.conn = SQSConnection(region=boto.sqs.regions()[1])  # eu-west1
        self.q = self.conn.create_queue(name)

        if self.q is None:
            raise Exception("Could not get that queue " + name)
        self.name = name
Example #23
 def __init__(self):
     self.last_msg = -1
     self.buffered = []
     self.msg_num = 0
     self.access_key_id = ""
     self.secret_access_key = ""
     self.conn = SQSConnection(self.access_key_id, self.secret_access_key,
                               True, None, None, None, None, None)
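
Assuming boto 2's usual connection signature, the six positional arguments above correspond to is_secure, port, proxy, proxy_port, proxy_user and proxy_pass; an equivalent keyword form (variable names illustrative) is easier to read:

conn = SQSConnection(access_key_id, secret_access_key,
                     is_secure=True, port=None, proxy=None,
                     proxy_port=None, proxy_user=None, proxy_pass=None)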
Example #24
    def test_empty_worker(self):
        sqs = SQSConnection(region=get_sqs_region('us-east-1'))

        self.assertEqual(len(sqs.get_all_queues()), 0)

        with spawn_worker(Worker()):
            # Worker is empty, not registering any queues
            self.assertEqual(len(sqs.get_all_queues()), 0)
Example #25
 def get_lowpriority_wait_count(self, scenes): 
     conn = SQSConnection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY) 
     count = 0
     for scene in scenes:
         q_name = "%s%s_%s_%s" % (self.site_name, '_lowpriority', scene, 'request')
         q = conn.create_queue(q_name)
         count += q.count()
     return count
Example #26
def main():
    try:
        opts, args = getopt.getopt(
            sys.argv[1:], 'hcq:o:t:r:',
            ['help', 'clear', 'queue=', 'output=', 'timeout=', 'region='])
    except:
        usage()
        sys.exit(2)
    queue_name = ''
    output_file = ''
    timeout = 30
    region = ''
    clear = False
    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit()
        if o in ('-q', '--queue'):
            queue_name = a
        if o in ('-o', '--output'):
            output_file = a
        if o in ('-c', '--clear'):
            clear = True
        if o in ('-t', '--timeout'):
            timeout = int(a)
        if o in ('-r', '--region'):
            region = a
    if region:
        c = boto.sqs.connect_to_region(region)
        if c is None:
            print 'Invalid region (%s)' % region
            sys.exit(1)
    else:
        c = SQSConnection()
    if queue_name:
        try:
            rs = [c.create_queue(queue_name)]
        except SQSError as e:
            print 'An Error Occurred:'
            print '%s: %s' % (e.status, e.reason)
            print e.body
            sys.exit()
    else:
        try:
            rs = c.get_all_queues()
        except SQSError as e:
            print 'An Error Occurred:'
            print '%s: %s' % (e.status, e.reason)
            print e.body
            sys.exit()
    for q in rs:
        if clear:
            n = q.clear()
            print 'clearing %d messages from %s' % (n, q.id)
        elif output_file:
            q.dump(output_file)
        else:
            print q.id, q.count(vtimeout=timeout)
Example #27
def add_feed_mailserver(mx):
    from boto.sqs.connection import SQSConnection
    from boto.sqs.message import Message
    conn = SQSConnection('ID', 'KEY')
    q = conn.get_queue('mailserver_list')
    m = Message()
    m.set_body(mx.ipaddr)
    q.write(m)
    return True
Example #28
 def _config(self, queue_name, aws_access_key_id, aws_secret_access_key, region):
     cx = SQSConnection(aws_access_key_id=aws_access_key_id,
                        aws_secret_access_key=aws_secret_access_key,
                        region=region,
                        is_secure=True)
     self._queue = cx.get_queue(queue_name)
     if not self._queue:
         raise Exception('Unable to load sqs queue %s with access_key_id %s in region %s' %
                         (queue_name, aws_access_key_id, region))
Example #29
def finshi_task_sns(taskname,node_id):
    SQS_Id='TaskFinished'
    time = get_time_now()
    message = time + '|' + taskname + '|' + str(node_id)
    conn = SQSConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    my_queue = conn.get_queue(SQS_Id)
    m = Message()
    m.set_body(message) 
    my_queue.write(m)
Example #30
File: sqs.py Project: imclab/pi-eye
def push(body, queue='pi-status'):
    '''
    Create a JSON-encoded boto-style Message object and write it to the queue.
    '''
    sqs = SQSConnection()
    sqs_queue = sqs.create_queue(queue)

    message = JSONMessage(body=body)
    sqs_queue.write(message)
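
JSONMessage is not shown in this snippet; a plausible minimal implementation is a RawMessage subclass that JSON-encodes the body on write and decodes it on read (a sketch, not necessarily the project's actual class):

import json
from boto.sqs.message import RawMessage

class JSONMessage(RawMessage):
    def encode(self, value):
        # serialize the body before it is written to SQS
        return json.dumps(value)

    def decode(self, value):
        # parse the body when a message is read back
        try:
            return json.loads(value)
        except ValueError:
            return value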
Example #31
 def adjust_ghx(self, file_name, scene):
     scene = scene.replace('.3dm','')
     q_name = "%s_%s_%s" % (self.site_name, scene, 'request')
     conn = SQSConnection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
     q = conn.create_queue(q_name)
     q.set_message_class(Message)
     body = simplejson.dumps({'operation': 'adjust_ghx', 'gh_file': file_name})
     sys.stderr.write(body + "\n")
     conn.send_message_batch(q, [(0,base64.b64encode(body),0)])
Example #32
File: aws.py Project: aerwin3/qork
    def __init__(self, conf, name):
        super(MessageQueue, self).__init__(conf, name)

        conn = SQSConnection(
            conf['access_key'], conf['secret_access_key'])
        self._queue = conn.create_queue(self.name)
        self._queue.set_attribute(
            'MessageRetentionPeriod', base.FOURTEEN_DAYS)
        self._vtime = base.get_vtime(conf)
Example #33
    def setup_sqs_queue(self):
        conn = SQSConnection()
        q = conn.create_queue('some-queue')

        m = Message()
        m.set_body('This is my first message.')
        q.write(m)

        self.assertEqual(q.count(), 1)
Example #35
class TaskSQS:

    TASK_QUEUE = "tasks"
    RESULT_QUEUE = "results" 
    VISIBILITY_TIMEOUT = 120
    
    def __init__(self):
        self.conn = None
        self.taskq = None
        self.resultq = None

    def connect(self):
        # open connection
        self.conn = SQSConnection()
        
        # initialize queues
        self.taskq = self.conn.create_queue(self.TASK_QUEUE, self.VISIBILITY_TIMEOUT)
        self.taskq.set_message_class(JSONMessage)
        self.resultq = self.conn.create_queue(self.RESULT_QUEUE, self.VISIBILITY_TIMEOUT)
        self.resultq.set_message_class(JSONMessage)
    
    def clear(self):
        self.taskq.clear()
        self.resultq.clear()
#        self.conn.delete_queue(self.taskq)
#        self.conn.delete_queue(self.resultq)
        self.taskq = None
        self.resultq = None
    
    def new_task(self, task):
        return JSONMessage(self.taskq, task)
    
    def new_result(self, result):
        return JSONMessage(self.resultq, result)

    def put_task(self, task):
        self.taskq.write(task)
        
    def next_task(self):
        next = None
        while next is None:
            next = self.taskq.read()
        return next
    
    def complete(self, task, result=None):
        self.taskq.delete_message(task)
        if result is not None:
            self.put_result(result)
    
    def put_result(self, result):
        self.resultq.write(result)

    def get_result(self):
        result = self.resultq.read()
        if result is not None:
            self.resultq.delete_message(result)
        return result
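
A short usage sketch for TaskSQS; the task and result payloads are hypothetical:

tasks = TaskSQS()
tasks.connect()

# producer: enqueue a task
tasks.put_task(tasks.new_task({'op': 'resize', 'key': 'img-0001'}))

# worker: poll until a task arrives, process it, then acknowledge it
msg = tasks.next_task()
tasks.complete(msg, result=tasks.new_result({'key': 'img-0001', 'ok': True}))

# producer: collect the result if one is ready (None otherwise)
print(tasks.get_result())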
Example #36
def queue(q_name, msg):

    conn_logger.debug('queue')
    sqs_conn = SQSConnection(AWS_U, AWS_K)
    q = sqs_conn.create_queue(q_name)
    m = Message()
    m.set_body(msg)
    rs = q.write(m)
    
    conn_logger.debug('put "%s" to %s queue: ' % (msg, q_name))
Example #37
def send_task_sns(taskname,taskdata,taskino,taskitype):
    SQS_Id=['NewTaskForNode1','NewTaskForNode2','NewTaskForNode3','NewTaskForNode4']
    time = get_time_now()
    message = time + '|' + taskname + '|' + taskdata + '|' + taskino + '|' + taskitype
    conn = SQSConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    for i in range(int(taskino)):
        my_queue = conn.get_queue(SQS_Id[i])
        m = Message()
        m.set_body(message) 
        my_queue.write(m)
Example #38
 def timeout(self):
     from boto.sqs.message import Message
     if not self._queue and self._connected:
         try:
             from boto.sqs.connection import SQSConnection
             sqs_connection = SQSConnection(self._access_key, self._secret_access_key)
             self._queue = sqs_connection.get_queue( self._queue_name )
         except Exception, e:
             LOGGER.error('Could not connect to logging queue %s'%self._queue_name, exc_info=e)
             self._connected = False
Example #39
def queue(q_name, msg):

    conn_logger.debug('queue')
    sqs_conn = SQSConnection(AWS_U, AWS_K)
    q = sqs_conn.create_queue(q_name)
    m = Message()
    m.set_body(msg)
    rs = q.write(m)

    conn_logger.debug('put "%s" to %s queue: ' % (msg, q_name))
Example #40
def submitimage(req):
	sdb = boto.connect_sdb(AWSKey, AWSSecret)
	domain = sdb.get_domain('picture')
	form = req.form
	tags = str(form['tags'])
	user = str(form['submituser'])
	description = str(form['description'])
	fileitem = form['image']
	import uuid
	from time import strftime
	guid = str(uuid.uuid1())
	item = domain.new_item(guid)
	try: # Windows needs stdio set for binary mode.
		import msvcrt
		msvcrt.setmode (0, os.O_BINARY) # stdin  = 0
		msvcrt.setmode (1, os.O_BINARY) # stdout = 1
	except ImportError:
		pass
	# strip leading path from file name to avoid directory traversal attacks
	fname = os.path.basename(fileitem.filename)
	# build absolute path to files directory
	dir_path = os.path.join(os.path.dirname(req.filename), 'files')
	open(os.path.join(dir_path, fname), 'wb').write(fileitem.file.read())
	from boto.s3.connection import S3Connection
	conn = S3Connection(AWSKey, AWSSecret)
	bucket = conn.get_bucket('theimageproject')
	from boto.s3.key import Key
	k = Key(bucket)
	k.key = guid + ".jpg"
	k.set_contents_from_filename(os.path.join(dir_path, fname))
	curtime = strftime("%Y-%m-%dT%H:%M:%S")
	item['description'] = description
	item['submituser'] = user
	item['submitdate'] = curtime
	item['rating'] = 0
	item['ratingcount'] = 0
	item['ratesort'] = "%s%s" % (0, curtime)
	item['status'] = "processing"
	item['tag'] = tags.split(',')
	item.save()
	sqsconn = SQSConnection(AWSKey, AWSSecret)
	q = sqsconn.get_queue('imageprocess')
	request = {}
	request['imagekey'] = guid
	request['submitdate'] = curtime
	m = RawMessage()
	m.set_body(json.write(request))
	status = q.write(m)
	response = {}
	if status==m:
		response['success'] = True
		response['imagekey'] = guid
	else:
		response['complete'] = False
	return json.write(response)
Example #41
 def __init__(self, name, acc_sec_pair=None, visibility_timeout=30):
     self.name = name
     if acc_sec_pair is None:
         acc_sec_pair = aws_credentials.get_credentials()
     self.region_name = 'eu-west-1'
     self.region = [r for r in regions() if r.name == self.region_name][0]
     self.conn = SQSConnection(aws_access_key_id=acc_sec_pair[0],
                               aws_secret_access_key=acc_sec_pair[1],
                               region=self.region)
     self.q = self.conn.create_queue(name, visibility_timeout)
     self.q.set_message_class(event.SQSEvent)
Example #42
 def sendMessageToSQSQueue(self, message_body, queue_name):
     try:
         sqsConn = SQSConnection(self.access_key, self.secret_key)
         queue = sqsConn.create_queue(queue_name)
         new_item_message = Message()
         new_item_message.set_body(message_body)
         if queue.write(new_item_message) :
             self.logger.info('Message added to processing queue.')
         else :
             self.logger.error('SQS service did not accept object into the queue.')
     except Exception, e:
         self.logger.error('Message could not be added to processing queue. ' + str(e))
Example #43
def _create_sqs_message(message):
    """A dev only function that drops a new message on the sqs queue."""
    sqs = SQSConnection()
    sqs_q = sqs.get_queue(g.sitemap_sqs_queue)

    # it returns None on failure
    assert sqs_q, "failed to connect to queue"

    sqs_message = sqs_q.new_message(body=json.dumps(message))
    sqs_q.write(sqs_message)

    g.log.info('Queued SQS message: %r', message)
Example #44
def main():
    conn = SQSConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    my_queue = conn.get_queue(SQS_Id)
    
    while True:
        print my_queue.count()
        if my_queue.count()>0 :
            task = my_queue.read()
            process(task.get_body())
            my_queue.delete_message(task)
        else:
            time.sleep(Pull_period)
Example #45
def create_queue(name):
    if settings.OPENSOCIAL_DEBUG:  # do nothing when running in a local environment
        return None

    if not name:
        raise JobQueueError('Queue name not specified')

    # Connect to SQS queue
    sri = SQSRegionInfo(name='ap-northeast-1', endpoint='ap-northeast-1.queue.amazonaws.com')
    conn = SQSConnection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, region=sri)
    q = conn.create_queue(name)
    return q
Example #46
File: sqs.py Project: imclab/pi-eye
def pop_loop(queue='pi-status', wait=5):
    # like pop(), but iterate
    sqs = SQSConnection()
    sqs_queue = sqs.create_queue(queue)
    sqs_queue.set_message_class(JSONMessage)

    while True:
        message = sqs_queue.read(wait_time_seconds=wait)
        if message is not None:
            body = message.get_body()
            message.delete()
            yield body
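
A short usage sketch for the pop_loop generator above; it long-polls indefinitely, so the loop runs until interrupted:

for status in pop_loop('pi-status', wait=20):
    # each yielded item is the message body, already decoded by JSONMessage
    print(status)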
Example #47
    def __init__(self, queue="sqs_handler_debug", aws_key=None, secret_key=None):
        ''' Sends log messages to SQS. Parameters:
        * queue is the SQS queue. This will be created if it does not exist. 
        * Optional: aws_key and secret_key. If these don't exist, it will look 
          at the appropriate environment variables. 
        '''

        logging.Handler.__init__(self)
        if aws_key and secret_key:
            conn = SQSConnection(aws_key, secret_key)
        else:
            conn = SQSConnection()
        self.q = conn.create_queue(queue)
Example #48
 def timeout(self):
     from boto.sqs.message import Message
     if not self._queue and self._connected:
         try:
             from boto.sqs.connection import SQSConnection
             sqs_connection = SQSConnection(self._access_key,
                                            self._secret_access_key)
             self._queue = sqs_connection.get_queue(self._queue_name)
         except Exception, e:
             LOGGER.error('Could not connect to logging queue %s' %
                          self._queue_name,
                          exc_info=e)
             self._connected = False
Example #49
    def test_queue_purge(self):
        conn = SQSConnection()
        test = self.create_temp_queue(conn)
        time.sleep(65)

        # Put some messages in the queue.
        for x in range(0, 4):
            self.put_queue_message(test)
        self.assertEqual(test.count(), 4)

        # Now purge the queue
        conn.purge_queue(test)

        # Now assert queue count is 0
        self.assertEqual(test.count(), 0)
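
create_temp_queue and put_queue_message are test helpers not shown in this snippet; a hedged sketch of what they plausibly do, based on how the test uses them:

    def create_temp_queue(self, conn):
        queue_name = 'test%d' % int(time.time())
        queue = conn.create_queue(queue_name)
        self.addCleanup(conn.delete_queue, queue)
        return queue

    def put_queue_message(self, queue):
        message = Message()
        message.set_body('This is a test message.')
        queue.write(message)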
Example #50
def dequeue(q_name, func, conn=None):

    conn_logger.debug('dequeuing %s' % q_name)
    sqs_conn = SQSConnection(AWS_U, AWS_K)
    q = sqs_conn.create_queue(q_name)
    #q.clear()
    rs = q.get_messages(5)
    if len(rs) == 0:
        conn_logger.info('%s queue is empty' % q_name)
    else:
        for m in rs:
            msg = m.get_body()
            if func(msg, conn):
                q.delete_message(m)
                conn_logger.info('dequeue done: %s from %s' % (msg, q_name))
Example #51
def _create_test_message():
    """A dev only function that drops a new message on the sqs queue."""
    sqs = SQSConnection()
    sqs_q = sqs.get_queue(g.sitemap_sqs_queue)

    # it returns None on failure
    assert sqs_q, "failed to connect to queue"

    message = sqs_q.new_message(body=json.dumps({
        'job_name': 'daily-sr-sitemap-reporting',
        'location': ('s3://reddit-data-analysis/big-data/r2/prod/' +
                     'daily_sr_sitemap_reporting/dt=2016-06-14'),
        'timestamp': _current_timestamp(),
    }))
    sqs_q.write(message)
Example #52
def main():
    
    # create a connection to SQS
    conn = SQSConnection();
    
    # ask for the JOB_QUEUE
    q = conn.get_queue(JOB_QUEUE);
    
    # create a new message
    m = Message();

    m.set_body(os.sys.stdin.read());
    
    # publish the message to SQS
    q.write(m);
Example #53
    def test_auth_region_name_is_automatically_updated(self):
        region = SQSRegionInfo(name='us-west-2',
                               endpoint='us-west-2.queue.amazonaws.com')
        self.service_connection = SQSConnection(
            https_connection_factory=self.https_connection_factory,
            aws_access_key_id='aws_access_key_id',
            aws_secret_access_key='aws_secret_access_key',
            region=region)
        self.initialize_service_connection()
        self.set_http_response(status_code=200)

        self.service_connection.create_queue('my_queue')
        # Note the region name below is 'us-west-2'.
        self.assertIn('us-west-2/sqs/aws4_request',
                      self.actual_request.headers['Authorization'])
Example #54
 def setUp(self):
     # Ensure the queue is clear before we start, or we'll lose more hair
     creds = aws_credentials.get_credentials()
     region_name = 'eu-west-1'
     region = [r for r in regions() if r.name == region_name][0]
     conn = SQSConnection(aws_access_key_id=creds[0],
                          aws_secret_access_key=creds[1],
                          region=region)
     q = conn.create_queue(self.qname, 30)
     cruft = q.get_messages(10)
     while cruft:
         for c in cruft:
              print 'deleting old message %s' % c.__dict__
             q.delete_message(c)
         cruft = q.get_messages(10)