def __init__(self, num_threads):
    """Initialize the worker node: record identity and start time, attach
    to the SQS job and error queues, and prepare thread bookkeeping.

    num_threads -- number of worker threads this node should run.
    """
    # figure out who(m) we are
    self.hostname = os.uname()[1]
    # make a note of when we launched
    self.start_time = datetime.datetime.now()
    # create a connection to SQS
    conn = SQSConnection()
    # ask for the JOB_QUEUE
    self.jobQueue = conn.get_queue(JOB_QUEUE)
    # ask for the ERR_QUEUE
    self.errQueue = conn.get_queue(ERR_QUEUE)
    # setup reflection for error logging etc
    self.reflection = Reflection.Reflect()
    # init empty list of threads
    self.threads = []
    # make note of the number of requested threads
    self.num_threads = num_threads
    # empty node stats timer until we get a job
    self.node_stats_timer = None
def __init__(self, num_threads):
    """Record node identity, attach to the SQS job/error queues, and set up
    empty thread-pool bookkeeping for `num_threads` workers."""
    self.hostname = os.uname()[1]              # which host we are running on
    self.start_time = datetime.datetime.now()  # launch timestamp
    self.num_threads = num_threads             # requested worker-thread count
    self.threads = []                          # no threads started yet
    self.node_stats_timer = None               # created once a job arrives
    # one SQS connection serves both queues
    sqs = SQSConnection()
    self.jobQueue = sqs.get_queue(JOB_QUEUE)
    self.errQueue = sqs.get_queue(ERR_QUEUE)
    # reflection helper used for error logging
    self.reflection = Reflection.Reflect()
def commentsubmit(req):
    # Handle a comment-form POST: persist the comment to the SimpleDB
    # 'comment' domain, then enqueue it on the 'commentprocess' SQS queue
    # for asynchronous processing. Returns a JSON string with 'complete'
    # and, on success, 'commentkey'.
    sdb = boto.connect_sdb(AWSKey, AWSSecret)
    domain = sdb.get_domain('comment')
    form = req.form
    imagekey = form['imagekey']
    user = form['commentuser']
    cmt = form['comment']
    import uuid
    from time import strftime
    # the random GUID names the SimpleDB item and keys the queued job
    guid = str(uuid.uuid1())
    item = domain.new_item(guid)
    item['submituser'] = user
    item['imagekey'] = imagekey
    item['comment'] = cmt
    item['status'] = "processing"  # presumably flipped by the queue consumer — confirm
    item['submitdate'] = strftime("%Y-%m-%dT%H:%M:%S")
    item.save()
    sqsconn = SQSConnection(AWSKey, AWSSecret)
    q = sqsconn.get_queue('commentprocess')
    request = {}
    request['commentkey'] = guid
    request['submitdate'] = strftime("%Y-%m-%dT%H:%M:%S")
    request['comment'] = str(cmt)
    request['submituser'] = str(user)
    m = RawMessage()
    m.set_body(json.write(request))
    status = q.write(m)
    response = {}
    # presumably q.write returns the message object on success — verify
    # against the boto version in use
    if status==m:
        response['complete'] = True
        response['commentkey'] = guid
    else:
        response['complete'] = False
    return json.write(response)
def main():
    # Pop a single task message from the named SQS queue and execute it.
    # Exit codes: usage string when required options are missing,
    # 133 = queue empty, 134 = task finished with no run status,
    # 1 = task ran but was unsuccessful.
    global task
    parser = OptionParser(
        "%prog --daemon_status_id <id> --queue_name <queue_name> \
[--nice <0>] [--stdout <file_name|DEFAULT>] [--stderr <file_name>|STDOUT>] [--debug]"
    )
    parser.add_option(
        "--daemon_status_id",
        action="store",
        type="int",
        help="The id of the daemon status that launched this Task"
    )
    parser.add_option("--queue_name", action="store", type="string",
                      help="The name of the queue from which to read")
    parser.add_option("--nice", action="store", type="int", default=0,
                      help="nice this process. defaults to 5.")
    parser.add_option(
        "--stdout",
        action="store",
        type="string",
        help="Send stdout to this file, or special value 'DEFAULT' \
sends it a the stream unique to this Task request",
    )
    parser.add_option(
        "--stderr",
        action="store",
        type="string",
        help="Send stderr to this file, or special value 'STDOUT' sends it to stdout",
    )
    parser.add_option("--debug", action="store_true", help="more messages")
    (options, args) = parser.parse_args()
    # option parsing
    if not options.daemon_status_id or not options.queue_name:
        sys.exit(parser.get_usage())
    log.set_logging_debug(options.debug)
    if not options.nice == 0:
        os.nice(options.nice)
    console_stderr = None
    try:
        c = SQSConnection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        q = c.get_queue(options.queue_name)
        boto_message = q.read()
        task = __get_task__(boto_message, options.queue_name)
        if task == None:
            log.debug("No task in queue '%s' pid:%s" % (options.queue_name, os.getpid()))
            sys.exit(133)
        else:
            log.debug("Starting SQS Queue '%s' Task:%s pid:%s" % (options.queue_name, task.get_id(), os.getpid()))
            # remove the message so no other worker picks it up
            q.delete_message(boto_message)
            console_stderr = __redirect_outputs__(task, options.stdout, options.stderr)
            daemon_status = __get_daemon_status__(options.daemon_status_id)
            __run_task__(task, daemon_status)
            ending_status = task.get_current_run_status()
            if ending_status == None:
                sys.exit(134)
            if not ending_status.was_successful():
                sys.exit(1)
    except SystemExit, se:
        # in python 2.4, SystemExit extends Exception, this is changed in 2.5 to
        # extend BaseException, specifically so this check isn't necessary. But
        # we're using 2.4; upon upgrade, this check will be unecessary but ignorable.
        sys.exit(se.code)
def publish_node_stats(self):
    """Push this node's stats onto NODE_STATS_QUEUE and reschedule the
    next publication; failures go to stderr and the error queue."""
    try:
        # connect and locate the stats queue
        queue = SQSConnection().get_queue(NODE_STATS_QUEUE)
        # wrap the current stats in a message and publish it
        msg = Message()
        msg.set_body(self.node_stats())
        queue.write(msg)
        # line up the next stats publication
        self.schedule_node_stats()
    except Exception as e:
        # complain on stderr and forward the error to the error queue
        os.sys.stderr.write(str(e) + '\n')
        self.publish_error('publish_node_stats: ' + str(e))
def _process_message():
    # Pull at most one sitemap-import message from the configured SQS queue,
    # import the subreddits it points to, and store generated sitemaps in S3.
    # No-op when the queue is unconfigured or empty.
    if not g.sitemap_sqs_queue:
        return
    sqs = SQSConnection()
    sqs_q = sqs.get_queue(g.sitemap_sqs_queue)
    messages = sqs.receive_message(sqs_q, number_messages=1)
    if not messages:
        return
    message, = messages
    js = json.loads(message.get_body())
    s3path = parse_s3_path(js['location'])
    # There are some error cases that allow us to get messages
    # for sitemap creation that are now out of date.
    timestamp = js.get('timestamp')
    if timestamp is not None and _before_last_sitemap(timestamp):
        # stale job: drop it without processing
        sqs_q.delete_message(message)
        return
    g.log.info("Got import job %r", js)
    subreddits = find_all_subreddits(s3path)
    store_sitemaps_in_s3(subreddits)
    # delete only after the work has completed, so an earlier failure
    # leaves the message visible for another attempt
    sqs_q.delete_message(message)
def op_consume(self, args): sqs = SQSConnection() q = sqs.get_queue('test') q.set_message_class(RawMessage) for message in q.get_messages(): print message.get_body() q.delete_message(message)
def publish_node_stats(self):
    """Publish this node's stats to NODE_STATS_QUEUE and schedule the next
    run; errors are written to stderr and forwarded to the error queue.
    """
    try:
        # create a connection to SQS and ask for the stats queue
        conn = SQSConnection()
        q = conn.get_queue(NODE_STATS_QUEUE)
        # populate a new message with the stats payload
        m = Message()
        m.set_body(self.node_stats())
        # publish the message to SQS
        q.write(m)
        # schedule another publish
        self.schedule_node_stats()
    except Exception as e:
        # blab about the err on std err
        os.sys.stderr.write(str(e) + '\n')
        # log error message to the error queue
        self.publish_error('publish_node_stats: ' + str(e))
def finshi_task_sns(taskname, node_id):
    """Announce task completion by writing 'time|taskname|node_id' to the
    'TaskFinished' SQS queue."""
    body = '|'.join([get_time_now(), taskname, str(node_id)])
    conn = SQSConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    queue = conn.get_queue('TaskFinished')
    msg = Message()
    msg.set_body(body)
    queue.write(msg)
def _config(self, queue_name, aws_access_key_id, aws_secret_access_key, region):
    """Open a secure SQS connection and bind self._queue to `queue_name`.

    Raises Exception when the queue cannot be found.
    """
    connection = SQSConnection(aws_access_key_id=aws_access_key_id,
                               aws_secret_access_key=aws_secret_access_key,
                               region=region,
                               is_secure=True)
    self._queue = connection.get_queue(queue_name)
    if self._queue:
        return
    # get_queue returned a falsy value: the queue is unavailable
    raise Exception('Unable to load sqs queue %s with access_key_id %s in region %s'
                    % (queue_name, aws_access_key_id, region))
def add_feed_mailserver(mx):
    """Publish the mail server's IP address on the 'mailserver_list' SQS
    queue; always returns True."""
    from boto.sqs.connection import SQSConnection
    from boto.sqs.message import Message
    queue = SQSConnection('ID', 'KEY').get_queue('mailserver_list')
    msg = Message()
    msg.set_body(mx.ipaddr)
    queue.write(msg)
    return True
def send_task_sns(taskname, taskdata, taskino, taskitype):
    """Fan a new-task message out to the first `taskino` per-node SQS
    queues; the body is 'time|name|data|count|type'."""
    node_queues = ['NewTaskForNode1', 'NewTaskForNode2',
                   'NewTaskForNode3', 'NewTaskForNode4']
    body = '|'.join([get_time_now(), taskname, taskdata, taskino, taskitype])
    conn = SQSConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    # deliver one copy per requested node queue
    for queue_name in node_queues[:int(taskino)]:
        msg = Message()
        msg.set_body(body)
        conn.get_queue(queue_name).write(msg)
def timeout(self):
    # Lazily (re)connect to the SQS logging queue when the timer fires;
    # on failure, log and clear self._connected so subsequent timeouts do
    # not keep retrying.
    # NOTE(review): the Message import appears unused in this method.
    from boto.sqs.message import Message
    if not self._queue and self._connected:
        try:
            from boto.sqs.connection import SQSConnection
            sqs_connection = SQSConnection(self._access_key, self._secret_access_key)
            self._queue = sqs_connection.get_queue( self._queue_name )
        except Exception, e:
            LOGGER.error('Could not connect to logging queue %s'%self._queue_name, exc_info=e)
            self._connected = False
def submitimage(req):
    # Handle an image-upload POST: save the file locally, upload it to S3,
    # record metadata in the SimpleDB 'picture' domain, and enqueue an
    # 'imageprocess' SQS job. Returns a JSON string describing the outcome.
    sdb = boto.connect_sdb(AWSKey, AWSSecret)
    domain = sdb.get_domain('picture')
    form = req.form
    tags = str(form['tags'])
    user = str(form['submituser'])
    description = str(form['description'])
    fileitem = form['image']
    import uuid
    from time import strftime
    # the GUID names both the SimpleDB item and the S3 key
    guid = str(uuid.uuid1())
    item = domain.new_item(guid)
    try:
        # Windows needs stdio set for binary mode.
        import msvcrt
        msvcrt.setmode (0, os.O_BINARY) # stdin = 0
        msvcrt.setmode (1, os.O_BINARY) # stdout = 1
    except ImportError:
        pass
    # strip leading path from file name to avoid directory traversal attacks
    fname = os.path.basename(fileitem.filename)
    # build absolute path to files directory
    dir_path = os.path.join(os.path.dirname(req.filename), 'files')
    # NOTE(review): the file handle is never closed and the local copy is
    # never removed after the S3 upload
    open(os.path.join(dir_path, fname), 'wb').write(fileitem.file.read())
    from boto.s3.connection import S3Connection
    conn = S3Connection(AWSKey, AWSSecret)
    bucket = conn.get_bucket('theimageproject')
    from boto.s3.key import Key
    k = Key(bucket)
    k.key = guid + ".jpg"
    k.set_contents_from_filename(os.path.join(dir_path, fname))
    curtime = strftime("%Y-%m-%dT%H:%M:%S")
    item['description'] = description
    item['submituser'] = user
    item['submitdate'] = curtime
    item['rating'] = 0
    item['ratingcount'] = 0
    item['ratesort'] = "%s%s" % (0, curtime)
    item['status'] = "processing"
    item['tag'] = tags.split(',')
    item.save()
    sqsconn = SQSConnection(AWSKey, AWSSecret)
    q = sqsconn.get_queue('imageprocess')
    request = {}
    request['imagekey'] = guid
    request['submitdate'] = curtime
    m = RawMessage()
    m.set_body(json.write(request))
    status = q.write(m)
    response = {}
    # presumably q.write returns the message object on success — verify
    if status==m:
        response['success'] = True
        response['imagekey'] = guid
    else:
        # NOTE(review): failure path uses key 'complete' while the success
        # path uses 'success' — likely an unintended inconsistency; confirm
        # against the client before changing.
        response['complete'] = False
    return json.write(response)
def _create_sqs_message(message):
    """A dev only function that drops a new message on the sqs queue."""
    connection = SQSConnection()
    queue = connection.get_queue(g.sitemap_sqs_queue)
    # get_queue returns None on failure
    assert queue, "failed to connect to queue"
    queue.write(queue.new_message(body=json.dumps(message)))
    g.log.info('Queued SQS message: %r', message)
def _create_sqs_message(message):
    """A dev only function that drops a new message on the sqs queue."""
    q = SQSConnection().get_queue(g.sitemap_sqs_queue)
    assert q, "failed to connect to queue"  # get_queue returns None on failure
    body = json.dumps(message)
    q.write(q.new_message(body=body))
    g.log.info('Queued SQS message: %r', message)
def main(): conn = SQSConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY) my_queue = conn.get_queue(SQS_Id) while True: print my_queue.count() if my_queue.count()>0 : task = my_queue.read() process(task.get_body()) my_queue.delete_message(task) else: time.sleep(Pull_period)
class SqsTaskQueue(TaskQueue):
    # Task queue backed by a single SQS queue, bound to one brand.
    def __init__(self, sqs_name, brand):
        self._conn = SQSConnection(ACCESS_KEY, SECRET_KEY)
        self._q = self._conn.get_queue(sqs_name)
        self.brand = brand
    def add(self, task_name, brand, task_params=None, **kwargs):
        # Enqueue `task_name` as a JSON payload.
        # NOTE(review): the incoming `brand` and `task_params` arguments are
        # ignored — params come from the module-level task_info table and the
        # instance's own brand is used. Confirm this is intentional.
        task_params = task_info.get(task_name, {"time_taken": 0})
        # NOTE(review): update() mutates the shared task_info entry in place,
        # so later calls for the same task_name see the added "brand" key.
        task_params.update({"brand": self.brand})
        payload = {"task_name": task_name, "params": task_params}
        payload_as_str = json.dumps(payload)
        self._conn.send_message(self._q, payload_as_str)
class SQSHandler(logging.Handler):  # Inherit from logging.Handler
    """Logging handler that ships each formatted record to an SQS queue."""

    def __init__(self, access_key, secret_key, queue):
        logging.Handler.__init__(self)
        self.connection = SQSConnection(access_key, secret_key)
        # get_queue returns None when the queue does not exist; emit()
        # checks for that before writing
        self.queue = self.connection.get_queue(queue)

    def emit(self, record):
        # A handler's emit() must never let exceptions escape into the
        # application's logging call; route failures through the standard
        # logging error machinery instead (logging.Handler.handleError).
        try:
            if self.queue:
                m = Message()
                m.set_body(self.format(record))
                self.queue.write(m)
        except Exception:
            self.handleError(record)
def timeout(self):
    # Lazily (re)connect to the SQS logging queue when the timer fires;
    # a failure logs the error and clears self._connected so later
    # timeouts stop retrying.
    # NOTE(review): the Message import appears unused in this method.
    from boto.sqs.message import Message
    if not self._queue and self._connected:
        try:
            from boto.sqs.connection import SQSConnection
            sqs_connection = SQSConnection(self._access_key, self._secret_access_key)
            self._queue = sqs_connection.get_queue(self._queue_name)
        except Exception, e:
            LOGGER.error('Could not connect to logging queue %s' % self._queue_name, exc_info=e)
            self._connected = False
class SqsTaskQueue(TaskQueue):
    """TaskQueue implementation that serializes tasks as JSON onto SQS."""

    def __init__(self, sqs_name):
        self._conn = SQSConnection()
        self._q = self._conn.get_queue(sqs_name)

    def add(self, task_name, task_params=None, **kwargs):
        """Enqueue `task_name` with `task_params` (defaults to {})."""
        params = task_params or {}
        body = json.dumps({"task_name": task_name, "params": params})
        self._conn.send_message(self._q, body)
class SQS(object): def __init__(self, config="config.ini"): if isinstance(config, basestring): config = credentials.ConfigFileCredentials(config) elif not isinstance(config, credentials.Credentials): raise TypeError("Unsupported config parameter type") aws_access_key_id, aws_secret_access_key, aws_queue = config.get_data() try: self.conn = SQSConnection(aws_access_key_id, aws_secret_access_key) self.set_queue(aws_queue) except: print 'Error connection' def get_all_queues(self): return self.conn.get_all_queues() def get_queue_attributes(self): return self.conn.get_queue_attributes(self.queue, attribute='All') def create_queue(self, queue, timeout): return self.conn.create_queue(queue, timeout) def set_queue(self, queue): self.queue = self.conn.get_queue(queue) return True def get_messages(self, limit=10): return self.queue.get_messages(limit) def count(self): #print "Count: %s" % self.queue.count() return self.queue.count() def write(self, data): m = Message() m.set_body(json.dumps(data)) return self.queue.write(m) def delete(self, id): #print "Eliminando %s" % id self.queue.delete_message(id) def clear(self): return self.queue.clear() def delete_queue(self): return self.conn.delete_queue(self.queue)
def submit():
    # Interactive CLI: enqueue a world-generation job (seed + optional
    # spawn coords) on 'overviewer-genfromseed' and record it in SimpleDB.
    if len(sys.argv) < 3:
        print "Usage:"
        print " %s -seed <seed> [<spawn coords>]" % sys.argv[0]
        return
    # TODO use less crappy command line parsing
    seed = sys.argv[2]
    if len(sys.argv) == 4:
        # spawn given as "x,y,z"
        spawn = [int(x) for x in sys.argv[3].split(",")]
        assert(len(spawn) == 3)
        print "Generate world with this seed (\"%s\") with spawn %r [y/N]?" % (seed, spawn)
    else:
        spawn = None
        print "Generate world with this seed (\"%s\") [y/N]?" % seed
    if raw_input().lower() == 'y':
        uid = uuid.uuid4()
        print "Submitting job %s to queue..." % uid
        sqs = SQSConnection()
        sdb = SDBConnection()
        queue = sqs.get_queue("overviewer-genfromseed")
        db = sdb.get_domain("overviewerdb")
        print queue
        print db
        data = dict()
        data['uuid'] = str(uid)
        data['seed'] = seed
        data['generated'] = False  # presumably set True by the worker — confirm
        if spawn:
            data['target_spawn'] = spawn
        # record the job in the db before enqueueing it
        if not db.put_attributes(uid, data):
            print "***Error: Failed to update the db"
            return 1
        msg = Message()
        msg.set_body(str(uid))
        if not queue.write(msg):
            print "***Error: Failed to enqueue"
            return 1
        print "Ok, job enqueued"
    else:
        print "Ok, not submitting. Bye"
    return
def broadcast_alert(ipaddr, ipport, mx_pk):
    """Send an 'ip:port:pk' scan request to every probe region whose
    recorded status is 'up'."""
    from boto.sqs.connection import SQSConnection
    from boto.sqs.message import Message
    conn = SQSConnection('ID', 'KEY')
    # look up each probe's liveness record
    sg_obj = ProbeStatus.objects.get(probe='sg')
    uk_obj = ProbeStatus.objects.get(probe='uk')
    us_obj = ProbeStatus.objects.get(probe='us')
    # pair each probe with its regional queue
    targets = [
        (uk_obj, conn.get_queue('helomx_to_uk')),
        (sg_obj, conn.get_queue('helomx_to_sg')),
        (us_obj, conn.get_queue('helomx_to_us')),
    ]
    msg = Message()
    msg.set_body("%s:%s:%s" % (ipaddr, ipport, mx_pk))
    # only probes currently marked 'up' receive the alert
    for probe, queue in targets:
        if probe.status == 'up':
            queue.write(msg)
def _create_test_message():
    """A dev only function that drops a new message on the sqs queue."""
    queue = SQSConnection().get_queue(g.sitemap_sqs_queue)
    # get_queue returns None on failure
    assert queue, "failed to connect to queue"
    payload = {
        'job_name': 'daily-sr-sitemap-reporting',
        'location': ('s3://reddit-data-analysis/big-data/r2/prod/' +
                     'daily_sr_sitemap_reporting/dt=2016-06-14'),
        'timestamp': _current_timestamp(),
    }
    queue.write(queue.new_message(body=json.dumps(payload)))
def main():
    """Read all of stdin and publish it as a single message on JOB_QUEUE."""
    # create a connection to SQS and ask for the JOB_QUEUE
    conn = SQSConnection()
    q = conn.get_queue(JOB_QUEUE)
    # wrap everything on stdin in one message
    m = Message()
    m.set_body(os.sys.stdin.read())
    # publish the message to SQS
    q.write(m)
def submit(): if len(sys.argv) < 3: print "Usage:" print " %s -submit <world uuid>" % sys.argv[0] return sdb = SDBConnection() db = sdb.get_domain("overviewerdb") # TODO use less crappy command line parsing world_uuid = uuid.UUID(sys.argv[2]) world_item = db.get_item(world_uuid) if not world_item: print "Can't find that world!" return 1 print "Submit this world for rendering? [y/N]" if raw_input().lower() != 'y': return "Ok, nevermind." return 0 from boto.sqs.connection import SQSConnection sqs = SQSConnection() queue = sqs.get_queue("overviewer-render") render_uuid = uuid.uuid4() print "Render UUID:", render_uuid data = dict() data['uuid'] = str(render_uuid) data['rendered'] = False data['world_uuid'] = str(world_uuid) if not db.put_attributes(str(render_uuid), data): print "***Error: Failed to update the db" return 1 msg = Message() msg.set_body(str(render_uuid)) if not queue.write(msg): print "***Error: Failed to enqueue" return 1 print "Ok, job enqueued" return 0
def _recieve_sqs_message():
    # Generator: yield at most one parsed sitemap-import job from SQS.
    # Yields a bare None (then returns) when the queue is empty.
    sqs = SQSConnection()
    sqs_q = sqs.get_queue(g.sitemap_sqs_queue)
    messages = sqs.receive_message(sqs_q, number_messages=1)
    if not messages:
        yield
        return
    message, = messages
    js = json.loads(message.get_body())
    g.log.info('Received import job %r', js)
    yield _normalize_sqs_message(js)
    # runs only when the consumer resumes the generator after the yield,
    # so the message is deleted only after it has been handed off
    sqs_q.delete_message(message)
def _recieve_sqs_message():
    # Generator yielding at most one normalized sitemap-import job from the
    # configured SQS queue; yields a bare None when the queue is empty.
    sqs = SQSConnection()
    sqs_q = sqs.get_queue(g.sitemap_sqs_queue)
    messages = sqs.receive_message(sqs_q, number_messages=1)
    if not messages:
        yield
        return
    message, = messages
    js = json.loads(message.get_body())
    g.log.info('Received import job %r', js)
    yield _normalize_sqs_message(js)
    # deletion happens only after the consumer resumes the generator,
    # i.e. after the yielded job has been handed off
    sqs_q.delete_message(message)
def update(request):
    """Queue a 'portal' update job for the given summoner keys.

    Expects a JSON body with 'region' and 'keys'; responds 'success' or
    the invalid-format error.
    """
    # pull the payload apart and validate it
    data = json.loads(request.body.decode('utf-8'))
    region = data.get("region")
    keys = data.get("keys")
    if None in (region, keys):
        return HttpResponse(INVALID_REQUEST_FORMAT)
    # hand the summoner keys to the portal worker queue
    conn = SQSConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    portal_queue = conn.get_queue("portal")
    job = RawMessage()
    job.set_body(json.dumps({"region": region, "keys": keys}))
    portal_queue.write(job)
    return HttpResponse("success")
def lecturasqs():
    """Read up to 10 messages from 'myqueue' and append their bodies to a
    local log file.

    SECURITY: AWS credentials are hard-coded below; move them into
    configuration or IAM-role credentials.
    """
    AWS_ACCESS_KEY_ID = 'AKIAJK6M2ZU2J66WGP2Q'
    AWS_SECRET_ACCESS_KEY = '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'
    conn = SQSConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    q = conn.get_queue('myqueue')  # select the queue
    rs = q.get_messages(10)  # how many messages to read at most
    # `with` guarantees the file gets closed; the old code ended with a
    # bare `f.close` (missing parentheses), which never closed the file.
    with open("/home/ubuntu/openerp/msgcolas.txt", "a") as f:
        for msg in rs:
            lista = json.loads(msg.get_body())
            # placeholder fields until the real db schema is defined
            importe_total = float(lista[3])
            sub_total = importe_total / 1.18
            igvv = importe_total - sub_total  # borrar (to be removed)
            f.write(str(lista) + '\n')
def main(): # create a connection to SQS conn = SQSConnection(); # ask for the JOB_QUEUE q = conn.get_queue(JOB_QUEUE); while True: # snag a mutha f****n message m = q.get_messages() # empty queue check if len(m) == 0: print "queue ",JOB_QUEUE,' is empty' return; # dereference the list m = m[0]; # blab print m.get_body();
def pushStatus(self): # get the status status = self.getStatus() # create a connection to SQS conn = SQSConnection() # ask for the QUEUE q = conn.get_queue(QUEUE) # create a new message m = Message() # set the message body to the status m.set_body(str(status)) # publish the message to SQS q.write(m) # blab about it for now print str(status)
def pushStatus(self): # get the status status = self.getStatus(); # create a connection to SQS conn = SQSConnection(); # ask for the QUEUE q = conn.get_queue(QUEUE); # create a new message m = Message(); # set the message body to the status m.set_body(str(status)); # publish the message to SQS q.write(m); # blab about it for now print str(status);
def __init__(self, *args, **kwargs):
    # Build an SQS-backed queue from kwargs['settings'] (KEY_ID, KEY,
    # QUEUE, optional LOCAL_CLIENT_VALIDATION); fall back to a local mock
    # queue when the expected settings keys are missing.
    self._settings = kwargs.get('settings')
    try:
        connection_kwargs = {
            'aws_access_key_id': self._settings['KEY_ID'],
            'aws_secret_access_key': self._settings['KEY']
        }
        if self._settings.get('LOCAL_CLIENT_VALIDATION', False):
            # use the custom HTTPS connection factory when local client
            # validation is enabled
            connection_kwargs['https_connection_factory'] = (
                https_connection_factory, ())
        connection = SQSConnection(**connection_kwargs)
        if connection is None:
            raise SNSException('no connection')
        self._queue = connection.get_queue(self._settings.get('QUEUE'))
        if self._queue is None:
            raise SNSException('no queue')
        # raw messages: bodies are returned exactly as stored
        self._queue.set_message_class(RawMessage)
    except KeyError:
        # missing settings keys -> mock queue for local/dev use
        # NOTE(review): settings=None raises TypeError on the KEY_ID lookup,
        # which is NOT caught here — confirm callers always pass a dict.
        self._queue = SQSQueueMock(*args, **kwargs)
#!/usr/bin/python import timeit import gc import urllib import simplejson from random import randrange from time import sleep, time from boto.sqs.connection import SQSConnection probe_loc = 'uk' queue_name = 'helomx_to_%s' % probe_loc conn = SQSConnection('', '') q_incoming = conn.get_queue(queue_name) q_return = conn.get_queue('helomx_return') # We clear the queue just in case this probe was down, so we don't keep # scanning the mailserver over and over. q_incoming.clear() def check_port(ip_parts): now = int(time()) try: z = """\ import socket s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.settimeout(1.0) s.connect(('%s', %d)) s.close() """ % (ip_parts[0], int(ip_parts[1])) t = timeit.Timer(stmt=z).timeit(number=1)*1000
def add_friend(request):
    # Add `friend_key` to `user_key`'s friend list (max 20, no duplicates,
    # no self-friending). If the friend is unknown, build their Summoner
    # record from the Riot API and queue a portal update for it. Returns
    # the friend's serialized summoner object or a JSON error code.
    # extract data
    data = json.loads(request.body.decode('utf-8'))
    region = data.get("region")
    user_key = data.get("user_key")
    friend_key = data.get("friend_key")
    # ensure the data is valid
    if None in (region, user_key, friend_key):
        return HttpResponse(json.dumps(INVALID_REQUEST_FORMAT))
    # ensure proper key format
    user_key = format_key(user_key)
    friend_key = format_key(friend_key)
    # make sure friend is not the user
    if user_key == friend_key:
        return HttpResponse(json.dumps(FRIEND_EQUALS_USER))
    try:
        # get the users summoner object (cache first, then database)
        user_o = cache.get(region + user_key + "summoner")
        if user_o is None:
            user_o = Summoner.objects.get(region=region, key=user_key)
            cache.set(region + user_key + "summoner", user_o, None)
        Summoner.objects.filter(pk=user_o.pk).update(accessed=datetime.now())
    except Summoner.DoesNotExist:
        return HttpResponse(json.dumps(SUMMONER_NOT_IN_DATABASE))
    # check if user is at friend limit or if friend is already listed
    if user_o.friends is not None:
        friends = user_o.friends.split(",")
        if len(friends) >= 20:
            return HttpResponse(json.dumps(FRIEND_LIMIT_REACHED))
        for friend in friends:
            if friend == friend_key:
                return HttpResponse(json.dumps(FRIEND_ALREADY_LISTED))
    try:
        # get the friends summoner object (cache first, then database)
        friend_o = cache.get(region + friend_key + "summoner")
        if friend_o is None:
            friend_o = Summoner.objects.get(region=region, key=friend_key)
            cache.set(region + friend_key + "summoner", friend_o, None)
        Summoner.objects.filter(pk=friend_o.pk).update(accessed=datetime.now())
    except Summoner.DoesNotExist:
        try:
            # summoner not in database, request summoner data from riot
            args = {"request": 1, "key": friend_key}
            riot_response = riot_request(region, args)
        except APIError as e:
            if e.error_code == 404:
                return HttpResponse(json.dumps(SUMMONER_DOES_NOT_EXIST))
            else:
                return HttpResponse(json.dumps(INVALID_RIOT_RESPONSE))
        try:
            # extract the summoner
            friend = riot_response.get(friend_key)
        except AttributeError:
            return HttpResponse(json.dumps(INVALID_RIOT_RESPONSE))
        try:
            # use the summoner id to get the friends league information
            args = {"request": 4, "summoner_ids": friend.id}
            riot_response = riot_request(region, args)
        except APIError as e:
            if e.error_code == 404:
                return HttpResponse(json.dumps(SUMMONER_NOT_RANKED))
            else:
                return HttpResponse(json.dumps(INVALID_RIOT_RESPONSE))
        try:
            # extract the league data
            leagues = riot_response.get(str(friend.id))
            # iterate over the leagues looking for the dynamic queue league
            league = None
            for item in leagues:
                if item.queue == "RANKED_SOLO_5x5":
                    league = item
            # ensure the dynamic queue league was found
            if league is None:
                return HttpResponse(json.dumps(SUMMONER_NOT_RANKED))
            # iterate over the league entries to get more detailed information
            division, lp, wins, losses, series = None, None, None, None, ""
            for entry in league.entries:
                if entry.playerOrTeamId == str(friend.id):
                    division = entry.division
                    lp = entry.leaguePoints
                    wins = entry.wins
                    losses = entry.losses
                    if entry.miniSeries is not None:
                        series = entry.miniSeries.progress
        except AttributeError:
            return HttpResponse(json.dumps(INVALID_RIOT_RESPONSE))
        try:
            # use the gathered information to create a summoner object
            friend_o = Summoner.objects.create(
                region=region,
                key=friend_key,
                name=friend.name,
                summoner_id=friend.id,
                tier=league.tier,
                division=division,
                lp=lp,
                wins=wins,
                losses=losses,
                series=series,
                profile_icon=friend.profileIconId)
        except IntegrityError:
            return HttpResponse(json.dumps(INTERNAL_PROCESSING_ERROR))
        # queue the newly created summoner for a stats update by the
        # portal worker
        conn = SQSConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
        queue = conn.get_queue("portal")
        message = RawMessage()
        message.set_body(json.dumps({"region": region, "keys": [friend_key]}))
        queue.write(message)
    # add the friends key to the users friend list
    if user_o.friends != "":
        user_o.friends += "," + friend_key
    else:
        user_o.friends = friend_key
    Summoner.objects.filter(pk=user_o.pk).update(friends=user_o.friends)
    cache.set(region + user_key + "summoner", user_o, None)
    # return the friends summoner object
    return HttpResponse(summoner_serializer(friend_o, None, False))
def login_user_1_1(request):
    # Log a summoner in: return the cached/stored Summoner record, or build
    # one from the Riot API (summoner + ranked-league data) when unseen,
    # queueing a portal update for the newly created record.
    # extract data
    data = json.loads(request.body.decode('utf-8'))
    region = data.get("region")
    key = data.get("key")
    # ensure the data is valid
    if None in (region, key):
        return HttpResponse(json.dumps(INVALID_REQUEST_FORMAT))
    # ensure proper key format
    key = format_key(key)
    try:
        # get the summoner object (cache first, then database)
        summoner_o = cache.get(region + key + "summoner")
        if summoner_o is None:
            summoner_o = Summoner.objects.get(region=region, key=key)
            cache.set(region + key + "summoner", summoner_o, None)
        Summoner.objects.filter(pk=summoner_o.pk).update(
            accessed=datetime.now())
        # return the users summoner object
        return HttpResponse(summoner_serializer(summoner_o, None, False))
    except Summoner.DoesNotExist:
        pass
    try:
        # summoner not in database, request summoner data from riot
        args = {"request": 1, "key": key}
        riot_response = riot_request(region, args)
    except APIError as e:
        if e.error_code == 404:
            return HttpResponse(json.dumps(SUMMONER_DOES_NOT_EXIST))
        else:
            return HttpResponse(json.dumps(INVALID_RIOT_RESPONSE))
    try:
        # extract the summoner and the fields we persist
        summoner = riot_response.get(key)
        summoner_id = summoner.id
        name = summoner.name
        profile_icon = summoner.profileIconId
    except AttributeError:
        return HttpResponse(json.dumps(INVALID_RIOT_RESPONSE))
    try:
        # use summoner id to get league information
        args = {"request": 4, "summoner_ids": summoner_id}
        riot_response = riot_request(region, args)
    except APIError as e:
        if e.error_code == 404:
            return HttpResponse(json.dumps(SUMMONER_NOT_RANKED))
        else:
            return HttpResponse(json.dumps(INVALID_RIOT_RESPONSE))
    try:
        # extract the league data
        leagues = riot_response.get(str(summoner_id))
        # iterate over the leagues looking for the dynamic queue league
        league = None
        for item in leagues:
            if item.queue == "RANKED_SOLO_5x5":
                league = item
        # ensure the dynamic queue league was found
        if league is None:
            return HttpResponse(json.dumps(SUMMONER_NOT_RANKED))
        # iterate over the league entries to get more detailed information
        division, lp, wins, losses, series = None, None, None, None, ""
        for entry in league.entries:
            if entry.playerOrTeamId == str(summoner_id):
                division = entry.division
                lp = entry.leaguePoints
                wins = entry.wins
                losses = entry.losses
                if entry.miniSeries is not None:
                    series = entry.miniSeries.progress
        # extract the tier information
        tier = league.tier
    except AttributeError:
        return HttpResponse(json.dumps(INVALID_RIOT_RESPONSE))
    try:
        # use the gathered information to create a summoner object
        summoner_o = Summoner.objects.create(region=region, key=key, name=name,
                                             summoner_id=summoner_id, tier=tier,
                                             division=division, lp=lp, wins=wins,
                                             losses=losses, series=series,
                                             profile_icon=profile_icon)
    except IntegrityError:
        return HttpResponse(json.dumps(INTERNAL_PROCESSING_ERROR))
    # queue the newly created summoner for a stats update by the portal worker
    conn = SQSConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    queue = conn.get_queue("portal")
    message = RawMessage()
    message.set_body(json.dumps({"region": region, "keys": [key]}))
    queue.write(message)
    # return the users summoner object
    return HttpResponse(summoner_serializer(summoner_o, None, False))
def register_user(request):
    # Register an account for a summoner. Ownership is proven by a rune page
    # named with the supplied code. Creates the Summoner record from Riot
    # data (summoner + ranked league) when it is not in the database yet.
    # extract data
    data = json.loads(request.body.decode('utf-8'))
    region = data.get("region")
    key = data.get("key")
    email = data.get("email")
    password = data.get("password")
    code = data.get("code")
    # ensure the data is valid
    if None in (region, key, email, password, code):
        return HttpResponse(json.dumps(INVALID_REQUEST_FORMAT))
    # ensure proper key format
    key = format_key(key)
    # initialize summoner object value to None
    summoner_o = None
    # initialize riot response summoner value to None
    summoner = None
    try:
        # get the summoner object (cache first, then database)
        summoner_o = cache.get(region + key + "summoner")
        if summoner_o is None:
            summoner_o = Summoner.objects.get(region=region, key=key)
            cache.set(region + key + "summoner", summoner_o, None)
        Summoner.objects.filter(pk=summoner_o.pk).update(
            accessed=datetime.now())
        # check if the user object already exists
        if summoner_o.user is not None:
            return HttpResponse(json.dumps(SUMMONER_ALREADY_REGISTERED))
        # get the summoner id
        summoner_id = summoner_o.summoner_id
    except Summoner.DoesNotExist:
        try:
            # summoner not in database, request summoner data from riot
            args = {"request": 1, "key": key}
            riot_response = riot_request(region, args)
        except APIError as e:
            if e.error_code == 404:
                return HttpResponse(json.dumps(SUMMONER_DOES_NOT_EXIST))
            else:
                return HttpResponse(json.dumps(INVALID_RIOT_RESPONSE))
        try:
            # extract the summoner
            summoner = riot_response.get(key)
            # get the summoner id
            summoner_id = summoner.id
        except AttributeError:
            return HttpResponse(json.dumps(INVALID_RIOT_RESPONSE))
    try:
        # use the summoner id to get rune page information to validate ownership
        args = {"request": 6, "summoner_id": summoner_id}
        riot_response = riot_request(region, args)
        # extract the summoners rune pages
        rune_pages = riot_response.get(str(summoner_id)).pages
        # iterate over the pages looking for one whose name matches the code
        no_match = True
        for page in rune_pages:
            if page.name == code:
                no_match = False
                break
        # return error if no match found
        if no_match:
            return HttpResponse(json.dumps(RUNE_PAGE_CODE_NOT_FOUND))
    except (APIError, AttributeError):
        return HttpResponse(json.dumps(INVALID_RIOT_RESPONSE))
    # hash password
    password = hashers.make_password(password)
    # if summoner object already exists wrap up registration
    if summoner_o is not None:
        # create a user object for the summoner object
        summoner_o.user = User.objects.create(email=email, password=password)
        Summoner.objects.filter(pk=summoner_o.pk).update(user=summoner_o.user)
        # return the users summoner object with the email included
        return HttpResponse(summoner_serializer(summoner_o, email, False))
    try:
        # summoner object did not already exist, use summoner id to get league information
        args = {"request": 4, "summoner_ids": summoner_id}
        riot_response = riot_request(region, args)
    except APIError as e:
        if e.error_code == 404:
            return HttpResponse(json.dumps(SUMMONER_NOT_RANKED))
        else:
            return HttpResponse(json.dumps(INVALID_RIOT_RESPONSE))
    try:
        # extract the league data
        leagues = riot_response.get(str(summoner_id))
        # iterate over the leagues looking for the dynamic queue league
        league = None
        for item in leagues:
            if item.queue == "RANKED_SOLO_5x5":
                league = item
        # ensure the dynamic queue league was found
        if league is None:
            return HttpResponse(json.dumps(SUMMONER_NOT_RANKED))
        # iterate over the league entries to get more detailed information
        division, lp, wins, losses, series = None, None, None, None, ""
        for entry in league.entries:
            if entry.playerOrTeamId == str(summoner_id):
                division = entry.division
                lp = entry.leaguePoints
                wins = entry.wins
                losses = entry.losses
                if entry.miniSeries is not None:
                    series = entry.miniSeries.progress
    except AttributeError:
        return HttpResponse(json.dumps(INVALID_RIOT_RESPONSE))
    # create a new user object
    user_o = User.objects.create(email=email, password=password)
    try:
        # use the gathered information to create a summoner object
        summoner_o = Summoner.objects.create(
            user=user_o,
            region=region,
            key=key,
            name=summoner.name,
            summoner_id=summoner_id,
            tier=league.tier,
            division=division,
            lp=lp,
            wins=wins,
            losses=losses,
            series=series,
            profile_icon=summoner.profileIconId)
    except IntegrityError:
        return HttpResponse(json.dumps(INTERNAL_PROCESSING_ERROR))
    # queue the newly created summoner for a stats update by the portal worker
    conn = SQSConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    queue = conn.get_queue("portal")
    message = RawMessage()
    message.set_body(json.dumps({"region": region, "keys": [key]}))
    queue.write(message)
    # return the users summoner object with the email included
    return HttpResponse(summoner_serializer(summoner_o, email, False))
class SQSAuthParams(AWSMockServiceTestCase):
    """Tests for SQS SigV4 auth parameters (service name and region scope)."""

    connection_class = SQSConnection

    def setUp(self):
        super(SQSAuthParams, self).setUp()

    def default_body(self):
        # Canned CreateQueue XML response served by the mocked HTTP layer.
        return """<?xml version="1.0"?>
<CreateQueueResponse>
  <CreateQueueResult>
    <QueueUrl>
    https://queue.amazonaws.com/599169622985/myqueue1
    </QueueUrl>
  </CreateQueueResult>
  <ResponseMetadata>
    <RequestId>54d4c94d-2307-54a8-bb27-806a682a5abd</RequestId>
  </ResponseMetadata>
</CreateQueueResponse>"""

    def test_auth_service_name_override(self):
        self.set_http_response(status_code=200)
        # We can use the auth_service_name to change what service
        # name to use for the credential scope for sigv4.
        self.service_connection.auth_service_name = 'service_override'
        self.service_connection.create_queue('my_queue')
        # Note the service_override value instead.
        self.assertIn('us-east-1/service_override/aws4_request',
                      self.actual_request.headers['Authorization'])

    def test_class_attribute_can_set_service_name(self):
        self.set_http_response(status_code=200)
        # The SQS class has an 'AuthServiceName' param of 'sqs':
        self.assertEqual(self.service_connection.AuthServiceName, 'sqs')
        self.service_connection.create_queue('my_queue')
        # And because of this, the value of 'sqs' will be used instead of
        # 'queue' for the credential scope:
        self.assertIn('us-east-1/sqs/aws4_request',
                      self.actual_request.headers['Authorization'])

    def test_auth_region_name_is_automatically_updated(self):
        region = SQSRegionInfo(name='us-west-2',
                               endpoint='us-west-2.queue.amazonaws.com')
        self.service_connection = SQSConnection(
            https_connection_factory=self.https_connection_factory,
            aws_access_key_id='aws_access_key_id',
            aws_secret_access_key='aws_secret_access_key',
            region=region)
        self.initialize_service_connection()
        self.set_http_response(status_code=200)
        self.service_connection.create_queue('my_queue')
        # Note the region name below is 'us-west-2'.
        self.assertIn('us-west-2/sqs/aws4_request',
                      self.actual_request.headers['Authorization'])

    def test_set_get_auth_service_and_region_names(self):
        self.service_connection.auth_service_name = 'service_name'
        self.service_connection.auth_region_name = 'region_name'
        self.assertEqual(self.service_connection.auth_service_name,
                         'service_name')
        self.assertEqual(self.service_connection.auth_region_name,
                         'region_name')

    def test_get_queue_with_owner_account_id_returns_queue(self):
        self.set_http_response(status_code=200)
        self.service_connection.create_queue('my_queue')
        self.service_connection.get_queue('my_queue', '599169622985')
        # assertIn gives a useful failure message, unlike a bare assert;
        # assertEquals is a deprecated alias of assertEqual.
        self.assertIn('QueueOwnerAWSAccountId', self.actual_request.params)
        self.assertEqual(self.actual_request.params['QueueOwnerAWSAccountId'],
                         '599169622985')
coll_names = db.collection_names() result = {} for m in modes: result[m[0]] = [0]*MAX_DEMES for subm in [c for c in coll_names if c.startswith('gp_ '+m[0])]: idx = int(subm[4 + len(m[0]):]) if idx < MAX_DEMES: coll = db[subm] result[m[0]][idx] = coll.count() return result conn = SQSConnection(AWS_ACCESS, AWS_SECRET) task_queue = conn.get_queue('GP_tasks') print("Current progress:") pg = assess_progress() print pg if task_queue.count() > 0: raise RuntimeError("There are still tasks to be processed.") jobs = [] # The ideal execution order for tasks, is to fill up lower-order demes first, and to cycle # through modes as much as possible to prevent servants from duplication generations # (it's not wasted effort, but it's not ideal either) # for n_bucket in xrange(0, MAX_DEMES):
import os
from boto.sqs.connection import SQSConnection

# Credentials are pulled from the environment; a missing variable raises
# KeyError immediately rather than connecting with bad credentials.
AWS_SQS_ACCESS_KEY_ID = os.environ['AWS_SQS_ACCESS_KEY_ID']
AWS_SQS_SECRET_ACCESS_KEY = os.environ['AWS_SQS_SECRET_ACCESS_KEY']

conn = SQSConnection(AWS_SQS_ACCESS_KEY_ID, AWS_SQS_SECRET_ACCESS_KEY)
queue = conn.get_queue('panda-prod-bulk_upload')

# Drain the queue: keep pulling batches of up to 10 messages (long-polling
# for 5 seconds) and deleting them until a poll comes back empty.
while True:
    batch = queue.get_messages(num_messages=10, wait_time_seconds=5)
    for msg in batch:
        queue.delete_message(msg)
    if not batch:
        break

print("done")
#!/usr/bin/env python from boto.sqs.connection import SQSConnection from boto.sqs.message import Message conn = SQSConnection('AKIAIFNNIT7VXOXVFPIQ', 'stNtF2dlPiuSigHNcs95JKw06aEkOAyoktnWqXq+') q = conn.get_queue('dwpTestQueue') def writeToSQS(messageBody): # Note that messages are base64 encoded. m1 = Message() m1.set_body(messageBody) q.write(m1) def pingSQS(): print 'OMF message count:', q.count() def peakSingleMessage(): print[m.get_body() for m in q.get_messages(1)] def eatSingleMessage(): pull = q.get_messages(1) if len(pull) == 0: return False else: m = pull[0] q.delete_message(m)
class SQSAuthParams(AWSMockServiceTestCase):
    """Tests for SQS SigV4 auth parameters (service name and region scope)."""

    connection_class = SQSConnection

    def setUp(self):
        super(SQSAuthParams, self).setUp()

    def default_body(self):
        # Canned CreateQueue XML response served by the mocked HTTP layer.
        return """<?xml version="1.0"?>
<CreateQueueResponse>
  <CreateQueueResult>
    <QueueUrl>
    https://queue.amazonaws.com/599169622985/myqueue1
    </QueueUrl>
  </CreateQueueResult>
  <ResponseMetadata>
    <RequestId>54d4c94d-2307-54a8-bb27-806a682a5abd</RequestId>
  </ResponseMetadata>
</CreateQueueResponse>"""

    def test_auth_service_name_override(self):
        self.set_http_response(status_code=200)
        # We can use the auth_service_name to change what service
        # name to use for the credential scope for sigv4.
        self.service_connection.auth_service_name = 'service_override'
        self.service_connection.create_queue('my_queue')
        # Note the service_override value instead.
        self.assertIn('us-east-1/service_override/aws4_request',
                      self.actual_request.headers['Authorization'])

    def test_class_attribute_can_set_service_name(self):
        self.set_http_response(status_code=200)
        # The SQS class has an 'AuthServiceName' param of 'sqs':
        self.assertEqual(self.service_connection.AuthServiceName, 'sqs')
        self.service_connection.create_queue('my_queue')
        # And because of this, the value of 'sqs' will be used instead of
        # 'queue' for the credential scope:
        self.assertIn('us-east-1/sqs/aws4_request',
                      self.actual_request.headers['Authorization'])

    def test_auth_region_name_is_automatically_updated(self):
        region = SQSRegionInfo(name='us-west-2',
                               endpoint='us-west-2.queue.amazonaws.com')
        self.service_connection = SQSConnection(
            https_connection_factory=self.https_connection_factory,
            aws_access_key_id='aws_access_key_id',
            aws_secret_access_key='aws_secret_access_key',
            region=region)
        self.initialize_service_connection()
        self.set_http_response(status_code=200)
        self.service_connection.create_queue('my_queue')
        # Note the region name below is 'us-west-2'.
        self.assertIn('us-west-2/sqs/aws4_request',
                      self.actual_request.headers['Authorization'])

    def test_set_get_auth_service_and_region_names(self):
        self.service_connection.auth_service_name = 'service_name'
        self.service_connection.auth_region_name = 'region_name'
        self.assertEqual(self.service_connection.auth_service_name,
                         'service_name')
        self.assertEqual(self.service_connection.auth_region_name,
                         'region_name')

    def test_get_queue_with_owner_account_id_returns_queue(self):
        self.set_http_response(status_code=200)
        self.service_connection.create_queue('my_queue')
        self.service_connection.get_queue('my_queue', '599169622985')
        # assertIn gives a useful failure message, unlike a bare assert;
        # assertEquals is a deprecated alias of assertEqual.
        self.assertIn('QueueOwnerAWSAccountId', self.actual_request.params)
        self.assertEqual(self.actual_request.params['QueueOwnerAWSAccountId'],
                         '599169622985')