Example #1
 def _conectar(self):
     self._logger.debug("Regiao AWS: %s" % self._url.netloc)
     if self._config.has_aws_auth:
         self._logger.debug("Realizando autenticacao completa na AWS")
         self._conn = sqs.connect_to_region(self._url.netloc,
                                            **self._config.aws_auth)
     else:
         self._logger.debug("Realizando autenticacao automatica na AWS")
         self._conn = sqs.connect_to_region(self._url.netloc)
     if not self._conn:
         raise MQError(None, 2)
     self._queue = self._conn.get_queue(self._url.path[1:])
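The region above comes from self._url.netloc and the queue name from self._url.path[1:], which suggests the configured endpoint is a URL of the form scheme://<region>/<queue-name>. A minimal sketch of building such a value with urlparse; the sqs:// scheme and the literal values are assumptions for illustration, not taken from the original project:

# Illustrative only: assumes an endpoint shaped like "sqs://<region>/<queue-name>".
try:
    from urllib.parse import urlparse   # Python 3
except ImportError:
    from urlparse import urlparse        # Python 2 (boto-era)

url = urlparse("sqs://us-east-1/my-queue")
print(url.netloc)    # "us-east-1" -> region handed to sqs.connect_to_region
print(url.path[1:])  # "my-queue"  -> queue name handed to conn.get_queue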
Example #2
	def __init__(self):
		# Get the parameters
		params = {}
		params['aws_access_key_id'] = os.environ["AWS_ACCESS_KEY_ID"]
		params['aws_secret_access_key'] = os.environ["AWS_SECRET_KEY"]

		self.conn = sqs.connect_to_region('us-east-1', **params)
Example #3
 def __init__(self, *args, **kwargs):
     self.region = kwargs.get('region', None)
     self.access_key = kwargs.get('access_key', None)
     self.secret_key = kwargs.get('secret_key', None)
     self.__queue_connection = sqs.connect_to_region(
         self.region, aws_access_key_id=self.access_key,
         aws_secret_access_key=self.secret_key)
Example #4
    def receive_message(self, max_time=3600, save_message=True):
        '''
        Connect to the queue and read the next message.
        '''

        queue = sqs.connect_to_region(self.region,
                                           aws_access_key_id=self.key,
                                           aws_secret_access_key=self.secret).create_queue(self.queue_name)
        # Read the next message; max_time is passed to read() as the visibility timeout
        mess = queue.read(max_time)

        if mess is None:
            self.empty_flag = True
            self.success = False
            self.message_dict['receive_message'] = "Found no message to read."
        else:
            contents = json.loads(mess.get_body())

            self.proc_name = contents['proc_name']
            self.bucket_name = contents['bucket']
            self.key_name = contents['key_name']
            self.command = contents['command']

            try:
                queue.delete_message(mess)
            except Exception:
                print("No message to delete.")

            if save_message:
                with open("data/params.txt", "w") as f:
                    json.dump(contents, f)

            self.message_dict['receive_message'] = "Successfully read message."
Example #5
    def collect(self):
        attribs = ['ApproximateNumberOfMessages',
                   'ApproximateNumberOfMessagesNotVisible',
                   'ApproximateNumberOfMessagesDelayed',
                   'CreatedTimestamp',
                   'DelaySeconds',
                   'LastModifiedTimestamp',
                   'MaximumMessageSize',
                   'MessageRetentionPeriod',
                   'ReceiveMessageWaitTimeSeconds',
                   'VisibilityTimeout']
        if not sqs:
            self.log.error("boto module not found!")
            return
        for (region, region_cfg) in self.config['regions'].items():
            assert 'access_key_id' in region_cfg
            assert 'secret_access_key' in region_cfg
            assert 'queues' in region_cfg
            queues = region_cfg['queues'].split(',')
            for queue_name in queues:
                conn = sqs.connect_to_region(
                    region,
                    aws_access_key_id=region_cfg['access_key_id'],
                    aws_secret_access_key=region_cfg['secret_access_key'],
                )
                queue = conn.get_queue(queue_name)

                for attrib in attribs:
                    d = queue.get_attributes(attrib)
                    self.publish(
                        '%s.%s.%s' % (region, queue_name, attrib),
                        d[attrib]
                    )
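The asserts above imply that self.config['regions'] maps each region to a block with access_key_id, secret_access_key and a comma-separated queues string. A hedged sketch of that shape; region, key and queue names are placeholders:

# Illustrative shape of self.config['regions']; all values are placeholders.
config = {
    'regions': {
        'us-east-1': {
            'access_key_id': 'AKIA...',
            'secret_access_key': '...',
            'queues': 'queue-one,queue-two',
        },
    },
}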
Example #6
    def collect(self):
        if not sqs:
            self.log.error("boto module not found!")
            return
        conn = sqs.connect_to_region(
            self.config['aws_region'],
            aws_access_key_id=self.config['aws_access_key_id'],
            aws_secret_access_key=self.config['aws_secret_access_key'])
        queue = conn.get_queue(self.config['sqs_queue'])
        attribs = ['ApproximateNumberOfMessages',
                   'ApproximateNumberOfMessagesNotVisible',
                   'ApproximateNumberOfMessagesDelayed',
                   'CreatedTimestamp',
                   'DelaySeconds',
                   'LastModifiedTimestamp',
                   'MaximumMessageSize',
                   'MessageRetentionPeriod',
                   'ReceiveMessageWaitTimeSeconds',
                   'VisibilityTimeout']

        for attrib in attribs:
            d = queue.get_attributes(attrib)
            self.publish('%s.%s.%s' % (self.config['aws_region'],
                                       self.config['sqs_queue'],
                                       attrib),
                         d[attrib])
Example #7
 def __init__(self, queue_name, queue_account, stack_name, region='eu-west-1'):
     self.logger = logging.getLogger(__name__)
     self.stack_name = stack_name
     self.sqs_connection = sqs.connect_to_region(region)
     self.sqs_queue = self.sqs_connection.get_queue(queue_name=queue_name, owner_acct_id=queue_account)
     if not self.sqs_queue:
         raise Exception("Unable to find SQS queue for name: {0} in account: {1}"
                         .format(queue_name, queue_account))
Example #8
 def __init__(self, redis_host='localhost'):
     self.protocol = conf['PROTOCOL']
     self.channels = dict()
     self.redis_session = redis.StrictRedis(host=redis_host)
     sqs_conn = sqs.connect_to_region('ap-northeast-1')
     self.notification_queue = sqs_conn.get_queue(conf['NOTIFICATION_QUEUE_NAME'])
     self.log_queue = sqs_conn.get_queue(conf['LOG_QUEUE_NAME'])
     Transmitter(self).start()
Example #9
    def test_delete_queue(self):
        sid = uuid4().hex
        queue = Queue(sid)

        self.assertIsInstance(queue.queue, SQSQueue)
        queue.delete()

        assert not connect_to_region("us-east-1").get_queue(sid)
Example #10
def make_SQS_connection(region_name, aws_access_key_id, aws_secret_access_key):
    """
    Make an SQS connection to an AWS account. Pass in the region name, AWS
    access key id, and AWS secret access key.
    """
    return connect_to_region(region_name,
                             aws_access_key_id=aws_access_key_id,
                             aws_secret_access_key=aws_secret_access_key)
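A short usage sketch for the helper above; the region, credentials and queue name are placeholders, not part of the original example:

# Placeholder values for illustration only.
conn = make_SQS_connection('us-west-2', 'AKIA...', '...')
queue = conn.get_queue('my-queue')  # boto returns None if the queue does not exist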
Example #11
    def __init__(self, region="eu-west-1"):
        logging.basicConfig(format='%(asctime)s %(levelname)s %(module)s: %(message)s',
                            datefmt='%d.%m.%Y %H:%M:%S',
                            level=logging.INFO)
        self.logger = logging.getLogger(__name__)
        logging.getLogger('boto').setLevel(logging.ERROR)

        self.conn = sqs.connect_to_region(region)
        self.queue = self._get_queue_instance("is24-cfn-custom-resources")
Example #12
    def send_result_message(self, resp_queue_name):
        resp_queue = sqs.connect_to_region(self.region,
                                           aws_access_key_id=self.key,
                                           aws_secret_access_key=self.secret).create_queue(resp_queue_name)
        resp_message = {'proc_name': self.proc_name,
                        'success': self.success,
                        'messages': self.message_dict}

        mess = resp_queue.new_message(body=json.dumps(resp_message))
        resp_queue.write(mess)
Example #13
	def __init__(self, broker, queue_name, zone):		
		Thread.__init__(self)
		self._sqs_conn = sqs.connect_to_region(zone)
		self._queue = self._sqs_conn.get_queue(queue_name)
		self._broker = broker
		if self._queue == None:
			self._queue = self._sqs_conn.create_queue(queue_name)
		self._handler = None
		self._messages_queue = None

		print "Inizialiazzazione completa"
Example #14
def create_backend(region, queue_name_prefix):

    # Trigger the boto authentication checks.
    # This does not actually connect.
    try:
        sqs.connect_to_region(region)
    except NoAuthHandlerFound:
        logging.error('Boto is not configured to connect to SQS.')
        raise

    return SQSBackend(
        label='SQS',
        connection_class=Connection,
        connection_params={
            'region': region,
            'queue_name_prefix': queue_name_prefix,
        },
        message_queue_class=MessageQueue,
        connection_errors=(
            QueueNotFound,
        ),
    )
Example #15
def poll():
	from boto import sqs
	import time
	conn = sqs.connect_to_region("us-west-2") # credentials are stored as environment variables on the server
	q = conn.get_queue('splintera')
	while True:
		if q.count()>0:# only gets an 'approximate' count, for speed reasons
			rs = q.get_messages()# by default, gets 1 message
			if len(rs)>0:
				m = rs[0]
				trace_id = int(m.get_body())
				process_data(trace_id)
				q.delete_message(m)
		time.sleep(5)
Example #16
def sqs_connection(config):
    """
        Initialize Boto SQS Client from FLask application configuration.

        Arguments:
            config (flask.config.Config)

        Return:
            sns_con (boto.sqs.connection.SQSConnection)
        """
    sqs_con = sqs.connect_to_region(config['AWS_REGION'], aws_access_key_id=config['AWS_ACCESS_KEY_ID'],
                                    aws_secret_access_key=config['AWS_SECRET_ACCESS_KEY'])

    return sqs_con
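A hedged usage sketch, with a plain dict standing in for the Flask config; the keys match what the helper reads, the values are placeholders:

# Placeholder configuration; in a real app this would be app.config.
config = {
    'AWS_REGION': 'eu-west-1',
    'AWS_ACCESS_KEY_ID': 'AKIA...',
    'AWS_SECRET_ACCESS_KEY': '...',
}
conn = sqs_connection(config)       # boto.sqs.connection.SQSConnection
queue = conn.get_queue('my-queue')  # None if the queue does not exist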
Example #17
def main():
    conn = sqs.connect_to_region(config.SQS_REGION)
    q = conn.create_queue(config.SQS_QUEUE_NAME)
    while True:
        messages = q.get_messages(
            num_messages=5,
            wait_time_seconds=config.SQS_WAIT_TIME)
        if messages:

            for m in messages:
                process_message(m)
                m.delete()
        else:
            sleep(config.SLEEP_TIME)
Example #18
    def __init__(self, name, connection=None):
        """
        Initialize an SQS queue.

        This will create a new boto SQS connection in the background, which will
        be used for all future queue requests.  This will speed up communication
        with SQS by taking advantage of boto's connection pooling functionality.

        :param str name: The name of the queue to use.
        :param obj connection: [optional] Either a boto connection object, or None.
        """
        self.name = name
        self.connection = connection or connect_to_region('us-east-1')
        self._queue = None
Example #19
    def __init__(self, redis_host='localhost'):
        self.protocol = conf['PROTOCOL']
        self.clients_by_channel_name = dict()
        self.clients_by_user_id = dict()

        self.redis_session = redis.StrictRedis(host=redis_host)
        sqs_conn = sqs.connect_to_region('ap-northeast-1')
        self.notification_queue = sqs_conn.get_queue(conf['NOTIFICATION_QUEUE_NAME'])
        self.log_queue = sqs_conn.get_queue(conf['LOG_QUEUE_NAME'])
        self.api_queue = sqs_conn.get_queue(conf['API_QUEUE_NAME'])

        Transmitter(self).start()
        if conf['API_PROCESSOR']:
            ApiListener(self, conf['API_PROCESSOR']).start()
Example #20
 def __init__(self, name, aws_access_key_id, aws_secret_access_key, region="us-east-1", ttl=7200):
     """
     :param name:
     :param aws_access_key_id:
     :param aws_secret_access_key:
     :param region:
     :param ttl:
     :return:
     """
     self._pools = [QUEUED_POOL, FINISHED_POOL]
     self.name = name
     self.ttl = ttl
     self.sqs_conn = sqs.connect_to_region(
         region, aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key
     )
Example #21
def get_sqs_connection(queue):
    '''Connect to the SQS queue and return it.
       :param queue: name of the SQS queue to fetch
       Reads AMAZON_REGION, AMAZON_ACCESS_KEY_ID and
       SECRET_ACCESS_KEY from the environment.
    '''

    from boto import sqs

    connection = sqs.connect_to_region(
        os.environ['AMAZON_REGION'],
        aws_access_key_id=os.environ['AMAZON_ACCESS_KEY_ID'],
        aws_secret_access_key=os.environ['SECRET_ACCESS_KEY'])
    queue_obj = connection.get_queue(queue)

    return queue_obj
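A hedged usage sketch, assuming the three environment variables the helper reads are already set; the queue name and values are placeholders:

import os

# Placeholder environment for illustration only.
os.environ.setdefault('AMAZON_REGION', 'us-east-1')
os.environ.setdefault('AMAZON_ACCESS_KEY_ID', 'AKIA...')
os.environ.setdefault('SECRET_ACCESS_KEY', '...')

queue = get_sqs_connection('my-queue')  # boto Queue object, or None if not found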
Example #22
 def put_sqs_tasks(self):
     """ Create an SQS tasks for this analysis job """
     print "Populate SQS input queue with tasks"
     # Connect to SQS in the desired region
     conn = sqs.connect_to_region(
         self.aws_region,
         aws_access_key_id = self.aws_key,
         aws_secret_access_key = self.aws_secret_key
     )
     # Get the input queue
     queue = conn.get_queue(self.sqs_input_name)
     queue.set_message_class(JSONMessage)
     # Populate queue with tasks
     for task in self.generate_tasks():
         msg = queue.new_message(body = task)
         queue.write(msg)
     conn.close()
Example #23
    def handle_sns_subscription(cls, resource_description, stack):
        logger = get_logger()
        queue_prefix = stack.name + '-' + resource_description['Properties']['QueueResourceName']
        topic_region = resource_description['Properties'].get('TopicRegion', stack.region)
        topic_arn = cls.extract_topic_arn(resource_description, stack.parameters)

        sqs_conn = sqs.connect_to_region(stack.region)
        sns_conn = sns.connect_to_region(topic_region)

        queues = sqs_conn.get_all_queues(prefix=queue_prefix)
        if len(queues) != 1:
            raise CfnSphereException(
                "Found {0} queues matching the prefix: {1}. Should be 1.".format(len(queues), queue_prefix))

        queue = queues[0]

        logger.info("Subscribing queue {0} to topic {1} in {2}".format(queue.name, topic_arn, topic_region))
        sns_conn.subscribe_sqs_queue(topic_arn, queue)
Example #24
    def boto_put_threads(self, threads=2):

        # 31mb around the beginning
        # 32mb after roughly 1800 messages
        # 32mb after roughly 4500 messages

        if not self.aws_region:
            raise ValueError('aws_region')

        queue_name = 'hammermq-test'

        connection = sqs.connect_to_region(self.aws_region)
        queue = connection.create_queue(queue_name)
        queue.set_message_class(Message)

        message = Message()
        message.set_body('test')

        self.running = True

        counter = itertools.count()
        counter_lock = threading.Lock()

        def putter():
            while self.running:

                with counter_lock:
                    print 'Loop %d' % next(counter)

                result = queue.write(message)
                assert result

                print 'Memory:', memory_usage()

        for x in range(threads):
            thread = threading.Thread(target=putter)
            thread.daemon = True
            thread.start()

        try:
            while self.running:
                time.sleep(1)
        except KeyboardInterrupt:
            self.running = False
Example #25
def push_to_queue(path, width, height, gray):
	'''
	Put the messages on the queue.
	path = the path in S3; it should preferably be unique
	e.g.: 'videos/21242312/jumbix.mp4'
	width: the width
	height: the height
	gray: True or False, indicates whether conversion to B/W is wanted
	'''
	queue = sqs.connect_to_region("eu-west-1")
	q = queue.get_queue('video-converter-sqs')

	vals = {
		'path' : path,
		'width' : width,
		'height' : height,
		'gray' : gray
	}

	m = JSONMessage()
	m.set_body(vals)
	q.write(m)
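A hedged usage sketch of the helper above; the arguments follow the docstring, and the S3 path is the docstring's own example:

# Illustrative call; path and dimensions are example values.
push_to_queue('videos/21242312/jumbix.mp4', width=640, height=480, gray=True)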
Example #26
File: main.py Project: forter/boten
def main(conf_file):
    utils.setup_logging(False)
    logger = logging.getLogger("boten")
    config = boten.core.get_config(init=conf_file)
    sqs_conn = sqs.connect_to_region(config['config']['aws_region'])

    queue = sqs_conn.get_queue(config['config']['queue_name'])
    bots = init_bots()
    logger.info('bots loaded [{}]'.format(",".join(bots.keys())))
    while True:
        logger.info('polling for new job')
        with utils.poll_sqs(queue) as payload:
            logger.info('Got new job')
            bot_name = payload['command'][1:]
            if payload['token'] != config[bot_name]['slack_token']:
                logger.warning('Got unauthorized slack command')
                logger.warning(payload)
                continue
            payload['subcommand'] = payload['text'].partition(' ')[0]
            payload['args'] = payload['text'].partition(' ')[2]
            p = multiprocessing.Process(target=run_payload, args=(bots[bot_name], payload, logger))
            p.start()
Example #27
 def __init__(self, crawler):
     self.crawler = crawler
     self.aws_access_key_id = crawler.settings.get('AWS_ACCESS_KEY')
     self.aws_secret_key = crawler.settings.get('AWS_SECRET_KEY')
     self.region = crawler.settings.get('AWS_REGION')
     self.queue_names = crawler.settings.get('SQS_QUEUE_NAMES')
     self.raise_if_outstanding = crawler.settings.get('RAISE_IF_QUEUE_NOT_EMPTY')
     if not self.aws_access_key_id:
         raise ValueError('please set AWS_ACCESS_KEY in settings')
     elif not self.aws_secret_key:
         raise ValueError('please set AWS_SECRET_KEY in settings')
     elif not self.region:
         raise ValueError('please set AWS_REGION in settings')
     elif not self.queue_names:
         raise ValueError('please set SQS_QUEUE_NAMES in settings')
     self.connection = sqs.connect_to_region(self.region,
                                             aws_access_key_id=self.aws_access_key_id,
                                             aws_secret_access_key=self.aws_secret_key)
     self.queues = deque([(self.connection.get_queue(q), q) for q in self.queue_names])
     if self.raise_if_outstanding:
         if any([q.count() for q, name in self.queues]):
             raise CloseSpider('There are still outstanding items in (a) queue.')
Example #28
    def boto_put(self):

        if not self.aws_region:
            raise ValueError('aws_region')

        queue_name = 'hammermq-test'

        connection = sqs.connect_to_region(self.aws_region)
        queue = connection.create_queue(queue_name)
        queue.set_message_class(Message)

        # stays around 28mb

        for count in itertools.count(1):
            with time_elapsed('Loop %d' % count):

                message = Message()
                message.set_body('test')

                result = queue.write(message)
                assert result
                print 'Memory:', memory_usage()
Example #29
    def __init__(
        self,
        name,
        region,
        aws_access_key_id=None,
        aws_secret_access_key=None,
        debug=False,
        kwargs=None
    ):
        self.settings = kwargs
        self.pending = []

        if kwargs.region not in [r.name for r in sqs.regions()]:
            Log.error("Can not find region {{region}} in {{regions}}", region=kwargs.region, regions=[r.name for r in sqs.regions()])

        conn = sqs.connect_to_region(
            region_name=unwrap(kwargs.region),
            aws_access_key_id=unwrap(kwargs.aws_access_key_id),
            aws_secret_access_key=unwrap(kwargs.aws_secret_access_key),
        )
        self.queue = conn.get_queue(kwargs.name)
        if self.queue == None:
            Log.error("Can not find queue with name {{queue}} in region {{region}}", queue=kwargs.name, region=kwargs.region)
Example #30
    def __send_alert(self, paths, detail={}):
        if self.disable_alerts:
            return

        try:
            body = {
                "truncated": detail.get("truncated", False),
                "paths": paths if len(paths) <= 10 else paths[0:9],
                "recovered": detail.get("recovered", False),
                "missingFiles": len(paths),
                "stackTrace": traceback.extract_stack(),
                "timestamp": "%s" % datetime.utcnow(),
                "queryId": detail.get("", None),
                "taskId": detail.get("", None),
                "hostname": platform.node(),
                "username": getpass.getuser(),
                "queryType": "DSE Platform Lib",
                "jobId": detail.get("jobId", None),
                "attemptId": detail.get("attemptId", None),
                "email": detail.get("email", None),
                "dataovenId": detail.get("dataovenId", None),
                "logFile": detail.get("logFile", None),
                "inputFile": detail.get("inputFile", None),
                "genieId": detail.get("genieId", None),
                "epoch": self.__time_now(),
            }

            message = RawMessage()
            message.set_body(body)

            conn = sqs.connect_to_region("us-east-1")
            queue = conn.get_queue("s3mper-alert-queue")

            queue.write(message)

        except Exception as e:
            print e
Example #31
def get_input_queue():
    conn = sqs.connect_to_region(settings.SQS_REGION)
    queue = conn.get_queue(settings.INPUT_QUEUE)
    return queue
Example #32
def create_queue():
    """Creates the SQS queue and returns the connection/queue"""
    conn = sqs.connect_to_region(CONFIG['region'])
    queue = conn.create_queue(QUEUE_NAME)
    queue.set_timeout(60 * 60)  # one hour
    return conn, queue
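In boto, Queue.set_timeout sets the queue's default visibility timeout, so messages read from this queue stay hidden for up to an hour unless deleted sooner. A hedged usage sketch, assuming CONFIG['region'] and QUEUE_NAME are defined as referenced above:

# Illustrative usage; assumes CONFIG and QUEUE_NAME exist as in the snippet above.
conn, queue = create_queue()
msg = queue.new_message('hello')  # wrap the body in the queue's message class
queue.write(msg)                  # enqueue the message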