import json

import boto.sqs.message


def _encode_message(self, payload):
    # Serialize the payload to JSON and wrap it in an SQS Message;
    # boto's default Message class base64-encodes the body on the wire.
    payload_str = json.dumps(payload)
    message = boto.sqs.message.Message()
    message.set_body(payload_str)
    return message
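A consumer reverses this encoding by parsing the JSON body back out of the received message. A minimal sketch, assuming a hypothetical region and queue name (`_decode_message` is likewise illustrative, not part of the original class):

import json

import boto.sqs


def _decode_message(message):
    # Message.get_body() returns the base64-decoded body string.
    return json.loads(message.get_body())


conn = boto.sqs.connect_to_region('us-east-1')   # hypothetical region
queue = conn.get_queue('example-queue')          # hypothetical queue name
for message in queue.get_messages(num_messages=10):
    payload = _decode_message(message)
    queue.delete_message(message)                # acknowledge once handled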
import json

import boto.sqs
import boto.sqs.message


def queue_build(github_organization, github_repo, commit_hash_or_tag):
    from bob import settings

    conn = boto.sqs.connect_to_region(settings['boto.region'])
    queue = conn.get_queue(settings['bobb.queue'])
    # RawMessage sends the JSON body as-is, without base64 encoding.
    message = boto.sqs.message.RawMessage()
    message.set_body(json.dumps(dict(
        github_organization=github_organization,
        github_repo=github_repo,
        commit_hash_or_tag=commit_hash_or_tag,
    )))
    queue.write(message)
    # queue.write() fills in the SQS-assigned message id.
    return message.id
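On the consuming side, a worker has to opt out of boto's default base64 decoding before reading these raw JSON bodies. A minimal sketch under the same `settings` keys (`next_build` is a hypothetical name):

import json

import boto.sqs
import boto.sqs.message

from bob import settings


def next_build():
    conn = boto.sqs.connect_to_region(settings['boto.region'])
    queue = conn.get_queue(settings['bobb.queue'])
    # The producer used RawMessage, so read the body back without
    # base64 decoding and parse the JSON ourselves.
    queue.set_message_class(boto.sqs.message.RawMessage)
    messages = queue.get_messages(num_messages=1)
    if not messages:
        return None
    request = json.loads(messages[0].get_body())
    queue.delete_message(messages[0])
    return request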
import json

import boto.sqs.message


def __call__(self, *args, **kw):
    # Serialize the call's positional and keyword arguments as a JSON
    # (args, kwargs) pair so a remote worker can replay the call.
    message = boto.sqs.message.Message()
    message.set_body(json.dumps((args, kw)))
    if not self.queue.write(message):
        raise AssertionError("Failed to send message")
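A matching worker decodes the (args, kwargs) pair and replays the call. A minimal sketch, assuming a hypothetical `handler` callable and a boto Queue object:

import json


def dispatch_one(queue, handler):
    # Pull up to one message, decode the serialized call, and replay it.
    for message in queue.get_messages(num_messages=1):
        args, kw = json.loads(message.get_body())
        handler(*args, **kw)
        queue.delete_message(message)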
# Upload the raw file and its ".ifshare" metadata sidecar, then enqueue
# the metadata file name so a worker can pick it up.
uploader = MultipartUploader(arquivo_raw, os.path.getsize(arquivo_raw),
                             ifshare_propriedades['arquivo'], bucket)
uploader.put()
uploader = MultipartUploader(arquivo_raw + ".ifshare",
                             os.path.getsize(arquivo_raw + ".ifshare"),
                             ifshare_propriedades['arquivo'] + ".ifshare",
                             bucket, message="Uploading metadata")
uploader.put()
s3connection.close()

print("Files are in the bucket. Updating the queue...")
sqsConnection = boto.sqs.connect_to_region(
    CONFIG['Incoming']['REGION'],
    aws_access_key_id=CONFIG['Incoming']['ACCESS_KEY'],
    aws_secret_access_key=CONFIG['Incoming']['SECRET_KEY'])
myQueue = sqsConnection.lookup(CONFIG['Incoming']['QUEUE'])
if myQueue is None:
    print("Creating the queue of files to be processed...")
    myQueue = sqsConnection.create_queue(CONFIG['Incoming']['QUEUE'])
else:
    print("Queue of files to be processed already exists")
message = boto.sqs.message.Message()
message.set_body(ifshare_propriedades['arquivo'] + '.ifshare')
myQueue.write(message)
sqsConnection.close()
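The processing side of this pipeline would poll the same queue for `.ifshare` key names and fetch them from the bucket. A minimal sketch under the same CONFIG layout (`process_file` is hypothetical):

import boto.sqs


def poll_incoming(CONFIG, process_file):
    conn = boto.sqs.connect_to_region(
        CONFIG['Incoming']['REGION'],
        aws_access_key_id=CONFIG['Incoming']['ACCESS_KEY'],
        aws_secret_access_key=CONFIG['Incoming']['SECRET_KEY'])
    queue = conn.lookup(CONFIG['Incoming']['QUEUE'])
    if queue is None:
        return
    # Long-poll for queued key names; each body is a .ifshare key name.
    for message in queue.get_messages(num_messages=10, wait_time_seconds=20):
        process_file(message.get_body())
        queue.delete_message(message)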
import boto.sqs.message


def send_message(self, body):
    self._connect_to_queue()
    # RawMessage writes the body verbatim, with no base64 encoding,
    # so the caller must supply a transport-safe string.
    message = boto.sqs.message.RawMessage()
    message.set_body(body)
    self.queue.write(message)
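Because RawMessage skips the base64 step that boto's default Message class applies, the caller usually serializes to JSON first. A hypothetical usage, assuming `sender` is an instance of the enclosing class:

import json

sender.send_message(json.dumps({"event": "example"}))  # hypothetical payload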