def call_run(self):
    """Run the local side of the hub.

    Starts the store appropriate for ``self.store_type``, tells the remote
    hub (via the upstream SQS queue) to begin operating on
    ``self.stream_name``, then loops forever servicing command messages
    from the downstream SQS queue.

    Side effects: starts store threads, writes/reads/deletes SQS messages,
    and plays audio in response to PLAY_SOUND commands. Never returns in
    normal operation (infinite message loop).
    """
    if self.store_type == store_kinesis.StoreKinesis:
        self.run_kinesis()
    elif self.store_type == store_firehose.StoreFirehose:
        self.run_firehose()
    else:
        print('Unsupported store type: ' + str(self.store_type))
        # BUG FIX: previously fell through and started storing and
        # signalled the remote hub even for an unsupported store type.
        return
    self.store.start_storing()
    self.store.start()
    # Start the remote hub
    # TODO: run boto commands to spawn an EC2 instance that
    # runs the remote hub
    m = MHMessage(self.hub_queue_up)
    m[constants.ATTR_COMMAND] = constants.START_OPERATING
    m[constants.ATTR_STREAM_NAME] = self.stream_name
    self.hub_queue_up.write(m)
    # Message loop: long-poll the downstream queue (1 s wait) and act on
    # each command; delete each message as soon as it is received so it
    # is not redelivered.
    while True:
        m = self.hub_queue_down.read(visibility_timeout=None,
                                     wait_time_seconds=1)
        if m is None:
            continue
        self.hub_queue_down.delete_message(m)
        command = m[constants.ATTR_COMMAND]
        if command == constants.PLAY_SOUND:
            audio_out = AudioOut(m[constants.ATTR_SOUND_FILE],
                                 m[constants.ATTR_VOLUME_LEVEL])
            audio_out.play()
for f in files: basefile = os.path.basename(f) mic = basefile.split("_")[0] bucket_name = BUCKET_PREFIX+mic.lower() try: print "Checking bucket: ", bucket_name bucket = check_s3_bucket_exists(s3cxn, bucket_name) except Exception: if options.create_buckets == True: print "Creating bucket: ", bucket_name s3cxn.create_bucket(bucket_name) else: sys.exit(errno.ENFILE) bucket = s3cxn.get_bucket(bucket) key = bucket.get_key(basefile) exists = (key is not None) if exists == True: print "Key exists - skipping upload" else: print "Uploading: ", f s3_multipart_upload.main(f, bucket_name) if options.donotqueue is False: m = MHMessage() m['input_file'] = os.path.basename(f) m['bucket'] = bucket_name print "Queueing message" , m.get_body(), " ==> ", options.queue q.write(m) else : print "Skipping message queueing"