def test_ps_notification_events():
    """ test set/get/delete of notification on specific events"""
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name+TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    topic_conf.set_config()
    # create bucket on the first of the rados zones
    zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications restricted to create+delete events only
    events = "OBJECT_CREATE,OBJECT_DELETE"
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name, events)
    _, status = notification_conf.set_config()
    # status is an HTTP code; any 2xx is success
    assert_equal(status/100, 2)
    # get notification and verify topic name and a non-empty event list
    result, _ = notification_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(len(parsed_result['topics']), 1)
    assert_equal(parsed_result['topics'][0]['topic']['name'], topic_name)
    assert_not_equal(len(parsed_result['topics'][0]['events']), 0)
    # TODO add test for invalid event name
    # cleanup
    notification_conf.del_config()
    topic_conf.del_config()
    zones[0].delete_bucket(bucket_name)
def test_ps_notification_events():
    """Verify set/get/delete of a notification restricted to specific events."""
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name + TOPIC_SUFFIX
    conn = ps_zones[0].conn
    # a topic must exist before a notification can reference it
    topic_conf = PSTopic(conn, topic_name)
    topic_conf.set_config()
    # the bucket is created on the first rados zone
    zones[0].create_bucket(bucket_name)
    # wait until metadata is synced into the pubsub zone
    zone_meta_checkpoint(ps_zones[0].zone)
    # register a notification limited to create and delete events
    event_types = "OBJECT_CREATE,OBJECT_DELETE"
    notification_conf = PSNotification(conn, bucket_name,
                                       topic_name, event_types)
    _, status = notification_conf.set_config()
    assert_equal(status / 100, 2)
    # read the notification back and validate its content
    result, _ = notification_conf.get_config()
    notif_json = json.loads(result)
    assert_equal(len(notif_json['topics']), 1)
    assert_equal(notif_json['topics'][0]['topic']['name'], topic_name)
    assert_not_equal(len(notif_json['topics'][0]['events']), 0)
    # TODO add test for invalid event name
    # cleanup
    notification_conf.del_config()
    topic_conf.del_config()
    zones[0].delete_bucket(bucket_name)
def test_ps_notification():
    """ test set/get/delete of notification """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name+TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    topic_conf.set_config()
    # create bucket on the first of the rados zones
    zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications (no event filter: all event types)
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name)
    _, status = notification_conf.set_config()
    # status is an HTTP code; any 2xx is success
    assert_equal(status/100, 2)
    # get notification and verify it points at the topic
    result, _ = notification_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(len(parsed_result['topics']), 1)
    assert_equal(parsed_result['topics'][0]['topic']['name'], topic_name)
    # delete notification
    _, status = notification_conf.del_config()
    assert_equal(status/100, 2)
    # TODO: deletion cannot be verified via GET
    # result, _ = notification_conf.get_config()
    # parsed_result = json.loads(result)
    # assert_equal(parsed_result['Code'], 'NoSuchKey')
    # cleanup
    topic_conf.del_config()
    zones[0].delete_bucket(bucket_name)
def test_ps_notification():
    """ test set/get/delete of notification """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name + TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    topic_conf.set_config()
    # create bucket on the first of the rados zones
    zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications (no event filter: all event types)
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name)
    _, status = notification_conf.set_config()
    # status is an HTTP code; any 2xx is success
    assert_equal(status / 100, 2)
    # get notification and verify it points at the topic
    result, _ = notification_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(len(parsed_result['topics']), 1)
    assert_equal(parsed_result['topics'][0]['topic']['name'], topic_name)
    # delete notification
    _, status = notification_conf.del_config()
    assert_equal(status / 100, 2)
    # after deletion, GET returns an empty topic list (not an error)
    result, status = notification_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(len(parsed_result['topics']), 0)
    # TODO should return 404
    # assert_equal(status, 404)
    # cleanup
    topic_conf.del_config()
    zones[0].delete_bucket(bucket_name)
def test_ps_event_fetching():
    """ test incremental fetching of events from a subscription """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name + TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    topic_conf.set_config()
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name)
    _, status = notification_conf.set_config()
    assert_equal(status / 100, 2)
    # create subscription
    sub_conf = PSSubscription(ps_zones[0].conn, bucket_name + SUB_SUFFIX,
                              topic_name)
    _, status = sub_conf.set_config()
    assert_equal(status / 100, 2)
    # create objects in the bucket
    number_of_objects = 100
    for i in range(number_of_objects):
        key = bucket.new_key(str(i))
        key.set_contents_from_string('bar')
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # page through the events, max_events per request, following the marker
    max_events = 15
    total_events_count = 0
    next_marker = None
    all_events = []
    while True:
        # get the next page of events from the subscription
        result, _ = sub_conf.get_events(max_events, next_marker)
        parsed_result = json.loads(result)
        events = parsed_result['events']
        total_events_count += len(events)
        all_events.extend(events)
        next_marker = parsed_result['next_marker']
        for event in events:
            log.debug('Event: objname: "' + str(event['info']['key']['name']) +
                      '" type: "' + str(event['event']) + '"')
        # an empty marker means there are no more pages
        if next_marker == '':
            break
    keys = list(bucket.list())
    # TODO: set exact_match to true
    verify_events_by_elements(all_events, keys, exact_match=False)
    # cleanup
    sub_conf.del_config()
    notification_conf.del_config()
    topic_conf.del_config()
    for key in bucket.list():
        key.delete()
    zones[0].delete_bucket(bucket_name)
def test_ps_event_fetching():
    """ test incremental fetching of events from a subscription """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name+TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    topic_conf.set_config()
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name)
    _, status = notification_conf.set_config()
    assert_equal(status/100, 2)
    # create subscription
    sub_conf = PSSubscription(ps_zones[0].conn, bucket_name+SUB_SUFFIX,
                              topic_name)
    _, status = sub_conf.set_config()
    assert_equal(status/100, 2)
    # create objects in the bucket
    number_of_objects = 100
    for i in range(number_of_objects):
        key = bucket.new_key(str(i))
        key.set_contents_from_string('bar')
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # page through the events, max_events per request, following the marker
    max_events = 15
    total_events_count = 0
    next_marker = None
    all_events = []
    while True:
        # get the next page of events from the subscription
        result, _ = sub_conf.get_events(max_events, next_marker)
        parsed_result = json.loads(result)
        events = parsed_result['events']
        total_events_count += len(events)
        all_events.extend(events)
        next_marker = parsed_result['next_marker']
        for event in events:
            log.debug('Event: objname: "' + str(event['info']['key']['name']) +
                      '" type: "' + str(event['event']) + '"')
        # an empty marker means there are no more pages
        if next_marker == '':
            break
    keys = list(bucket.list())
    # TODO: set exact_match to true
    verify_events_by_elements(all_events, keys, exact_match=False)
    # cleanup
    sub_conf.del_config()
    notification_conf.del_config()
    topic_conf.del_config()
    for key in bucket.list():
        key.delete()
    zones[0].delete_bucket(bucket_name)
def test_ps_creation_triggers():
    """ test object creation notifications using put/copy/multipart """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name + TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    topic_conf.set_config()
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name)
    _, status = notification_conf.set_config()
    assert_equal(status / 100, 2)
    # create subscription
    sub_conf = PSSubscription(ps_zones[0].conn, bucket_name + SUB_SUFFIX,
                              topic_name)
    _, status = sub_conf.set_config()
    assert_equal(status / 100, 2)
    # create objects in the bucket using PUT
    key = bucket.new_key('put')
    key.set_contents_from_string('bar')
    # create objects in the bucket using COPY
    bucket.copy_key('copy', bucket.name, key.name)
    # create objects in the bucket using multi-part upload.
    # BUGFIX: previously the content was written to one TemporaryFile that was
    # closed (which deletes it) and a second, brand-new empty TemporaryFile
    # was uploaded, so the part carried no data. Write and upload the same
    # file object, rewound to the start.
    fp = tempfile.TemporaryFile(mode='w+')
    fp.write('bar')
    fp.seek(0)
    uploader = bucket.initiate_multipart_upload('multipart')
    uploader.upload_part_from_file(fp, 1)
    uploader.complete_upload()
    fp.close()
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # get the create events from the subscription
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event key: "' + str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    # TODO: verify the specific 3 keys: 'put', 'copy' and 'multipart'
    assert len(parsed_result['events']) >= 3
    # cleanup
    sub_conf.del_config()
    notification_conf.del_config()
    topic_conf.del_config()
    for key in bucket.list():
        key.delete()
    zones[0].delete_bucket(bucket_name)
def test_ps_creation_triggers():
    """ test object creation notifications using put/copy/multipart """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name+TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    topic_conf.set_config()
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name)
    _, status = notification_conf.set_config()
    assert_equal(status/100, 2)
    # create subscription
    sub_conf = PSSubscription(ps_zones[0].conn, bucket_name+SUB_SUFFIX,
                              topic_name)
    _, status = sub_conf.set_config()
    assert_equal(status/100, 2)
    # create objects in the bucket using PUT
    key = bucket.new_key('put')
    key.set_contents_from_string('bar')
    # create objects in the bucket using COPY
    bucket.copy_key('copy', bucket.name, key.name)
    # create objects in the bucket using multi-part upload.
    # BUGFIX: previously the content was written to one TemporaryFile that was
    # closed (which deletes it) and a second, brand-new empty TemporaryFile
    # was uploaded, so the part carried no data. Write and upload the same
    # file object, rewound to the start.
    fp = tempfile.TemporaryFile(mode='w+')
    fp.write('bar')
    fp.seek(0)
    uploader = bucket.initiate_multipart_upload('multipart')
    uploader.upload_part_from_file(fp, 1)
    uploader.complete_upload()
    fp.close()
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # get the create events from the subscription
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event key: "' + str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    # TODO: verify the specific 3 keys: 'put', 'copy' and 'multipart'
    assert len(parsed_result['events']) >= 3
    # cleanup
    sub_conf.del_config()
    notification_conf.del_config()
    topic_conf.del_config()
    for key in bucket.list():
        key.delete()
    zones[0].delete_bucket(bucket_name)
def test_ps_versioned_deletion():
    """ test notification of deletion markers """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name + TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    topic_conf.set_config()
    # create versioned bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    bucket.configure_versioning(True)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications for delete events only
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name, "OBJECT_DELETE")
    _, status = notification_conf.set_config()
    assert_equal(status / 100, 2)
    # create subscription
    sub_conf = PSSubscription(ps_zones[0].conn, bucket_name + SUB_SUFFIX,
                              topic_name)
    _, status = sub_conf.set_config()
    assert_equal(status / 100, 2)
    # create two versions of the same object in the bucket
    key = bucket.new_key('foo')
    key.set_contents_from_string('bar')
    v1 = key.version_id
    key.set_contents_from_string('kaboom')
    v2 = key.version_id
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # set delete markers
    # NOTE(review): deleting with an explicit version_id removes that version
    # rather than creating a delete marker — confirm the comment vs. intent
    bucket.delete_key(key.name, version_id=v2)
    bucket.delete_key(key.name, version_id=v1)
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # get the delete events from the subscription
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event key: "' + str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    # TODO: verify the specific events
    assert len(parsed_result['events']) >= 2
    # cleanup
    sub_conf.del_config()
    notification_conf.del_config()
    topic_conf.del_config()
    zones[0].delete_bucket(bucket_name)
def test_ps_versioned_deletion():
    """ test notification of deletion markers """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name+TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    topic_conf.set_config()
    # create versioned bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    bucket.configure_versioning(True)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications for delete events only
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name, "OBJECT_DELETE")
    _, status = notification_conf.set_config()
    assert_equal(status/100, 2)
    # create subscription
    sub_conf = PSSubscription(ps_zones[0].conn, bucket_name+SUB_SUFFIX,
                              topic_name)
    _, status = sub_conf.set_config()
    assert_equal(status/100, 2)
    # create two versions of the same object in the bucket
    key = bucket.new_key('foo')
    key.set_contents_from_string('bar')
    v1 = key.version_id
    key.set_contents_from_string('kaboom')
    v2 = key.version_id
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # set delete markers
    # NOTE(review): deleting with an explicit version_id removes that version
    # rather than creating a delete marker — confirm the comment vs. intent
    bucket.delete_key(key.name, version_id=v2)
    bucket.delete_key(key.name, version_id=v1)
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # get the delete events from the subscription
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event key: "' + str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    # TODO: verify the specific events
    assert len(parsed_result['events']) >= 2
    # cleanup
    sub_conf.del_config()
    notification_conf.del_config()
    topic_conf.del_config()
    zones[0].delete_bucket(bucket_name)
def test_ps_s3_notification():
    """ test s3 notification set/get/delete """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    # create bucket on the first of the rados zones
    # (the returned bucket object was unused; drop the binding)
    zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create topic
    # BUGFIX: topic_name was assigned twice back-to-back; duplicate removed
    topic_name = bucket_name + TOPIC_SUFFIX
    topic_arn = 'arn:aws:sns:::' + topic_name
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    response, status = topic_conf.set_config()
    assert_equal(status/100, 2)
    parsed_result = json.loads(response)
    assert_equal(parsed_result['arn'], topic_arn)
    # create one s3 notification
    notification_name1 = bucket_name + NOTIFICATION_SUFFIX + '_1'
    s3_notification_conf1 = PSNotificationS3(ps_zones[0].conn, bucket_name,
                                             notification_name1, topic_arn,
                                             ['s3:ObjectCreated:*'])
    response, status = s3_notification_conf1.set_config()
    assert_equal(status/100, 2)
    # create another s3 notification
    notification_name2 = bucket_name + NOTIFICATION_SUFFIX + '_2'
    s3_notification_conf2 = PSNotificationS3(ps_zones[0].conn, bucket_name,
                                             notification_name2, topic_arn,
                                             ['s3:ObjectCreated:*',
                                              's3:ObjectRemoved:*'])
    response, status = s3_notification_conf2.set_config()
    assert_equal(status/100, 2)
    zone_meta_checkpoint(ps_zones[0].zone)
    # get all notifications on the bucket: both should be listed
    response, status = s3_notification_conf1.get_config()
    assert_equal(status/100, 2)
    assert_equal(len(response['TopicConfigurations']), 2)
    assert_equal(response['TopicConfigurations'][0]['TopicArn'], topic_arn)
    assert_equal(response['TopicConfigurations'][1]['TopicArn'], topic_arn)
    # get each specific notification on the bucket
    response, status = s3_notification_conf1.get_config(all_notifications=False)
    assert_equal(status/100, 2)
    assert_equal(response['NotificationConfiguration']['TopicConfiguration']['Topic'],
                 topic_arn)
    assert_equal(response['NotificationConfiguration']['TopicConfiguration']['Id'],
                 notification_name1)
    response, status = s3_notification_conf2.get_config(all_notifications=False)
    assert_equal(status/100, 2)
    assert_equal(response['NotificationConfiguration']['TopicConfiguration']['Topic'],
                 topic_arn)
    assert_equal(response['NotificationConfiguration']['TopicConfiguration']['Id'],
                 notification_name2)
    # delete specific notifications
    _, status = s3_notification_conf1.del_config(all_notifications=False)
    assert_equal(status/100, 2)
    _, status = s3_notification_conf2.del_config(all_notifications=False)
    assert_equal(status/100, 2)
    # cleanup
    topic_conf.del_config()
    # delete the bucket
    zones[0].delete_bucket(bucket_name)
def test_ps_s3_notification_records():
    """ test s3 records fetching """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create topic
    topic_name = bucket_name + TOPIC_SUFFIX
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    _, status = topic_conf.set_config()
    assert_equal(status / 100, 2)
    # create s3 notification
    notification_name = bucket_name + NOTIFICATION_SUFFIX
    topic_arn = 'arn:aws:sns:::' + topic_name
    s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name,
                                            notification_name, topic_arn,
                                            ['s3:ObjectCreated:*'])
    response, status = s3_notification_conf.set_config()
    assert_equal(status / 100, 2)
    zone_meta_checkpoint(ps_zones[0].zone)
    # the s3 notification auto-generates a subscription named after it;
    # fetch it by that name
    sub_conf = PSSubscription(ps_zones[0].conn, notification_name,
                              topic_name)
    _, status = sub_conf.get_config()
    assert_equal(status / 100, 2)
    # create objects in the bucket
    number_of_objects = 10
    for i in range(number_of_objects):
        key = bucket.new_key(str(i))
        key.set_contents_from_string('bar')
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # get the events (s3 'Records' format) from the subscription
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    for record in parsed_result['Records']:
        log.debug(record)
    keys = list(bucket.list())
    # TODO: set exact_match to true
    verify_s3_records_by_elements(parsed_result['Records'], keys,
                                  exact_match=False)
    # cleanup
    _, status = s3_notification_conf.del_config()
    topic_conf.del_config()
    # delete the keys
    for key in bucket.list():
        key.delete()
    zones[0].delete_bucket(bucket_name)
def test_ps_push_amqp():
    """ test pushing to amqp endpoint """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name + TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    _, status = topic_conf.set_config()
    assert_equal(status / 100, 2)
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name)
    _, status = notification_conf.set_config()
    assert_equal(status / 100, 2)
    # create subscription that pushes to a local amqp broker
    sub_conf = PSSubscription(
        ps_zones[0].conn, bucket_name + SUB_SUFFIX,
        topic_name, endpoint='amqp://localhost',
        endpoint_args='amqp-exchange=ex1&amqp-ack-level=none')
    _, status = sub_conf.set_config()
    assert_equal(status / 100, 2)
    # create objects in the bucket
    number_of_objects = 10
    for i in range(number_of_objects):
        key = bucket.new_key(str(i))
        key.set_contents_from_string('bar')
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # TODO check amqp receiver
    # delete objects from the bucket
    for key in bucket.list():
        key.delete()
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # TODO check amqp receiver
    # cleanup
    sub_conf.del_config()
    notification_conf.del_config()
    topic_conf.del_config()
    zones[0].delete_bucket(bucket_name)
def test_ps_s3_notification_records():
    """ test s3 records fetching """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create topic
    topic_name = bucket_name + TOPIC_SUFFIX
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    _, status = topic_conf.set_config()
    assert_equal(status/100, 2)
    # create s3 notification
    notification_name = bucket_name + NOTIFICATION_SUFFIX
    topic_arn = 'arn:aws:sns:::' + topic_name
    s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name,
                                            notification_name, topic_arn,
                                            ['s3:ObjectCreated:*'])
    response, status = s3_notification_conf.set_config()
    assert_equal(status/100, 2)
    zone_meta_checkpoint(ps_zones[0].zone)
    # the s3 notification auto-generates a subscription named after it;
    # fetch it by that name
    sub_conf = PSSubscription(ps_zones[0].conn, notification_name,
                              topic_name)
    _, status = sub_conf.get_config()
    assert_equal(status/100, 2)
    # create objects in the bucket
    number_of_objects = 10
    for i in range(number_of_objects):
        key = bucket.new_key(str(i))
        key.set_contents_from_string('bar')
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # get the events (s3 'Records' format) from the subscription
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    for record in parsed_result['Records']:
        log.debug(record)
    keys = list(bucket.list())
    # TODO: set exact_match to true
    verify_s3_records_by_elements(parsed_result['Records'], keys,
                                  exact_match=False)
    # cleanup
    _, status = s3_notification_conf.del_config()
    topic_conf.del_config()
    # delete the keys
    for key in bucket.list():
        key.delete()
    zones[0].delete_bucket(bucket_name)
def test_ps_push_amqp():
    """ test pushing to amqp endpoint """
    # BUGFIX: the skip exception must be raised, not returned — returning the
    # SkipTest instance makes the framework report the test as *passed*
    # instead of skipped. The body below is kept for manual runs.
    raise SkipTest("PubSub push tests are only manual")
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name+TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    _, status = topic_conf.set_config()
    assert_equal(status/100, 2)
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name)
    _, status = notification_conf.set_config()
    assert_equal(status/100, 2)
    # create subscription that pushes to a local amqp broker
    sub_conf = PSSubscription(ps_zones[0].conn, bucket_name+SUB_SUFFIX,
                              topic_name, endpoint='amqp://localhost',
                              endpoint_args='amqp-exchange=ex1&amqp-ack-level=none')
    _, status = sub_conf.set_config()
    assert_equal(status/100, 2)
    # create objects in the bucket
    number_of_objects = 10
    for i in range(number_of_objects):
        key = bucket.new_key(str(i))
        key.set_contents_from_string('bar')
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # TODO check amqp receiver
    # delete objects from the bucket
    for key in bucket.list():
        key.delete()
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # TODO check amqp receiver
    # cleanup
    sub_conf.del_config()
    notification_conf.del_config()
    topic_conf.del_config()
    zones[0].delete_bucket(bucket_name)
def test_ps_s3_push_amqp():
    """ test pushing to amqp endpoint in s3 record format"""
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name + TOPIC_SUFFIX
    # create topic with an amqp push endpoint
    topic_conf = PSTopic(ps_zones[0].conn, topic_name,
                         endpoint='amqp://localhost',
                         endpoint_args='amqp-exchange=ex1&amqp-ack-level=none')
    _, status = topic_conf.set_config()
    assert_equal(status / 100, 2)
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create s3 notification
    notification_name = bucket_name + NOTIFICATION_SUFFIX
    topic_arn = 'arn:aws:sns:::' + topic_name
    s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name,
                                            notification_name, topic_arn,
                                            ['s3:ObjectCreated:*'])
    _, status = s3_notification_conf.set_config()
    assert_equal(status / 100, 2)
    # create objects in the bucket
    number_of_objects = 10
    for i in range(number_of_objects):
        key = bucket.new_key(str(i))
        key.set_contents_from_string('bar')
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # TODO check amqp receiver
    # delete objects from the bucket
    for key in bucket.list():
        key.delete()
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # TODO check amqp receiver
    # cleanup
    s3_notification_conf.del_config()
    topic_conf.del_config()
    zones[0].delete_bucket(bucket_name)
def test_ps_s3_push_http():
    """ test pushing to http endpoint in s3 record format"""
    # BUGFIX: the skip exception must be raised, not returned — returning the
    # SkipTest instance makes the framework report the test as *passed*
    # instead of skipped. The body below is kept for manual runs.
    raise SkipTest("PubSub push tests are only manual")
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name+TOPIC_SUFFIX
    # create topic with an http push endpoint
    topic_conf = PSTopic(ps_zones[0].conn, topic_name,
                         endpoint='http://localhost:9001')
    _, status = topic_conf.set_config()
    assert_equal(status/100, 2)
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create s3 notification
    notification_name = bucket_name + NOTIFICATION_SUFFIX
    topic_arn = 'arn:aws:sns:::' + topic_name
    s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name,
                                            notification_name, topic_arn,
                                            ['s3:ObjectCreated:*'])
    _, status = s3_notification_conf.set_config()
    assert_equal(status/100, 2)
    # create objects in the bucket
    number_of_objects = 10
    for i in range(number_of_objects):
        key = bucket.new_key(str(i))
        key.set_contents_from_string('bar')
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # TODO check http server
    # delete objects from the bucket
    for key in bucket.list():
        key.delete()
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # TODO check http server
    # cleanup
    s3_notification_conf.del_config()
    topic_conf.del_config()
    zones[0].delete_bucket(bucket_name)
def test_ps_push_http():
    """ test pushing to http endpoint """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name+TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    _, status = topic_conf.set_config()
    assert_equal(status/100, 2)
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name)
    _, status = notification_conf.set_config()
    assert_equal(status/100, 2)
    # create subscription that pushes to a local http server
    sub_conf = PSSubscription(ps_zones[0].conn, bucket_name+SUB_SUFFIX,
                              topic_name, endpoint='http://localhost:9001')
    _, status = sub_conf.set_config()
    assert_equal(status/100, 2)
    # create objects in the bucket
    number_of_objects = 10
    for i in range(number_of_objects):
        key = bucket.new_key(str(i))
        key.set_contents_from_string('bar')
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # delete objects from the bucket
    for key in bucket.list():
        key.delete()
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # cleanup
    sub_conf.del_config()
    notification_conf.del_config()
    topic_conf.del_config()
    zones[0].delete_bucket(bucket_name)
def test_ps_topic():
    """Verify that a topic can be created, read back, and deleted."""
    _, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name + TOPIC_SUFFIX
    conn = ps_zones[0].conn
    # create the topic and expect a 2xx HTTP status
    topic_conf = PSTopic(conn, topic_name)
    _, status = topic_conf.set_config()
    assert_equal(status / 100, 2)
    # read the topic back and check its content: right name, no subscriptions
    result, _ = topic_conf.get_config()
    topic_json = json.loads(result)
    assert_equal(topic_json['topic']['name'], topic_name)
    assert_equal(len(topic_json['subs']), 0)
    # delete the topic and expect a 2xx HTTP status
    _, status = topic_conf.del_config()
    assert_equal(status / 100, 2)
    # getting a deleted topic should report 'NoSuchKey'
    result, _ = topic_conf.get_config()
    topic_json = json.loads(result)
    assert_equal(topic_json['Code'], 'NoSuchKey')
def test_ps_topic():
    """ test set/get/delete of topic """
    _, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name+TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    _, status = topic_conf.set_config()
    # status is an HTTP code; any 2xx is success
    assert_equal(status/100, 2)
    # get topic
    result, _ = topic_conf.get_config()
    # verify topic content: right name, no subscriptions yet
    parsed_result = json.loads(result)
    assert_equal(parsed_result['topic']['name'], topic_name)
    assert_equal(len(parsed_result['subs']), 0)
    # delete topic
    _, status = topic_conf.del_config()
    assert_equal(status/100, 2)
    # verify topic is deleted: GET now reports 'NoSuchKey'
    result, _ = topic_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(parsed_result['Code'], 'NoSuchKey')
def test_ps_subscription():
    """ test set/get/delete of subscription """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name+TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    topic_conf.set_config()
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name)
    _, status = notification_conf.set_config()
    assert_equal(status/100, 2)
    # create subscription
    sub_conf = PSSubscription(ps_zones[0].conn, bucket_name+SUB_SUFFIX,
                              topic_name)
    _, status = sub_conf.set_config()
    assert_equal(status/100, 2)
    # get the subscription and verify it points at the topic
    result, _ = sub_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(parsed_result['topic'], topic_name)
    # create objects in the bucket
    number_of_objects = 10
    for i in range(number_of_objects):
        key = bucket.new_key(str(i))
        key.set_contents_from_string('bar')
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # get the create events from the subscription
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event: objname: "' + str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    keys = list(bucket.list())
    # TODO: set exact_match to true
    verify_events_by_elements(parsed_result['events'], keys, exact_match=False)
    # delete objects from the bucket
    for key in bucket.list():
        key.delete()
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # get the delete events from the subscriptions
    result, _ = sub_conf.get_events()
    # BUGFIX: the fresh result was never re-parsed, so the loop below was
    # logging the stale creation events instead of the delete events
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event: objname: "' + str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    # TODO: check deletions
    # verify_events_by_elements(parsed_result['events'], keys,
    #                           exact_match=False, deletions=True)
    # we should see the creations as well as the deletions
    # delete subscription
    _, status = sub_conf.del_config()
    assert_equal(status/100, 2)
    # after deletion, GET returns an empty topic name
    result, _ = sub_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(parsed_result['topic'], '')
    # TODO should return "no-key" instead
    # assert_equal(parsed_result['Code'], 'NoSuchKey')
    # cleanup
    notification_conf.del_config()
    topic_conf.del_config()
    zones[0].delete_bucket(bucket_name)
def test_ps_event_type_subscription():
    """Test subscriptions filtered by event type.

    Creates three topic/notification/subscription triples (create-only,
    delete-only, all events), then verifies each subscription sees only the
    event types it was configured for, both after object creation and after
    object deletion.
    """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    # create topic for objects creation
    topic_create_name = bucket_name+TOPIC_SUFFIX+'_create'
    topic_create_conf = PSTopic(ps_zones[0].conn, topic_create_name)
    topic_create_conf.set_config()
    # create topic for objects deletion
    topic_delete_name = bucket_name+TOPIC_SUFFIX+'_delete'
    topic_delete_conf = PSTopic(ps_zones[0].conn, topic_delete_name)
    topic_delete_conf.set_config()
    # create topic for all events
    topic_name = bucket_name+TOPIC_SUFFIX+'_all'
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    topic_conf.set_config()
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # create notifications for objects creation
    notification_create_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                              topic_create_name, "OBJECT_CREATE")
    _, status = notification_create_conf.set_config()
    assert_equal(status/100, 2)
    # create notifications for objects deletion
    notification_delete_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                              topic_delete_name, "OBJECT_DELETE")
    _, status = notification_delete_conf.set_config()
    assert_equal(status/100, 2)
    # create notifications for all events
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name, "OBJECT_DELETE,OBJECT_CREATE")
    _, status = notification_conf.set_config()
    assert_equal(status/100, 2)
    # create subscription for objects creation
    sub_create_conf = PSSubscription(ps_zones[0].conn, bucket_name+SUB_SUFFIX+'_create',
                                     topic_create_name)
    _, status = sub_create_conf.set_config()
    assert_equal(status/100, 2)
    # create subscription for objects deletion
    sub_delete_conf = PSSubscription(ps_zones[0].conn, bucket_name+SUB_SUFFIX+'_delete',
                                     topic_delete_name)
    _, status = sub_delete_conf.set_config()
    assert_equal(status/100, 2)
    # create subscription for all events
    sub_conf = PSSubscription(ps_zones[0].conn, bucket_name+SUB_SUFFIX+'_all',
                              topic_name)
    _, status = sub_conf.set_config()
    assert_equal(status/100, 2)
    # create objects in the bucket
    number_of_objects = 10
    for i in range(number_of_objects):
        key = bucket.new_key(str(i))
        key.set_contents_from_string('bar')
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # get the events from the creation subscription
    result, _ = sub_create_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event (OBJECT_CREATE): objname: "' + str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    keys = list(bucket.list())
    # TODO: set exact_match to true
    verify_events_by_elements(parsed_result['events'], keys, exact_match=False)
    # get the events from the deletions subscription
    result, _ = sub_delete_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event (OBJECT_DELETE): objname: "' + str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    # nothing was deleted yet, so the delete-only subscription must be empty
    assert_equal(len(parsed_result['events']), 0)
    # get the events from the all events subscription
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event (OBJECT_CREATE,OBJECT_DELETE): objname: "' +
                  str(event['info']['key']['name']) + '" type: "' + str(event['event']) + '"')
    # TODO: set exact_match to true
    verify_events_by_elements(parsed_result['events'], keys, exact_match=False)
    # delete objects from the bucket
    for key in bucket.list():
        key.delete()
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    log.debug("Event (OBJECT_DELETE) synced")
    # get the events from the creations subscription
    result, _ = sub_create_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event (OBJECT_CREATE): objname: "' + str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    # deletions should not change the creation events
    # TODO: set exact_match to true
    verify_events_by_elements(parsed_result['events'], keys, exact_match=False)
    # get the events from the deletions subscription
    result, _ = sub_delete_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event (OBJECT_DELETE): objname: "' + str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    # only deletions should be listed here
    # TODO: set exact_match to true
    verify_events_by_elements(parsed_result['events'], keys, exact_match=False, deletions=True)
    # get the events from the all events subscription
    # FIX: was sub_create_conf.get_events(), which re-queried the
    # creation-only subscription and left the all-events one untested
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event (OBJECT_CREATE,OBJECT_DELETE): objname: "' +
                  str(event['info']['key']['name']) + '" type: "' + str(event['event']) + '"')
    # both deletions and creations should be here
    verify_events_by_elements(parsed_result['events'], keys, exact_match=False, deletions=False)
    # verify_events_by_elements(parsed_result['events'], keys, exact_match=False, deletions=True)
    # TODO: (1) test deletions (2) test overall number of events
    # cleanup
    sub_create_conf.del_config()
    sub_delete_conf.del_config()
    sub_conf.del_config()
    notification_create_conf.del_config()
    notification_delete_conf.del_config()
    notification_conf.del_config()
    topic_create_conf.del_config()
    topic_delete_conf.del_config()
    topic_conf.del_config()
    zones[0].delete_bucket(bucket_name)
def test_ps_event_acking():
    """Test acking of some events in a subscription.

    Creates objects, fetches the resulting events, acks half of them, and
    verifies that the acked events no longer appear in the subscription.
    """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name + TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    topic_conf.set_config()
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name)
    _, status = notification_conf.set_config()
    assert_equal(status / 100, 2)
    # create subscription
    sub_conf = PSSubscription(ps_zones[0].conn, bucket_name + SUB_SUFFIX,
                              topic_name)
    _, status = sub_conf.set_config()
    assert_equal(status / 100, 2)
    # create objects in the bucket
    number_of_objects = 10
    for i in range(number_of_objects):
        key = bucket.new_key(str(i))
        key.set_contents_from_string('bar')
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # get the create events from the subscription
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    events = parsed_result['events']
    original_number_of_events = len(events)
    for event in events:
        log.debug('Event (before ack) id: "' + str(event['id']) + '"')
    keys = list(bucket.list())
    # TODO: set exact_match to true
    verify_events_by_elements(events, keys, exact_match=False)
    # ack half of the events
    # FIX: use floor division so the countdown stays an integer on Python 3
    # ('/' yields a float there)
    events_to_ack = number_of_objects // 2
    for event in events:
        if events_to_ack == 0:
            break
        _, status = sub_conf.ack_events(event['id'])
        assert_equal(status / 100, 2)
        events_to_ack -= 1
    # verify that acked events are gone
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event (after ack) id: "' + str(event['id']) + '"')
    assert_equal(len(parsed_result['events']),
                 original_number_of_events - number_of_objects // 2)
    # cleanup
    sub_conf.del_config()
    notification_conf.del_config()
    topic_conf.del_config()
    for key in bucket.list():
        key.delete()
    zones[0].delete_bucket(bucket_name)
def test_ps_event_acking():
    """Test acking of some events in a subscription.

    Creates objects, fetches the resulting events, acks half of them, and
    verifies that the acked events no longer appear in the subscription.
    """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name+TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    topic_conf.set_config()
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name)
    _, status = notification_conf.set_config()
    assert_equal(status/100, 2)
    # create subscription
    sub_conf = PSSubscription(ps_zones[0].conn, bucket_name+SUB_SUFFIX,
                              topic_name)
    _, status = sub_conf.set_config()
    assert_equal(status/100, 2)
    # create objects in the bucket
    number_of_objects = 10
    for i in range(number_of_objects):
        key = bucket.new_key(str(i))
        key.set_contents_from_string('bar')
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # get the create events from the subscription
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    events = parsed_result['events']
    original_number_of_events = len(events)
    for event in events:
        log.debug('Event (before ack) id: "' + str(event['id']) + '"')
    keys = list(bucket.list())
    # TODO: set exact_match to true
    verify_events_by_elements(events, keys, exact_match=False)
    # ack half of the events
    # FIX: use floor division so the countdown stays an integer on Python 3
    # ('/' yields a float there)
    events_to_ack = number_of_objects//2
    for event in events:
        if events_to_ack == 0:
            break
        _, status = sub_conf.ack_events(event['id'])
        assert_equal(status/100, 2)
        events_to_ack -= 1
    # verify that acked events are gone
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event (after ack) id: "' + str(event['id']) + '"')
    assert_equal(len(parsed_result['events']),
                 original_number_of_events - number_of_objects//2)
    # cleanup
    sub_conf.del_config()
    notification_conf.del_config()
    topic_conf.del_config()
    for key in bucket.list():
        key.delete()
    zones[0].delete_bucket(bucket_name)
def test_ps_event_type_subscription():
    """Test subscriptions filtered by event type.

    Creates three topic/notification/subscription triples (create-only,
    delete-only, all events), verifies each subscription sees only its
    configured event types, then exercises subscription deletion with
    topic=True before the remaining cleanup.
    """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    # create topic for objects creation
    topic_create_name = bucket_name + TOPIC_SUFFIX + '_create'
    topic_create_conf = PSTopic(ps_zones[0].conn, topic_create_name)
    topic_create_conf.set_config()
    # create topic for objects deletion
    topic_delete_name = bucket_name + TOPIC_SUFFIX + '_delete'
    topic_delete_conf = PSTopic(ps_zones[0].conn, topic_delete_name)
    topic_delete_conf.set_config()
    # create topic for all events
    topic_name = bucket_name + TOPIC_SUFFIX + '_all'
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    topic_conf.set_config()
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # create notifications for objects creation
    notification_create_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                              topic_create_name, "OBJECT_CREATE")
    _, status = notification_create_conf.set_config()
    assert_equal(status / 100, 2)
    # create notifications for objects deletion
    notification_delete_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                              topic_delete_name, "OBJECT_DELETE")
    _, status = notification_delete_conf.set_config()
    assert_equal(status / 100, 2)
    # create notifications for all events
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name, "OBJECT_DELETE,OBJECT_CREATE")
    _, status = notification_conf.set_config()
    assert_equal(status / 100, 2)
    # create subscription for objects creation
    sub_create_conf = PSSubscription(ps_zones[0].conn,
                                     bucket_name + SUB_SUFFIX + '_create',
                                     topic_create_name)
    _, status = sub_create_conf.set_config()
    assert_equal(status / 100, 2)
    # create subscription for objects deletion
    sub_delete_conf = PSSubscription(ps_zones[0].conn,
                                     bucket_name + SUB_SUFFIX + '_delete',
                                     topic_delete_name)
    _, status = sub_delete_conf.set_config()
    assert_equal(status / 100, 2)
    # create subscription for all events
    sub_conf = PSSubscription(ps_zones[0].conn,
                              bucket_name + SUB_SUFFIX + '_all',
                              topic_name)
    _, status = sub_conf.set_config()
    assert_equal(status / 100, 2)
    # create objects in the bucket
    number_of_objects = 10
    for i in range(number_of_objects):
        key = bucket.new_key(str(i))
        key.set_contents_from_string('bar')
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # get the events from the creation subscription
    result, _ = sub_create_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event (OBJECT_CREATE): objname: "' +
                  str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    keys = list(bucket.list())
    # TODO: set exact_match to true
    verify_events_by_elements(parsed_result['events'], keys, exact_match=False)
    # get the events from the deletions subscription
    result, _ = sub_delete_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event (OBJECT_DELETE): objname: "' +
                  str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    # nothing was deleted yet, so the delete-only subscription must be empty
    assert_equal(len(parsed_result['events']), 0)
    # get the events from the all events subscription
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event (OBJECT_CREATE,OBJECT_DELETE): objname: "' +
                  str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    # TODO: set exact_match to true
    verify_events_by_elements(parsed_result['events'], keys, exact_match=False)
    # delete objects from the bucket
    for key in bucket.list():
        key.delete()
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    log.debug("Event (OBJECT_DELETE) synced")
    # get the events from the creations subscription
    result, _ = sub_create_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event (OBJECT_CREATE): objname: "' +
                  str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    # deletions should not change the creation events
    # TODO: set exact_match to true
    verify_events_by_elements(parsed_result['events'], keys, exact_match=False)
    # get the events from the deletions subscription
    result, _ = sub_delete_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event (OBJECT_DELETE): objname: "' +
                  str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    # only deletions should be listed here
    # TODO: set exact_match to true
    verify_events_by_elements(parsed_result['events'], keys,
                              exact_match=False, deletions=True)
    # get the events from the all events subscription
    # FIX: was sub_create_conf.get_events(), which re-queried the
    # creation-only subscription and left the all-events one untested
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event (OBJECT_CREATE,OBJECT_DELETE): objname: "' +
                  str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    # both deletions and creations should be here
    verify_events_by_elements(parsed_result['events'], keys,
                              exact_match=False, deletions=False)
    # verify_events_by_elements(parsed_result['events'], keys, exact_match=False, deletions=True)
    # TODO: (1) test deletions (2) test overall number of events
    # test subscription deletion when topic is specified
    _, status = sub_create_conf.del_config(topic=True)
    assert_equal(status / 100, 2)
    _, status = sub_delete_conf.del_config(topic=True)
    assert_equal(status / 100, 2)
    _, status = sub_conf.del_config(topic=True)
    assert_equal(status / 100, 2)
    # cleanup
    notification_create_conf.del_config()
    notification_delete_conf.del_config()
    notification_conf.del_config()
    topic_create_conf.del_config()
    topic_delete_conf.del_config()
    topic_conf.del_config()
    zones[0].delete_bucket(bucket_name)
def test_ps_subscription():
    """Test set/get/delete of a subscription and event retrieval.

    Creates a topic, bucket, notification and subscription; writes then
    deletes objects; fetches events from the subscription after each phase;
    finally deletes the subscription and verifies it is gone.
    """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    topic_name = bucket_name + TOPIC_SUFFIX
    # create topic
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    topic_conf.set_config()
    # create bucket on the first of the rados zones
    bucket = zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create notifications
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       topic_name)
    _, status = notification_conf.set_config()
    assert_equal(status / 100, 2)
    # create subscription
    sub_conf = PSSubscription(ps_zones[0].conn, bucket_name + SUB_SUFFIX,
                              topic_name)
    _, status = sub_conf.set_config()
    assert_equal(status / 100, 2)
    # get the subscription
    result, _ = sub_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(parsed_result['topic'], topic_name)
    # create objects in the bucket
    number_of_objects = 10
    for i in range(number_of_objects):
        key = bucket.new_key(str(i))
        key.set_contents_from_string('bar')
    # wait for sync
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # get the create events from the subscription
    result, _ = sub_conf.get_events()
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event: objname: "' + str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    keys = list(bucket.list())
    # TODO: set exact_match to true
    verify_events_by_elements(parsed_result['events'], keys, exact_match=False)
    # delete objects from the bucket
    for key in bucket.list():
        key.delete()
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    zone_bucket_checkpoint(ps_zones[0].zone, zones[0].zone, bucket_name)
    # get the delete events from the subscriptions
    result, _ = sub_conf.get_events()
    # FIX: re-parse the fresh response; previously the loop below iterated
    # the stale creation events because json.loads() was never called again
    parsed_result = json.loads(result)
    for event in parsed_result['events']:
        log.debug('Event: objname: "' + str(event['info']['key']['name']) +
                  '" type: "' + str(event['event']) + '"')
    # TODO: check deletions
    # verify_events_by_elements(parsed_result['events'], keys, exact_match=False, deletions=True)
    # we should see the creations as well as the deletions
    # delete subscription
    _, status = sub_conf.del_config()
    assert_equal(status / 100, 2)
    result, status = sub_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(parsed_result['topic'], '')
    # TODO should return 404
    # assert_equal(status, 404)
    # cleanup
    notification_conf.del_config()
    topic_conf.del_config()
    zones[0].delete_bucket(bucket_name)
def test_ps_s3_notification_low_level():
    """Test the low-level implementation of S3 notifications.

    Creating an S3 notification should auto-generate a pubsub topic,
    notification and subscription; deleting the S3 notification and topic
    should clean those up again.
    """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    # create bucket on the first of the rados zones
    # (the bucket handle itself is not needed; only its name is used below)
    zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create topic
    topic_name = bucket_name + TOPIC_SUFFIX
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    _, status = topic_conf.set_config()
    assert_equal(status/100, 2)
    # create s3 notification
    notification_name = bucket_name + NOTIFICATION_SUFFIX
    generated_topic_name = notification_name+'_'+topic_name
    topic_arn = 'arn:aws:sns:::' + topic_name
    s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name,
                                            notification_name, topic_arn,
                                            ['s3:ObjectCreated:*'])
    _, status = s3_notification_conf.set_config()
    assert_equal(status/100, 2)
    zone_meta_checkpoint(ps_zones[0].zone)
    # get auto-generated topic
    generated_topic_conf = PSTopic(ps_zones[0].conn, generated_topic_name)
    result, status = generated_topic_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(status/100, 2)
    assert_equal(parsed_result['topic']['name'], generated_topic_name)
    # get auto-generated notification
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       generated_topic_name)
    result, status = notification_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(status/100, 2)
    assert_equal(len(parsed_result['topics']), 1)
    # get auto-generated subscription
    sub_conf = PSSubscription(ps_zones[0].conn, notification_name,
                              generated_topic_name)
    result, status = sub_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(status/100, 2)
    assert_equal(parsed_result['topic'], generated_topic_name)
    # delete s3 notification
    _, status = s3_notification_conf.del_config(all_notifications=False)
    assert_equal(status/100, 2)
    # delete topic
    _, status = topic_conf.del_config()
    assert_equal(status/100, 2)
    # verify low-level cleanup
    _, status = generated_topic_conf.get_config()
    assert_equal(status, 404)
    result, status = notification_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(len(parsed_result['topics']), 0)
    # TODO should return 404
    # assert_equal(status, 404)
    result, status = sub_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(parsed_result['topic'], '')
    # TODO should return 404
    # assert_equal(status, 404)
    # cleanup
    # NOTE: the topic was already deleted (and asserted) above, so the
    # redundant second topic_conf.del_config() was removed
    # delete the bucket
    zones[0].delete_bucket(bucket_name)
def test_ps_s3_notification_low_level():
    """Test the low-level implementation of S3 notifications.

    Creating an S3 notification should auto-generate a pubsub topic,
    notification and subscription; deleting the S3 notification and topic
    should clean those up again.
    """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    # create bucket on the first of the rados zones
    # (the bucket handle itself is not needed; only its name is used below)
    zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create topic
    topic_name = bucket_name + TOPIC_SUFFIX
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    _, status = topic_conf.set_config()
    assert_equal(status / 100, 2)
    # create s3 notification
    notification_name = bucket_name + NOTIFICATION_SUFFIX
    generated_topic_name = notification_name + '_' + topic_name
    topic_arn = 'arn:aws:sns:::' + topic_name
    s3_notification_conf = PSNotificationS3(ps_zones[0].conn, bucket_name,
                                            notification_name, topic_arn,
                                            ['s3:ObjectCreated:*'])
    _, status = s3_notification_conf.set_config()
    assert_equal(status / 100, 2)
    zone_meta_checkpoint(ps_zones[0].zone)
    # get auto-generated topic
    generated_topic_conf = PSTopic(ps_zones[0].conn, generated_topic_name)
    result, status = generated_topic_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(status / 100, 2)
    assert_equal(parsed_result['topic']['name'], generated_topic_name)
    # get auto-generated notification
    notification_conf = PSNotification(ps_zones[0].conn, bucket_name,
                                       generated_topic_name)
    result, status = notification_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(status / 100, 2)
    assert_equal(len(parsed_result['topics']), 1)
    # get auto-generated subscription
    sub_conf = PSSubscription(ps_zones[0].conn, notification_name,
                              generated_topic_name)
    result, status = sub_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(status / 100, 2)
    assert_equal(parsed_result['topic'], generated_topic_name)
    # delete s3 notification
    _, status = s3_notification_conf.del_config(all_notifications=False)
    assert_equal(status / 100, 2)
    # delete topic
    _, status = topic_conf.del_config()
    assert_equal(status / 100, 2)
    # verify low-level cleanup
    _, status = generated_topic_conf.get_config()
    assert_equal(status, 404)
    result, status = notification_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(len(parsed_result['topics']), 0)
    # TODO should return 404
    # assert_equal(status, 404)
    result, status = sub_conf.get_config()
    parsed_result = json.loads(result)
    assert_equal(parsed_result['topic'], '')
    # TODO should return 404
    # assert_equal(status, 404)
    # cleanup
    # NOTE: the topic was already deleted (and asserted) above, so the
    # redundant second topic_conf.del_config() was removed
    # delete the bucket
    zones[0].delete_bucket(bucket_name)
def test_ps_s3_notification():
    """Test S3 notification set/get/delete.

    Creates a topic and two S3 notifications on a bucket, verifies both via
    bulk and per-notification GET, then deletes each notification
    individually.
    """
    zones, ps_zones = init_env()
    bucket_name = gen_bucket_name()
    # create bucket on the first of the rados zones
    # (the bucket handle itself is not needed; only its name is used below)
    zones[0].create_bucket(bucket_name)
    # wait for sync
    zone_meta_checkpoint(ps_zones[0].zone)
    # create topic
    # FIX: topic_name was assigned twice back-to-back; keep a single assignment
    topic_name = bucket_name + TOPIC_SUFFIX
    topic_arn = 'arn:aws:sns:::' + topic_name
    topic_conf = PSTopic(ps_zones[0].conn, topic_name)
    response, status = topic_conf.set_config()
    assert_equal(status / 100, 2)
    parsed_result = json.loads(response)
    assert_equal(parsed_result['arn'], topic_arn)
    # create one s3 notification
    notification_name1 = bucket_name + NOTIFICATION_SUFFIX + '_1'
    s3_notification_conf1 = PSNotificationS3(ps_zones[0].conn, bucket_name,
                                             notification_name1, topic_arn,
                                             ['s3:ObjectCreated:*'])
    response, status = s3_notification_conf1.set_config()
    assert_equal(status / 100, 2)
    # create another s3 notification
    notification_name2 = bucket_name + NOTIFICATION_SUFFIX + '_2'
    s3_notification_conf2 = PSNotificationS3(
        ps_zones[0].conn, bucket_name, notification_name2, topic_arn,
        ['s3:ObjectCreated:*', 's3:ObjectRemoved:*'])
    response, status = s3_notification_conf2.set_config()
    assert_equal(status / 100, 2)
    zone_meta_checkpoint(ps_zones[0].zone)
    # get all notification on a bucket
    response, status = s3_notification_conf1.get_config()
    assert_equal(status / 100, 2)
    assert_equal(len(response['TopicConfigurations']), 2)
    assert_equal(response['TopicConfigurations'][0]['TopicArn'], topic_arn)
    assert_equal(response['TopicConfigurations'][1]['TopicArn'], topic_arn)
    # get specific notification on a bucket
    response, status = s3_notification_conf1.get_config(
        all_notifications=False)
    assert_equal(status / 100, 2)
    assert_equal(
        response['NotificationConfiguration']['TopicConfiguration']['Topic'],
        topic_arn)
    assert_equal(
        response['NotificationConfiguration']['TopicConfiguration']['Id'],
        notification_name1)
    response, status = s3_notification_conf2.get_config(
        all_notifications=False)
    assert_equal(status / 100, 2)
    assert_equal(
        response['NotificationConfiguration']['TopicConfiguration']['Topic'],
        topic_arn)
    assert_equal(
        response['NotificationConfiguration']['TopicConfiguration']['Id'],
        notification_name2)
    # delete specific notifications
    _, status = s3_notification_conf1.del_config(all_notifications=False)
    assert_equal(status / 100, 2)
    _, status = s3_notification_conf2.del_config(all_notifications=False)
    assert_equal(status / 100, 2)
    # cleanup
    topic_conf.del_config()
    # delete the bucket
    zones[0].delete_bucket(bucket_name)