Example 1
0
def queue_upload_events_for_reprocessing(events, use_kinesis=False):
	"""Requeue the given UploadEvents for processing.

	On AWS (or when *use_kinesis* is forced) the events are turned into
	raw uploads and fed onto the Kinesis processing stream one record at
	a time; otherwise each event is simply processed synchronously here.
	"""
	if not (settings.ENV_AWS or use_kinesis):
		# Local/dev path: process each event in this process.
		for event in events:
			logger.info("Processing UploadEvent %r locally", event)
			event.process()
		return

	from hsreplaynet.utils.aws.streams import fill_stream_from_iterable
	raw_uploads = _generate_raw_uploads_from_events(events)
	fill_stream_from_iterable(
		settings.KINESIS_UPLOAD_PROCESSING_STREAM_NAME,
		raw_uploads,
		aws.publish_raw_upload_to_processing_stream,
	)
Example 2
0
def queue_upload_events_for_reprocessing(events, use_kinesis=False):
	"""Requeue the given UploadEvents for processing.

	On AWS (or when *use_kinesis* is forced) the events are converted to
	raw uploads and published in batches onto the Kinesis processing
	stream; otherwise each event is processed synchronously in-process.
	"""
	if not (settings.ENV_AWS or use_kinesis):
		# Local/dev path: handle each event right here.
		for event in events:
			logger.info("Processing UploadEvent %r locally", event)
			event.process()
		return

	from hsreplaynet.utils.aws.streams import fill_stream_from_iterable
	raw_uploads = _generate_raw_uploads_from_events(events)
	fill_stream_from_iterable(
		settings.KINESIS_UPLOAD_PROCESSING_STREAM_NAME,
		raw_uploads,
		aws.publish_raw_upload_batch_to_processing_stream,
	)
Example 3
0
def queue_raw_uploads_for_processing(attempt_reprocessing, limit=None):
	"""
	Queue all raw logs to attempt processing them into UploadEvents.

	The primary use for this is for when we deploy code. The intended deploy process is:
		- Notify S3 to suspend triggering lambda upon log upload
		- Perform the Deploy
		- Notify S3 to resume triggering lambda upon log upload
		- Invoke this function to queue for processing any logs uploaded during the deploy

	This method is not intended to requeue uploads that have previously failed.
	For that see the requeue_failed_* family of methods.
	"""
	from hsreplaynet.utils.aws.streams import fill_stream_from_iterable

	logger.info("Starting - Queue all raw uploads for processing")

	# Stream every pending raw upload onto Kinesis, one record per publish.
	raw_uploads = generate_raw_uploads_for_processing(attempt_reprocessing, limit)
	fill_stream_from_iterable(
		settings.KINESIS_UPLOAD_PROCESSING_STREAM_NAME,
		raw_uploads,
		aws.publish_raw_upload_to_processing_stream,
	)
Example 4
0
def queue_raw_uploads_for_processing(attempt_reprocessing, limit=None):
	"""
	Queue all raw logs to attempt processing them into UploadEvents.

	The primary use for this is for when we deploy code. The intended deploy process is:
		- Notify S3 to suspend triggering lambda upon log upload
		- Perform the Deploy
		- Notify S3 to resume triggering lambda upon log upload
		- Invoke this function to queue for processing any logs uploaded during the deploy

	This method is not intended to requeue uploads that have previously failed.
	For that see the requeue_failed_* family of methods.
	"""
	from hsreplaynet.utils.aws.streams import fill_stream_from_iterable

	logger.info("Starting - Queue all raw uploads for processing")

	# Stream every pending raw upload onto Kinesis using batched publishes.
	raw_uploads = generate_raw_uploads_for_processing(attempt_reprocessing, limit)
	fill_stream_from_iterable(
		settings.KINESIS_UPLOAD_PROCESSING_STREAM_NAME,
		raw_uploads,
		aws.publish_raw_upload_batch_to_processing_stream,
	)