def test_bucket_sizes():
	"""Verify token arithmetic and bucket generation across many bucket sizes.

	For every bucket size from 15 minutes up to (but excluding) 6 hours, in
	15-minute steps, check that start/end/next tokens are mutually consistent
	and that the buckets spanning the last 24 hours line up at both edges and
	have the expected count.
	"""
	redis_client = fakeredis.FakeStrictRedis()
	one_day = timedelta(days=1)
	now = datetime.utcnow()
	one_day_ago = now - one_day

	# Bucket sizes between 15 minutes and 6 hours, in 15 minute increments
	for size in range(900, 21600, 900):
		distribution = RedisPopularityDistribution(
			redis_client, "DECKS", namespace="test", bucket_size=size
		)

		# The end token of a bucket is the second before the next bucket starts,
		# so a bucket spans exactly `size` seconds.
		start = distribution._to_start_token(now)
		end = distribution._to_end_token(now)
		assert distribution._next_token(start) == end + 1
		assert end - start == size - 1
		assert distribution._convert_to_end_token(start) == end

		day_ago_start = distribution._to_start_token(one_day_ago)
		buckets = distribution._generate_bucket_tokens_between(day_ago_start, end)

		# First bucket begins at yesterday's start token
		assert buckets[0][0] == day_ago_start

		# Last bucket finishes at the current end token
		assert buckets[-1][1] == end

		# Bucket count covers the full span
		assert len(buckets) == ceil((end - day_ago_start) / size)
def test_one_second_buckets():
	"""Verify the degenerate 1-second bucket size.

	With bucket_size=1 every bucket covers a single second, so the start and
	end tokens of any timestamp coincide, the next token is exactly one second
	later, and a 3-second span yields consecutive one-second buckets.
	"""
	bucket_size = 1
	dist = RedisPopularityDistribution(
		fakeredis.FakeStrictRedis(),
		"DECKS",
		namespace="test",
		ttl=3600,
		bucket_size=bucket_size
	)

	# Truncate "now" to a whole second so the token comparison is exact.
	now = datetime.utcnow()
	t_0 = now - timedelta(microseconds=now.microsecond)
	t_0_token = int(t_0.timestamp())

	# Start and end tokens collapse to the same second for 1-second buckets.
	start_token = dist._to_start_token(t_0)
	end_token = dist._to_end_token(t_0)
	assert start_token == t_0_token
	assert end_token == t_0_token
	assert dist._convert_to_end_token(start_token) == end_token

	# Advancing by one bucket moves exactly one second forward.
	assert dist._next_token(start_token) == start_token + bucket_size

	# A span of 3 seconds produces consecutive single-second buckets.
	t_3_end = dist._to_end_token(t_0 + timedelta(seconds=3))
	for offset, (bucket_start, bucket_end) in enumerate(
		dist._generate_bucket_tokens_between(start_token, t_3_end)
	):
		assert bucket_start == t_0_token + offset
		assert bucket_end == bucket_start + (bucket_size - 1)