def __init__(self, init_value=0, drift_target_weights=None,
             change_rate_weights=None, move_freq_weights=None):
    """
    Set up the amplitude value and roll initial behavior attributes.

    Each ``*_weights`` argument is a list of 2-tuple weights passed to
    ``rand.weighted_rand()``; when omitted, a hard-coded default
    profile is used instead.

    Args:
        init_value (float): the starting amplitude value
        drift_target_weights (list): a list of 2-tuple weights
        change_rate_weights (list): a list of 2-tuple weights
        move_freq_weights (list): a list of 2-tuple weights
    """
    # Set up amplitude
    self._raw_value = None
    self.value = init_value
    self.drift_target_weights = (
        drift_target_weights if drift_target_weights is not None
        else [(-1, 1), (0.02, 6), (0.2, 1), (0.3, 0)])
    self.drift_target = rand.weighted_rand(self.drift_target_weights)
    self.change_rate_weights = (
        change_rate_weights if change_rate_weights is not None
        else [(0.00001, 100), (0.001, 5), (0.01, 1)])
    self.change_rate = rand.weighted_rand(self.change_rate_weights)
    self.move_freq_weights = (
        move_freq_weights if move_freq_weights is not None
        else [(0.000001, 10), (0.01, 1)])
    self.move_freq = rand.weighted_rand(self.move_freq_weights)
def re_roll_behaviors(self):
    """
    Re-randomly-generate behavior attributes.

    Each attribute is re-rolled from its corresponding weight profile,
    in the same order as the original implementation so the random
    stream is consumed identically.

    Returns: None
    """
    for attr in ('move_freq', 'drift_target', 'change_rate'):
        weights = getattr(self, attr + '_weights')
        setattr(self, attr, rand.weighted_rand(weights))
def get(self):
    """
    Get an ``int`` value in the probability space of the object.

    Returns: int
    """
    rolled = rand.weighted_rand(self.weights, round_result=True)
    return rolled
def apply_noise(self, noise_weights=None, uniform_amount=0.1):
    """
    Add noise to every link in the network.

    Can use either a ``uniform_amount`` or a ``noise_weight`` weight
    profile. If ``noise_weight`` is set, ``uniform_amount`` will be
    ignored.

    Args:
        noise_weights (Optional[(amount, weight)]): a list of weights
            describing the noise to be added to each link
        uniform_amount (float): the maximum amount of uniform noise to be
            applied if ``noise_weights`` is not set

    Returns: None
    """
    # Walk every link in every node and perturb its weight
    for node in self.node_list:
        for link in node.link_list:
            if noise_weights is None:
                noise_amount = round(random.uniform(
                    0, link.weight * uniform_amount), 3)
            else:
                noise_amount = round(weighted_rand(noise_weights), 3)
            link.weight += noise_amount
def get(self):
    """
    Get a ``float`` value in the probability space of the object.

    Returns:
        float: A value between the lowest and highest outcomes
        in ``self.weights``
    """
    sample = rand.weighted_rand(self.weights, round_result=False)
    return sample
def build_chunk(oscillators):
    """
    Build an audio chunk and progress the oscillator states.

    Args:
        oscillators (list): A list of oscillator.Oscillator objects
            to build chunks from

    Returns:
        bytes: a string of audio sample bytes ready to be written to a
            wave file
    """
    step_random_processes(oscillators)
    subchunks = []
    for osc in oscillators:
        osc.amplitude.step_amp()
        osc_chunk = osc.get_samples(config.CHUNK_SIZE)
        if osc_chunk is not None:
            subchunks.append(osc_chunk)
    if subchunks:
        new_chunk = sum(subchunks)
    else:
        # No oscillator produced samples; emit silence
        new_chunk = numpy.zeros(config.CHUNK_SIZE)
    # If we exceed the maximum amplitude, handle it gracefully
    chunk_amplitude = amplitude.find_amplitude(new_chunk)
    if chunk_amplitude > config.MAX_AMPLITUDE:
        # Normalize the amplitude chunk to mitigate immediate clipping
        new_chunk = amplitude.normalize_amplitude(new_chunk,
                                                  config.MAX_AMPLITUDE)
        # Pick some of the offending oscillators (and some random others)
        # and lower their drift targets
        avg_amp = (sum(osc.amplitude.value for osc in oscillators) /
                   len(oscillators))
        for osc in oscillators:
            if (osc.amplitude.value > avg_amp and rand.prob_bool(0.1) or
                    rand.prob_bool(0.01)):
                osc.amplitude.drift_target = rand.weighted_rand(
                    [(-5, 1), (0, 10)])
                osc.amplitude.change_rate = rand.weighted_rand(
                    osc.amplitude.change_rate_weights)
    # ndarray.tostring() was deprecated and removed in NumPy >= 1.20;
    # tobytes() is its exact byte-for-byte equivalent.
    return new_chunk.astype(config.SAMPLE_DATA_TYPE).tobytes()
def build_chunk(oscillators):
    """
    Build an audio chunk and progress the oscillator states.

    Args:
        oscillators (list): A list of oscillator.Oscillator objects
            to build chunks from

    Returns:
        bytes: a string of audio sample bytes ready to be written to a
            wave file
    """
    step_random_processes(oscillators)
    subchunks = []
    for osc in oscillators:
        osc.amplitude.step_amp()
        osc_chunk = osc.get_samples(config.CHUNK_SIZE)
        if osc_chunk is not None:
            subchunks.append(osc_chunk)
    if subchunks:
        new_chunk = sum(subchunks)
    else:
        # No oscillator produced samples; emit silence
        new_chunk = numpy.zeros(config.CHUNK_SIZE)
    # If we exceed the maximum amplitude, handle it gracefully
    chunk_amplitude = amplitude.find_amplitude(new_chunk)
    if chunk_amplitude > config.MAX_AMPLITUDE:
        # Normalize the amplitude chunk to mitigate immediate clipping
        new_chunk = amplitude.normalize_amplitude(new_chunk,
                                                  config.MAX_AMPLITUDE)
        # Pick some of the offending oscillators (and some random others)
        # and lower their drift targets
        avg_amp = (sum(osc.amplitude.value for osc in oscillators) /
                   len(oscillators))
        for osc in oscillators:
            if (osc.amplitude.value > avg_amp and rand.prob_bool(0.1) or
                    rand.prob_bool(0.01)):
                osc.amplitude.drift_target = rand.weighted_rand([(-5, 1),
                                                                 (0, 10)])
                osc.amplitude.change_rate = rand.weighted_rand(
                    osc.amplitude.change_rate_weights)
    # ndarray.tostring() was deprecated and removed in NumPy >= 1.20;
    # tobytes() is its exact byte-for-byte equivalent.
    return new_chunk.astype(config.SAMPLE_DATA_TYPE).tobytes()
def __init__(self, init_value, drift_target_weights=None,
             change_rate_weights=None):
    """
    Set up the amplitude value and roll initial behavior attributes.

    Each ``*_weights`` argument is a list of 2-tuple weights passed to
    ``rand.weighted_rand()``; when omitted, a hard-coded default
    profile is used instead.

    Args:
        init_value (float): The initial amplitude level
        drift_target_weights (list): a list of 2-tuple weights
        change_rate_weights (list): a list of 2-tuple weights
    """
    # Set up amplitude
    self._raw_value = None
    self.value = init_value
    self.drift_target_weights = (
        drift_target_weights if drift_target_weights is not None
        else [(-1, 1), (0.02, 6), (0.2, 1), (0.3, 0)])
    self.drift_target = rand.weighted_rand(self.drift_target_weights)
    self.change_rate_weights = (
        change_rate_weights if change_rate_weights is not None
        else [(0.00001, 100), (0.001, 5), (0.01, 1)])
    self.change_rate = rand.weighted_rand(self.change_rate_weights)
def apply_noise(self, noise_weights=None, uniform_amount=0.1):
    """
    Add noise to every link in the network.

    Can use either a ``uniform_amount`` or a ``noise_weight`` weight
    profile. If ``noise_weight`` is set, ``uniform_amount`` will be
    ignored.

    Args:
        noise_weights (list): a list of weight tuples of form
            ``(float, float)`` corresponding to ``(amount, weight)``
            describing the noise to be added to each link in the graph
        uniform_amount (float): the maximum amount of uniform noise to be
            applied if ``noise_weights`` is not set

    Returns: None

    Example:
        >>> from blur.markov.node import Node
        >>> node_1 = Node('One')
        >>> node_2 = Node('Two')
        >>> node_1.add_link(node_1, 3)
        >>> node_1.add_link(node_2, 5)
        >>> node_2.add_link(node_1, 1)
        >>> graph = Graph([node_1, node_2])
        >>> for link in graph.node_list[0].link_list:
        ...     print('{} {}'.format(link.target.value, link.weight))
        One 3
        Two 5
        >>> graph.apply_noise()
        >>> for link in graph.node_list[0].link_list:
        ...     print('{} {}'.format(
        ...         link.target.value, link.weight))  # doctest: +SKIP
        One 3.154
        Two 5.321
    """
    # Decide once which noise source to draw from
    use_weight_profile = noise_weights is not None
    for node in self.node_list:
        for link in node.link_list:
            if use_weight_profile:
                delta = weighted_rand(noise_weights)
            else:
                delta = random.uniform(0, link.weight * uniform_amount)
            link.weight += round(delta, 3)
def apply_noise(self, noise_weights=None, uniform_amount=0.1):
    """
    Add noise to every link in the network.

    Can use either a ``uniform_amount`` or a ``noise_weight`` weight
    profile. If ``noise_weight`` is set, ``uniform_amount`` will be
    ignored.

    Args:
        noise_weights (list): a list of weight tuples of form
            ``(float, float)`` corresponding to ``(amount, weight)``
            describing the noise to be added to each link in the graph
        uniform_amount (float): the maximum amount of uniform noise to be
            applied if ``noise_weights`` is not set

    Returns: None

    Example:
        >>> from blur.markov.node import Node
        >>> node_1 = Node('One')
        >>> node_2 = Node('Two')
        >>> node_1.add_link(node_1, 3)
        >>> node_1.add_link(node_2, 5)
        >>> node_2.add_link(node_1, 1)
        >>> graph = Graph([node_1, node_2])
        >>> for link in graph.node_list[0].link_list:
        ...     print('{} {}'.format(link.target.value, link.weight))
        One 3
        Two 5
        >>> graph.apply_noise()
        >>> for link in graph.node_list[0].link_list:
        ...     print('{} {}'.format(
        ...         link.target.value, link.weight))  # doctest: +SKIP
        One 3.154
        Two 5.321
    """
    # Perturb every link's weight in place
    for node in self.node_list:
        for link in node.link_list:
            noise = (weighted_rand(noise_weights)
                     if noise_weights is not None
                     else random.uniform(0, link.weight * uniform_amount))
            link.weight += round(noise, 3)
def step_random_processes(oscillators):
    """
    Occasionally re-target the amplitude drift of random oscillator pairs.

    Args:
        oscillators (list): A list of oscillator.Oscillator objects
            to operate on

    Returns: None
    """
    # The vast majority of calls are no-ops
    if not rand.prob_bool(0.01):
        return
    amp_bias_weights = [(0.001, 1), (0.1, 100), (0.15, 40), (1, 0)]
    osc_count = len(oscillators)
    # Use a hexagram roll to decide how many oscillators should move
    num_moves = iching.get_hexagram('NAIVE') % osc_count
    for _ in range(num_moves):
        # Two separate hexagram draws: one selects the oscillator pair,
        # the other produces their new drift targets
        pair = [gram % osc_count
                for gram in iching.get_hexagram('THREE COIN')]
        amplitudes = [(gram / 64) * rand.weighted_rand(amp_bias_weights)
                      for gram in iching.get_hexagram('THREE COIN')]
        oscillators[pair[0]].amplitude.drift_target = amplitudes[0]
        oscillators[pair[1]].amplitude.drift_target = amplitudes[1]
def test_normal_distribution(self):
    """
    Test the accuracy of ``rand.normal_distribution()``.

    Use the curve it creates to generate a large number of samples,
    and then calculate the real variance and mean of the resulting
    sample group and compare the two within a comfortable margin.
    """
    MEAN = -12
    VARIANCE = 2.5
    SAMPLE_COUNT = 1000
    curve = rand.normal_distribution(MEAN, VARIANCE, weight_count=30)
    samples = [rand.weighted_rand(curve) for i in range(SAMPLE_COUNT)]
    samples_mean = sum(samples) / len(samples)
    samples_variance = (sum(
        (s - samples_mean) ** 2 for s in samples) / len(samples))
    mean_diff = abs(MEAN - samples_mean)
    variance_diff = abs(VARIANCE - samples_variance)
    # Allow a 25% margin of error on both statistics since the
    # samples are genuinely random
    self.assertLess(mean_diff, abs(MEAN / 4))
    self.assertLess(variance_diff, abs(VARIANCE / 4))
def test_weighted_rand_with_arbitrary_curve(self):
    """
    Test ``rand.weighted_rand()``.

    Find a large number of points from a randomly built weight
    distribution and comparing the distribution against the
    expectation using a crude histogram model.
    """
    MIN_X = -1000
    MIN_Y = 0
    MAX_X = 1000
    MAX_Y = 1000
    curve = [(random.randint(MIN_X, MAX_X), random.randint(MIN_Y, MAX_Y))
             for _ in range(30)]
    # Attach points to domain bounds at MIN_Y
    curve.append((MIN_X, MIN_Y))
    curve.append((MAX_X, MIN_Y))
    # Sort the points in the curve as this is a
    # requirement of _linear_interp()
    curve.sort(key=lambda p: p[0])
    BIN_WIDTH = 1
    TEST_COUNT = 1000
    bins = dict.fromkeys(range(MIN_X, MAX_X, BIN_WIDTH), 0)
    for _ in range(TEST_COUNT):
        point = rand.weighted_rand(curve, round_result=False)
        # Match the found point to the closest bin to the left
        bins[int(math.floor(point / BIN_WIDTH) * BIN_WIDTH)] += 1
    # Make sure the binning is working as expected
    self.assertEqual(
        sum(bins.values()), TEST_COUNT,
        msg='This test itself is broken! '
            'Not all rolled points were matched to a bin.')
    # Total interpolated probability mass across all bins
    sum_probability = sum(rand._linear_interp(curve, bin_x)
                          for bin_x in bins.keys())
    for bin_x, count in bins.items():
        bin_probability = rand._linear_interp(curve, bin_x)
        expected_count = (bin_probability / sum_probability) * TEST_COUNT
        self.assertLess(abs(count - expected_count), TEST_COUNT / 10)
def get(self):
    """
    Render the poem as an HTML string.

    Returns:
        str: the body of the poem in HTML
    """
    if rand.prob_bool(self.mutable_chance):
        # Render text from a markov graph derived from the source text
        word_count = rand.weighted_rand(
            self.word_count_weights, round_result=True)
        word_graph = Graph.from_file(self.filepath, self.distance_weights)
        word_list = [word_graph.pick().get_value()
                     for i in range(word_count)]
    else:
        # Otherwise, copy source contents literally.
        # Use a context manager so the file handle is always closed
        # (the previous implementation leaked it).
        with open(self.filepath, 'r') as source_file:
            word_list = source_file.read().split()
    # Combine words, process markups, and return HTML
    return self.render_markups(word_list)
def step_random_processes(oscillators):
    """
    Rarely nudge the amplitude drift targets of random oscillator pairs.

    Args:
        oscillators (list): A list of oscillator.Oscillator objects
            to operate on

    Returns: None
    """
    if not rand.prob_bool(0.01):
        # Roughly 99% of calls do nothing
        return
    amp_bias_weights = [(0.001, 1), (0.1, 100), (0.15, 40), (1, 0)]
    # Find out how many oscillators should move
    num_moves = iching.get_hexagram('NAIVE') % len(oscillators)
    for move in range(num_moves):
        # First hexagram draw picks the pair of oscillator indices,
        # second draw produces their new drift targets
        pair = [gram % len(oscillators)
                for gram in iching.get_hexagram('THREE COIN')]
        amplitudes = [(gram / 64) * rand.weighted_rand(amp_bias_weights)
                      for gram in iching.get_hexagram('THREE COIN')]
        first, second = pair[0], pair[1]
        oscillators[first].amplitude.drift_target = amplitudes[0]
        oscillators[second].amplitude.drift_target = amplitudes[1]
def test_normal_distribution(self):
    """
    Test the accuracy of ``rand.normal_distribution()``.

    Use the curve it creates to generate a large number of samples,
    and then calculate the real variance and mean of the resulting
    sample group and compare the two within a comfortable margin.
    """
    MEAN = -12
    VARIANCE = 2.5
    SAMPLE_COUNT = 600
    curve = rand.normal_distribution(MEAN, VARIANCE)
    samples = [rand.weighted_rand(curve) for i in range(SAMPLE_COUNT)]
    samples_mean = sum(samples) / len(samples)
    samples_variance = (
        sum((s - samples_mean) ** 2 for s in samples) / len(samples)
    )
    mean_diff = abs(MEAN - samples_mean)
    variance_diff = abs(VARIANCE - samples_variance)
    # Allow a 20% margin of error on both statistics since the
    # samples are genuinely random
    self.assertLess(mean_diff, abs(MEAN / 5))
    self.assertLess(variance_diff, abs(VARIANCE / 5))
from pprint import pformat from random import randint from blur import rand # import basic (manually entered) data on the poems from __poems_basic_preconfig import poems if __name__ == '__main__': for poem in poems: # Decide the likelihood that a poem will be markov-ed on view # Manually entered data had mutable chances of 0, 0.5, and 1 # For 3 basic categories of preference # (All but poem eight have a chance to be mutable) if poem['mutable_chance'] == 0 and poem['name'] != 'eight': poem['mutable_chance'] = rand.weighted_rand([(0, 100), (0.03, 10), (0.15, 0)]) elif poem['mutable_chance'] == 0.5: poem['mutable_chance'] = rand.weighted_rand( rand.normal_distribution(0.5, 0.8, 0, 1)) else: poem['mutable_chance'] = rand.weighted_rand([(0.85, 0), (0.9, 10), (1, 100)]) poem['position_weight'] = rand.weighted_rand( rand.normal_distribution(poem['position_weight'], 3)) # Build distance weights for markov graph derivation keys = [ rand.weighted_rand(rand.normal_distribution(4, 30), True) for i in range(randint(10, 20)) ] distance_weights = dict([ (key,
detune_base_pitches_weights = [(frequency_map[10], 50), (frequency_map[0], 1), (frequency_map[2], 30), (frequency_map[3], 40), (frequency_map[5], 80), (frequency_map[7], 30), (frequency_map[9], 20)] octave_choice_weights = [(1/8, 20), (1/4, 15), (1/2, 10), (1, 5), (2, 5), (4, 5)] # Find detuned pitches pitches = [((rand.weighted_choice(detune_base_pitches_weights) + # Base pitch rand.weighted_rand(detune_weights)) * # Detune rand.weighted_choice(octave_choice_weights)) # Set Octave for i in range(50)] amp_multiplier_weights = [(0.05, 10), (0.2, 2), (0.7, 1)] for pitch in pitches: osc_list.append( oscillator.Oscillator( pitch, amplitude.AmplitudeHandler( init_value=0, drift_target_weights=[ (-2, 30), (0.02, 8), (0.05, 2), (0.1, 0.1), (0.3, 0)], change_rate_weights=[ (0.00001, 12000), (0.0001, 100),
def test_weighted_rand_and_choice_with_one_weight_equivalent(self):
    """With a single weight, weighted_rand and weighted_choice agree."""
    weights = [('The Only Weight', 2)]
    rand_result = rand.weighted_rand(weights)
    choice_result = rand.weighted_choice(weights)
    self.assertEqual(rand_result, choice_result)
def test_weighted_rand_with_one_weight_returns_it(self):
    """A single-weight profile must always yield that weight's outcome."""
    weights = [('The Only Weight', 2)]
    only_outcome = weights[0][0]
    self.assertEqual(rand.weighted_rand(weights), only_outcome)
def render_markups(self, word_list):
    """
    Render a list of words and markups to html with automatic line breaks.

    This method performs several processing steps preparing the poem
    text for HTML delivery. It:

    * Converts `---` to stochastic length dashes in the form of empty
      `<span class="variable-length-dash"></span>` tags
    * Converts `|||` to stochastic height line breaks in the form of
      empty `<span class="variable-height-break"></span>` tags
    * Spontaneously inserts horizontal blank space between words in the
      form of empty `<span class="horizontal-blank-space"></span>` tags
    * Calculates the position of line breaks and renders them as divs
      in the form `<div class="poem-line"> ... </div>`

    Line breaks are triggered after every word which exceeds
    `LINE_LENGTH`. This character limit ignores HTML tags, allowing
    lines containing spans (variable-length-dash or
    horizontal-blank-space) to intentionally visually exceed the
    apparent right edge of the poem.

    Args:
        word_list (list[str]): The list of words (as well as
            punctuation marks and markups) to render.

    Returns:
        str: The contents of `word_list` rendered as HTML
    """
    working_text = word_list[:]  # Copy of word_list to avoid side-effects
    lines = []  # List of lines in the generated poem
    current_line = []  # List of words in the current line
    visible_char_count = 0  # Number of visible chars in current line
    for word in working_text:
        if word == '---':
            # Render triple dashes to variable length visible dashes
            # (in the form of inline-block spans)
            dash_length = rand.weighted_rand(self.dash_length_weights)
            word = variable_length_dash(dash_length)
        elif word == '|||':
            # Render triple pipes as variable height breaks
            # (in the form of fixed-height spans)
            y_gap = rand.weighted_rand(
                self.y_gap_height_weights)
            word = variable_height_break(y_gap)
        else:
            # Otherwise, the word will be rendered literally as visible
            # text, so count it toward the visible character count used
            # in placing line breaks
            visible_char_count += len(word)
        # Roll to insert x-axis gaps
        if rand.prob_bool(self.x_gap_freq):
            x_gap = rand.weighted_rand(self.x_gap_length_weights)
            # The blank-space span is always prepended to the word.
            # NOTE(review): an earlier comment claimed a 50/50
            # before/after placement, but the code only places it before.
            word = horizontal_blank_space(x_gap) + word
        # Break lines when LINE_LENGTH is exceeded. The break is decided
        # before the current word is appended, so the overflowing word
        # starts the next line.
        if visible_char_count > LINE_LENGTH:
            visible_char_count = 0
            lines.append(''.join(current_line))
            current_line = []
        # Handle spaces appropriately for punctuation marks
        # (no leading space before punctuation)
        if word in PUNCTUATIONS:
            current_line.append(word)
        else:
            current_line.append(' ' + word)
    # Attach final line
    if current_line:
        lines.append(''.join(current_line))
    # Wrap every line in a poem-line div and concatenate
    return (''.join((surround_with_tag(line, 'div', 'class="poem-line"')
                     for line in lines)))
def __init__(self, filename, immutable_id, title='',
             mutable_chance=None, position_weight=None,
             distance_weights=None, word_count_weights=None,
             gap_before_weights=None, left_pad_weights=None,
             x_gap_freq_weights=None, x_gap_length_weights=None,
             y_gap_height_weights=None, dash_length_weights=None):
    """
    Args:
        filename (str): Name of text file containing
            the poem source located in `SOURCE_DIR`
        title (str): Title of the poem
        immutable_id (int): The immutable unique ID of this poem.
            Unlike `title`, this should not be subject to change by
            random processes.
        mutable_chance (float): 0-1 probability to be mutable
        distance_weights (dict): Dict of distance weights to be used
            in rand.from_file()
        position_weight (list[tuple]): Weight for position in the book
            order. Higher values relative to the values in the other
            poems indicates a stronger likelihood to appear near the
            beginning
        word_count_weights (list[tuple]): List of weight tuples for how
            many words will appear in the poem if the poem is mutable
        gap_before_weights (list[tuple]): List of weight tuples for how
            much space should appear before the poem, in em's.
        left_pad_weights (list[tuple]): List of weight tuples for how
            far the poem should be padded on the left, in
            device-width %.
        x_gap_freq_weights (list[tuple]): List of weight tuples for how
            frequently x-axis gaps should be inserted between words in
            the rendered poem. On initialization, this value is used to
            calculate `self.x_gap_freq`, which is the 0-1 probability
            for an x-axis gap to be inserted between any two given
            words.
        x_gap_length_weights (list[tuple): List of weight tuples for
            the length of inserted x-axis gaps, in em's.
        y_gap_height_weights (list[tuple): List of weight tuples for
            how tall inserted y-axis gaps should be, in em's.
        dash_length_weights (list[tuple]): List of weight tuples for
            the length of dashes triggered by `---` marks in the
            source text.
    """
    self.immutable_id = immutable_id
    self.title = title
    self.filepath = os.path.join(SOURCE_DIR, filename)
    # Use `is not None` tests (rather than plain truthiness) so that
    # legitimate falsy arguments — e.g. an explicit mutable_chance of 0
    # or a position_weight of 0 — are respected instead of being
    # silently replaced by the module defaults.
    self.mutable_chance = (mutable_chance
                           if mutable_chance is not None
                           else _default_mutable_chance)
    self.distance_weights = (distance_weights
                             if distance_weights is not None
                             else _default_distance_weights)
    self.position_weight = (position_weight
                            if position_weight is not None
                            else _default_position_weight)
    self.word_count_weights = (word_count_weights
                               if word_count_weights is not None
                               else _default_word_count_weights)
    self.x_gap_length_weights = (x_gap_length_weights
                                 if x_gap_length_weights is not None
                                 else _default_x_gap_length_weights)
    self.y_gap_height_weights = (y_gap_height_weights
                                 if y_gap_height_weights is not None
                                 else _default_y_gap_height_weights)
    self.dash_length_weights = (dash_length_weights
                                if dash_length_weights is not None
                                else _default_dash_length_weights)
    # Some args are used to calculate attributes on init
    self.gap_before = rand.weighted_rand(
        gap_before_weights if gap_before_weights is not None
        else _default_gap_before_weights)
    self.left_pad = rand.weighted_rand(
        left_pad_weights if left_pad_weights is not None
        else _default_left_pad_weights)
    self.x_gap_freq = rand.weighted_rand(
        x_gap_freq_weights if x_gap_freq_weights is not None
        else _default_x_gap_freq_weights)
(0.3, 1), (0.4, 0) ]))) # Initialize softer oscillators slightly out of tune with consonant pitches detune_weights = rand.normal_distribution(0, 20) detune_base_pitches_weights = [(frequency_map[10], 50), (frequency_map[0], 1), (frequency_map[2], 30), (frequency_map[3], 40), (frequency_map[5], 80), (frequency_map[7], 30), (frequency_map[9], 20)] octave_choice_weights = [(1 / 8, 20), (1 / 4, 15), (1 / 2, 10), (1, 5), (2, 5), (4, 5)] # Find detuned pitches pitches = [ (( rand.weighted_choice(detune_base_pitches_weights) + # Base pitch rand.weighted_rand(detune_weights)) * # Detune rand.weighted_choice(octave_choice_weights)) # Set Octave for i in range(50) ] amp_multiplier_weights = [(0.05, 10), (0.2, 2), (0.7, 1)] for pitch in pitches: osc_list.append( oscillator.Oscillator( pitch, amplitude.AmplitudeHandler( init_value=0, drift_target_weights=[(-2, 30), (0.02, 8), (0.05, 2), (0.1, 0.1), (0.3, 0)], change_rate_weights=[(0.00001, 12000), (0.0001, 100), (0.001, 10)],