DUMP_PREVIOUS_EXPORTS = False SHOW_DEBUG_DATA = False START_SERVER = False SHOW_IMAGES = True SAVE_IMAGES = True TRAIN_NNET = True CONTINUE_TRAINING = False SAVE_INTERMEDIARY = False EXPLORE_LAYER = 'o_0' NUM_VIZ_ROWS = 1 N = 1 SEED = config.get('seed', 325) HEIGHT = 500 WIDTH = 500 TRAINING_UPSAMPLE = 2 ### NEURAL NET CONSTANTS LEARNING_RATE = 0.005 NUM_TRAINING_EPOCHS = 150 BATCH_SIZE = 1024 FIRST_DENSE = 5 HIDDEN_STRUCTURE = [ ('sparse', False, 32, 2, False), ('bottleneck', False, 200, 16), # ('bottleneck',32,16), ('sparse', False, 44, 4, False),
import constants as c import script_config as config import numpy as np import cv2 import utils.generate_utils as gen import random_manager as r import utils.file_utils as file import utils.data_utils as data import utils.markov_utils as m import utils.color_utils as color import utils.viz_utils as viz ### DATA/INPUT/SHARED by all runs section print('PREPARING DATA SECTION') N = 30 SEED = config.get('seed',0) HEIGHT = 500 WIDTH = HEIGHT UPSCALE_FACTOR = c.INSTA_SIZE // HEIGHT STRING_COLORS = config.get( 'colors','aaaaaa-aaaaaa-aaaaaa-aaaaaa-aaaaaa') COLOR_DICT = { i:j for i,j in enumerate(STRING_COLORS.split('-')) } COLOR_DICT = {
import constants as c import script_config as config import numpy as np import cv2 import utils.generate_utils as gen import random_manager as r import utils.file_utils as file import utils.data_utils as data import utils.markov_utils as m import utils.color_utils as color import utils.viz_utils as viz ### DATA/INPUT/SHARED by all runs section print('PREPARING DATA SECTION') N = 1 SEED = config.get('seed', 0) HEIGHT = 100 WIDTH = HEIGHT UPSCALE_FACTOR = c.INSTA_SIZE // HEIGHT NUM_COLORS = 10 UPSCALE_COLORS = HEIGHT // (NUM_COLORS * 2) STRING_COLORS_1 = config.get('colors_1', 'aaaaaa-aaaaaa-aaaaaa-aaaaaa-aaaaaa') STRING_COLORS_2 = config.get('colors_2', 'aaaaaa-aaaaaa-aaaaaa-aaaaaa-aaaaaa') COLOR_DICT_1 = {i: j for i, j in enumerate(STRING_COLORS_1.split('-'))} COLOR_DICT_2 = {10 + i: j for i, j in enumerate(STRING_COLORS_2.split('-'))} COLOR_DICT = {
import constants as c
import script_config as config
import numpy as np
import cv2
import utils.generate_utils as gen
import random_manager as r
import utils.file_utils as file
import utils.data_utils as data
import utils.markov_utils as m
import utils.color_utils as color
import utils.viz_utils as viz

### DATA/INPUT/SHARED by all runs section
print('PREPARING DATA SECTION')

# Number of images produced by this run.
N = 100
# Seed for the deterministic random manager; overridable via script config.
SEED = config.get('seed', 0)
# Square canvas; upscaled to Instagram size on export.
HEIGHT = 1000
WIDTH = HEIGHT
UPSCALE_FACTOR = c.INSTA_SIZE // HEIGHT

# Two palettes decoded from color-state strings supplied in the config
# (falling back to the project-wide default color string).
COLOR_DICT_1 = color.build_palette_from_state(
    config.get('string-color-3', c.DEFAULT_COLOR_STR_1))
COLOR_DICT_2 = color.build_palette_from_state(
    config.get('string-color-4', c.DEFAULT_COLOR_STR_1))

### SETUP section
print('SETUP SECTION')

# Batch runs (more than one image) start from an empty export folder.
if N > 1:
    file.clear_export_folder()
import random_manager as r
import utils.file_utils as file
import utils.data_utils as data
import utils.markov_utils as m
import utils.color_utils as color
import utils.viz_utils as viz
import matplotlib.pyplot as plt
from scipy.ndimage import gaussian_filter

### DATA/INPUT/SHARED by all runs section
print('PREPARING DATA SECTION')

# Run-behavior flags.
DUMP_PREVIOUS_EXPORTS = True
START_SERVER = False
SAVE_IMAGES = True

# Number of images produced by this run.
N = 9
# NOTE(review): `config` and `c` are referenced below but not imported in
# this visible chunk — presumably `import constants as c` and
# `import script_config as config` precede it; confirm upstream.
SEED = config.get('seed', 296)
HEIGHT = 500
WIDTH = 500
# Length of one Markov segment, in pixels.
SEGMENT_LENGTH = 40
# NOTE(review): PS is not used in this visible chunk — presumably
# per-image probabilities consumed further down the file; confirm.
PS = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
UPSCALE_FACTOR_Y = c.INSTA_SIZE // HEIGHT
UPSCALE_FACTOR_X = c.INSTA_SIZE // WIDTH

# Named palettes: "Name:key/hexcolor/weight-..." groups separated by commas.
COLOR_STRING = config.get(
    'color-string',
    'First:0/2bff4b/3-1/00ffe1/3-2/00618e/3-3/bc09b6/3,Second:0/ff7b60/-1/ffe175/-2/f6ffa5/-3/ff002e/'
)

### SETUP section
def generate_full_image(color_string,seed):
    """Render one full image, row by row, from hierarchical Markov models.

    color_string: named-palette spec consumed by color.build_color_repository.
    seed: seed for the deterministic random manager.
    Returns the upscaled RGB image as a float ndarray.

    NOTE(review): the order of the r.bind_generator_from(...) calls below
    determines the random stream — do not reorder statements.
    """
    r.init_def_generator(seed)
    image = np.zeros((HEIGHT,WIDTH,3))
    plots = []
    loop_key = r.bind_generator()
    setup_key = r.bind_generator()

    # Two mirrored two-state step processes (+/-0.1) with a strong
    # preference ('1 5, 5 1') for switching state.
    p1 = m.MarkovModel(
        values=[0.1,-0.1],
        preference_matrix=data.str2mat('1 5, 5 1'),
        self_length=SEGMENT_LENGTH,
        parent_rkey=r.bind_generator_from(setup_key)
    )
    p2 = m.MarkovModel(
        values=[-0.1, 0.1],
        preference_matrix=data.str2mat('1 5, 5 1'),
        self_length=SEGMENT_LENGTH,
        parent_rkey=r.bind_generator_from(setup_key)
    )

    # One full sine period sampled at num_coefs points, amplitude 0.1.
    num_coefs = 12
    vs = np.sin(np.linspace(0, 1, num_coefs) * np.pi * 2) * 0.1
    p3 = m.SimpleProgression(
        values=vs,
        start_probs=0,
        self_length=[num_coefs],
        parent_rkey=r.bind_generator_from(loop_key)
    )

    # Top-level model mixing the three child processes.
    p = m.MarkovModel(
        values=[p1, p2, p3],
        start_probs=2,
        preference_matrix=data.str2mat(
            '0 1 2, 1 0 2, 1 1 4'),
        self_length=HEIGHT//SEGMENT_LENGTH+1,
        parent_rkey=r.bind_generator_from(setup_key)
    )

    num_samples_1 = HEIGHT//2
    # Three independent draws; only the third is currently used (the first
    # two feed the commented-out experiments below).
    sample_scale_1 = m.sample_markov_hierarchy(p, num_samples_1)
    sample_2 = m.sample_markov_hierarchy(p, num_samples_1)
    sample_3 = m.sample_markov_hierarchy(p, num_samples_1)
    # interpolation_h_1 = integrate_and_normalize(sample_scale_1,2)
    # interpolation_h_2 = integrate_and_normalize(sample_2,2)
    interpolation_color = integrate_and_normalize(sample_3,2)

    color_repo = color.build_color_repository(color_string)
    # Integer metadata for keys 0-3 of the 'First' palette; later passed to
    # data.interpolate as value_influences.
    meta = color.get_meta_from_palette(
        color_repo['First'],
        keys=[0,1,2,3],
        meta_cast_function=int)
    print(meta)
    color_lines = compute_color_lines(color_repo,interpolation_color)
    print(color_lines.shape)

    # plt.plot(interpolation_h_1)
    # plt.plot(interpolation_h_2)
    # plt.plot(interpolation_color)
    # plt.show()

    # Per-run tunables drawn once from config-provided option lists.
    scale_1_freq = r.choice_from(setup_key,config.get('scale-1-freq-options',[0.025]))
    scale_2_freq = r.choice_from(setup_key,config.get('scale-2-freq-options',[0.02]))
    scale_1_scale = r.choice_from(setup_key,config.get('scale-1-scale-options',[0.02]))
    scale_2_scale = r.choice_from(setup_key,config.get('scale-2-scale-options',[0.02]))
    num_sin_coeffs = r.choice_from(setup_key,config.get('num-sin-coefficients-options',[18]))
    f1_scale = r.choice_from(setup_key,config.get('f1-scale-options',[0.2]))
    f2_scale = r.choice_from(setup_key,config.get('f2-scale-options',[0.4]))
    f3_scale = r.choice_from(setup_key,config.get('f3-scale-options',[0.15]))

    for current_row in range(HEIGHT):
        # Fresh sub-stream per row.
        loop_key = r.reset_key(loop_key)
        # self_length = SEGMENT_LENGTH+int(10*np.sin(np.pi*i*0.01))
        self_length = SEGMENT_LENGTH
        # scale_1 = 0.1 * (1 - interpolation_h_1[current_row]) + 0.15 * interpolation_h_1[current_row]
        # Step amplitudes oscillate slowly down the image.
        scale_1 = 0.1 + scale_1_scale * np.sin(np.pi * current_row * scale_1_freq )
        scale_2 = 0.1 + scale_2_scale * np.sin(np.pi * current_row * scale_2_freq )

        # Row-local variants of the setup-time processes.
        p1 = m.MarkovModel(
            values=[scale_1, -scale_2],
            preference_matrix=data.str2mat('1 5, 5 1'),
            self_length=self_length,
            parent_rkey=r.bind_generator_from(loop_key)
        )
        p2 = m.MarkovModel(
            values=[-scale_1, scale_2],
            preference_matrix=data.str2mat('1 5, 5 1'),
            self_length=self_length,
            parent_rkey=r.bind_generator_from(loop_key)
        )
        # Flat (zero-valued) stretches, three segments long.
        zeros = m.MarkovModel(
            values=[0,0],
            preference_matrix=data.str2mat('1 1, 1 1'),
            self_length=self_length*3,
            parent_rkey=r.bind_generator_from(loop_key)
        )
        # Single-sample +/-0.5 discontinuities.
        jumps = m.MarkovModel(
            values=[-0.5, 0.5],
            preference_matrix=data.str2mat('1 1, 1 1'),
            self_length=1,
            parent_rkey=r.bind_generator_from(loop_key)
        )
        num_coefs = num_sin_coeffs
        vs = np.sin(np.linspace(0, 1, num_coefs) * np.pi * 2)*0.1
        p3 = m.SimpleProgression(
            values=vs,
            start_probs=0,
            self_length=[num_coefs],
            parent_rkey=r.bind_generator_from(loop_key)
        )
        p = m.MarkovModel(
            values=[p1, p2, p3, jumps, zeros],
            start_probs=2,
            preference_matrix=data.str2mat(
                '0 1 2 2 1, 1 0 2 2 1, 1 1 4 2 2, 1 1 2 0 0, 1 1 1 1 2'),
            self_length=WIDTH//SEGMENT_LENGTH+1,
            parent_rkey=r.bind_generator_from(loop_key)
        )

        # Two independent horizontal signals; the cumulative integral of
        # each becomes a smooth curve across the row.
        num_samples_1 = WIDTH//4
        num_samples_2 = WIDTH//3
        sample_x_up = m.sample_markov_hierarchy(p, num_samples_1)
        sample_x_down = m.sample_markov_hierarchy(p, num_samples_2)
        sample_x_up_int = data.integrate_series(sample_x_up,2,mean_influence=1)
        sample_x_down_int = data.integrate_series(sample_x_down,2,mean_influence=1)

        # Row-dependent blend factors for stitching the signal copies.
        f1 = 0.5 + f1_scale * np.sin(np.pi * current_row * 0.002 )
        f2 = -1 - f2_scale * np.sin(np.pi * current_row * 0.002 )
        f3 = 0.3 + f3_scale * np.sin(np.pi * current_row * 0.001 )
        sample_x_up_int = data.concat_signals(
            [sample_x_up_int]*4,
            [f1,f2,f1,f2])
        sample_x_down_int = data.concat_signals(
            [sample_x_down_int,sample_x_down_int,sample_x_down_int],
            [f3, f1, f3])
        # Prepend the first element — presumably a one-off length fix so the
        # two signals can be added elementwise; TODO confirm against
        # concat_signals output sizes.
        sample_x_down_int = np.r_[sample_x_down_int[0],sample_x_down_int]

        # roll_distance = 500 + int((interpolation_h_2[current_row]-0.5)*250)
        # roll_distance = 500 + int(current_row)
        # print(roll_distance)
        # sample_x_down_int = np.roll(sample_x_down_int, roll_distance)

        sample_x = sample_x_up_int + sample_x_down_int

        # NOTE(review): slicing a horizontal signal to HEIGHT only works
        # because HEIGHT == WIDTH in this script — confirm if sizes diverge.
        interpolation_sequence = sample_x[:HEIGHT]
        interpolation_sequence = gaussian_filter(interpolation_sequence,sigma=1)
        # Normalize to [0, 1], then stretch to the palette's [0, 3] key range.
        interpolation_sequence -= np.min(interpolation_sequence)
        interpolation_sequence /= np.max(interpolation_sequence)
        # interpolation_sequence = data.ease_inout_sin(interpolation_sequence)
        interpolation_sequence *= 3
        # interpolation_sequence *= 2
        # print(interpolation_sequence)

        # Map the smooth sequence onto this row's color line, then convert
        # from CAM02 to sRGB.
        gradient = data.interpolate(
            color_lines[:,current_row,:],
            interpolation_sequence,
            value_influences=meta
        )
        gradient = color.cam02_2_srgb(gradient)
        image[current_row] = gradient
        plots += [np.copy(interpolation_sequence)]

    image = data.upscale_nearest(image,ny=UPSCALE_FACTOR_Y,nx=UPSCALE_FACTOR_X)
    # Clamp to the valid 8-bit RGB range.
    image[image<0] = 0
    image[image>255] = 255

    if SHOW_DEBUG_DATA is True:
        viz.animate_plots_y(plots)

    return image
import time
import constants as c
import script_config as config
import numpy as np
import cv2
import utils.generate_utils as gen
import utils.file_utils as file
import utils.data_utils as data
import utils.markov_utils as m
import utils.color_utils as color
import utils.viz_utils as viz

### DATA/INPUT/SHARED by all runs section
print('PREPARING DATA SECTION')

# Number of images produced by this run.
N = 4
SEED = config.get('seed', 0)
# Small square canvas, upscaled to Instagram size on export.
HEIGHT = 100
WIDTH = HEIGHT
UPSCALE_FACTOR = c.INSTA_SIZE // HEIGHT

# Hex endpoints of the red-to-blue gradient used by this script.
START_COLOR = 'ff0000'
END_COLOR = '0000ff'

### SETUP section
print('SETUP SECTION')

# Batch runs (more than one image) start from an empty export folder.
if N > 1:
    file.clear_export_folder()

### FUNCTIONS section
print('FUNCTIONS SETUP')
import constants as c
import script_config as config
import numpy as np
import cv2
import utils.generate_utils as gen
import random_manager as r
import utils.file_utils as file
import utils.data_utils as data
import utils.markov_utils as m
import utils.color_utils as color
import utils.viz_utils as viz

### DATA/INPUT/SHARED by all runs section
print('PREPARING DATA SECTION')

# Number of images produced by this run.
N = 200
SEED = config.get('seed', 0)
# Square canvas; upscaled to Instagram size on export.
HEIGHT = 500
WIDTH = HEIGHT
UPSCALE_FACTOR = c.INSTA_SIZE // HEIGHT

# Three dash-separated palettes of hex colors, overridable from config.
STRING_COLORS_1 = config.get('colors_1', 'b3b4d4-e4f097-edd1ad-e9a6b4-ffe572')
STRING_COLORS_2 = config.get('colors_2', 'b3b4d4-e4f097-edd1ad-e9a6b4-ffe572')
STRING_COLORS_3 = config.get('colors_3', 'b3b4d4-e4f097-edd1ad-e9a6b4-ffe572')

# Index -> hex-color lookup tables, one per palette.
COLOR_DICT_1 = {i: j for i, j in enumerate(STRING_COLORS_1.split('-'))}
COLOR_DICT_2 = {i: j for i, j in enumerate(STRING_COLORS_2.split('-'))}
COLOR_DICT_3 = {i: j for i, j in enumerate(STRING_COLORS_3.split('-'))}
def generate_patch(height, width, color_dict, rkey):
    """Generate one (height, width, 3) color patch of fuzzy gradient bands.

    height, width: patch dimensions in pixels.
    color_dict: palette mapping color keys to hex colors (with metadata).
    rkey: parent random key from the random manager.
    Returns the patch as a float64 ndarray clamped to [0, 255].

    NOTE(review): the order of r.bind_generator_from / r.choice_from calls
    determines the random stream — do not reorder statements.
    """
    patch = np.zeros((height, width, 3), dtype='float64')
    # Per-color run lengths from palette metadata; used to size the sample
    # count so one row is always covered (+20 slack).
    color_start_lengths = np.array(
        [int(i) for i in color.get_meta_from_palette(color_dict)])
    num_color_samples = width // np.min(color_start_lengths) + 20

    # Fuzzy progression over the palette's color keys.
    color_codes = color.get_keys_from_palette(color_dict)
    pattern = m.FuzzyProgression(values=color_codes,
                                 positive_shifts=3,
                                 negative_shifts=3,
                                 self_length=num_color_samples,
                                 parent_rkey=r.bind_generator_from(rkey))

    # Five independent draws: gradient start/end rows, their "down"
    # counterparts, and a backup used to break up repeated colors.
    sample_raw_start = m.sample_markov_hierarchy(
        pattern, num_color_samples).astype('int32')
    sample_raw_down_start = m.sample_markov_hierarchy(
        pattern, num_color_samples).astype('int32')
    # print(sample_raw_start)
    sample_raw_end = m.sample_markov_hierarchy(
        pattern, num_color_samples).astype('int32')
    sample_raw_down_end = m.sample_markov_hierarchy(
        pattern, num_color_samples).astype('int32')
    sample_raw_backup = m.sample_markov_hierarchy(
        pattern, num_color_samples).astype('int32')

    # making the probability of same color used smaller
    replace_mask = sample_raw_start == sample_raw_end
    sample_raw_end[replace_mask] = sample_raw_backup[replace_mask]

    sample_start = color.replace_indices_with_colors(sample_raw_start, color_dict)
    sample_end = color.replace_indices_with_colors(sample_raw_end, color_dict)
    sample_down_start = color.replace_indices_with_colors(
        sample_raw_down_start, color_dict)
    sample_down_end = color.replace_indices_with_colors(
        sample_raw_down_end, color_dict)

    # Randomly swap start/end per sample: each row of `switch` is a
    # permutation of [0, 1] (replace=False), so the two picks are distinct.
    switch_key = r.bind_generator_from(rkey)
    switch = np.array([
        r.choice_from(switch_key, [0, 1], replace=False, size=(2, ))
        for i in range(sample_start.shape[0])
    ])
    sample_start_t = np.where(switch[:, 0][:, None], sample_start, sample_end)
    sample_end_t = np.where(switch[:, 1][:, None], sample_start, sample_end)
    sample_start = sample_start_t
    sample_end = sample_end_t

    # Cumulative horizontal offsets where each color band starts.
    start_lengths = color.get_meta_for_each_sample(sample_raw_start, color_dict)
    start_lengths = np.array([int(i) for i in start_lengths])
    start_lengths = np.cumsum(start_lengths)

    num_vertical_reps = 2
    num_vertical_samples = height // num_vertical_reps + 3

    # Vertical jitter process: per-row offsets in {-20,-10,0,10,20}, biased
    # toward 0 by the middle column of the preference matrix.
    model = m.MarkovModel(
        values=np.arange(0, 41, 10) - 20,
        preference_matrix=data.str2mat(
            '0 1 5 1 0, 1 2 5 1 0, 0 1 10 1 0, 0 1 5 2 1, 0 1 5 1 0'),
        self_length=num_vertical_samples,
        parent_rkey=r.bind_generator_from(rkey))
    offsets = np.stack([
        m.sample_markov_hierarchy(model, num_vertical_samples)
        for _ in range(num_color_samples)
    ], axis=1)
    # NOTE(review): the choice list is [num_vertical_reps + i for i in
    # range(1)] == [2] — a single option, so this "choice" is currently a
    # constant repeat factor; presumably a leftover experiment knob.
    offsets = np.repeat(offsets,
                        repeats=r.choice_from(
                            rkey, [num_vertical_reps + i for i in range(1)],
                            size=(num_vertical_samples, )),
                        axis=0)
    offsets = np.cumsum(offsets, axis=0)
    offsets += start_lengths
    offsets = np.hstack([np.zeros((offsets.shape[0], 1)), offsets])

    i = 0
    offset_index = 0

    # Vertical color gradients between the "up" and "down" samples
    # (currently only used by the commented-out branch below).
    transition = np.linspace(0, 1, num_vertical_samples)
    sample_start_gradient = sample_start[:, :, None] * (
        1 - transition) + sample_down_start[:, :, None] * transition
    sample_end_gradient = sample_end[:, :, None] * (
        1 - transition) + sample_down_end[:, :, None] * transition

    # Candidate band heights (rows per gradient stripe).
    multiples_choices = r.choice_from(rkey,
                                      config.get('multiples-choices',
                                                 [20, 30, 40, 50]),
                                      size=(6, ))
    # print('multiples-choices',multiples_choices)

    # Fill the patch top-to-bottom, one horizontal stripe per iteration.
    while i < height:
        loop_key = r.bind_generator_from(rkey)

        # Keep only monotonically increasing offsets so band widths
        # (np.diff) stay non-negative.
        current_lengths = offsets[offset_index]
        acum_max = np.maximum.accumulate(current_lengths)
        mask = acum_max == current_lengths
        diff = np.diff(current_lengths[mask])

        samples_start_masked = sample_start[mask[1:]]
        samples_end_masked = sample_end[mask[1:]]
        # # samples_start_masked = sample_start_gradient[:,:,i//num_vertical_reps][mask[1:]]
        # samples_end_masked = sample_end_gradient[:,:,i//num_vertical_reps][mask[1:]]

        # Per-band coin flip (p_switch) swapping gradient direction.
        p_switch = config.get('gradient-switch-p', 0.5)
        switch = r.choice_from(loop_key, [0, 1],
                               size=samples_start_masked.shape[0],
                               p=[p_switch, 1 - p_switch])
        switch = np.stack((switch, 1 - switch), axis=1)
        sample_start_switched = np.where(switch[:, 0][:, None],
                                         samples_start_masked,
                                         samples_end_masked)
        sample_end_switched = np.where(switch[:, 1][:, None],
                                       samples_start_masked,
                                       samples_end_masked)

        # Render one gradient row and replicate it `multiples` rows down.
        multiples = r.choice_from(loop_key, multiples_choices)
        gradient = generate_gradient(sample_start_switched,
                                     sample_end_switched, diff)[:width]
        patch[i:i + multiples] = gradient[None, :]
        i += multiples
        offset_index += 1

    # Clamp to the valid 8-bit RGB range.
    patch[patch < 0] = 0
    patch[patch > 255] = 255
    return patch
import constants as c
import script_config as config
import numpy as np
import cv2
import utils.generate_utils as gen
import random_manager as r
import utils.file_utils as file
import utils.data_utils as data
import utils.markov_utils as m
import utils.color_utils as color
import utils.viz_utils as viz

### DATA/INPUT/SHARED by all runs section
print('PREPARING DATA SECTION')

# Number of images produced by this run.
N = 20
SEED = config.get('seed', 0)
# Square canvas; upscaled to Instagram size on export.
HEIGHT = 1000
WIDTH = HEIGHT
UPSCALE_FACTOR = c.INSTA_SIZE // HEIGHT

# Named palettes: "Name:key/hexcolor/weight-..." groups separated by commas.
COLOR_STRING = config.get(
    'color-string-3',
    'Yellows:0/ffdbc9/20-1/ffebc4/20-2/f2ffcc/20-3/def9b8/20-4/ffdcba/20,Violets:10/d62a55/20-11/f97cbd/20-12/f48e7f/20-13/ffa29b/20-14/fc8c58/20,Blue:5/2098f9/20-6/2332ff/20-7/1980e8/20-8/0742f2/20-9/21f3ff/20'
)

### SETUP section
print('SETUP SECTION')

# Batch runs (more than one image) start from an empty export folder.
if N > 1:
    file.clear_export_folder()