def build_node_tree(cam, node_group):
    build_node_tree.cam = cam
    output = {}
    dat = {}
    output['renderpath_datas'] = [dat]
    path = 'build/compiled/Assets/renderpaths/'
    node_group_name = node_group.name.replace('.', '_')

    rn = get_root_node(node_group)
    if rn == None:
        return

    dat['name'] = node_group_name

    # Store main context names
    dat['mesh_context'] = 'mesh'
    dat['shadows_context'] = 'shadowmap'

    dat['render_targets'], dat['depth_buffers'] = preprocess_renderpath(rn, node_group)
    dat['stages'] = []

    buildNode(dat['stages'], rn, node_group)

    asset_path = path + node_group_name + '.arm'
    armutils.write_arm(asset_path, output)
    assets.add(asset_path)

def build_node_trees(assets_path):
    s = bpy.data.filepath.split(os.path.sep)
    s.pop()
    fp = os.path.sep.join(s)
    os.chdir(fp)

    # Make sure Assets dir exists
    if not os.path.exists('build/compiled/Assets/renderpaths'):
        os.makedirs('build/compiled/Assets/renderpaths')

    build_node_trees.assets_path = assets_path
    # Always include
    assets.add(assets_path + 'brdf.png')
    assets.add_embedded_data('brdf.png')

    bpy.data.worlds['Arm'].rp_defs = ''

    # Export render path for each camera
    parsed_paths = []
    for cam in bpy.data.cameras:
        # if cam.game_export
        if cam.renderpath_path not in parsed_paths:
            node_group = bpy.data.node_groups[cam.renderpath_path]
            build_node_tree(cam, node_group)
            parsed_paths.append(cam.renderpath_path)

def make_ssao_pass(stages, node_group, node):
    make_quad_pass(stages, node_group, node, target_index=1,
                   bind_target_indices=[3, 4],
                   bind_target_constants=['gbufferD', 'gbuffer0'],
                   shader_context='ssao_pass/ssao_pass/ssao_pass',
                   viewport_scale=bpy.data.worlds['Arm'].generate_ssao_texture_scale)
    make_quad_pass(stages, node_group, node, target_index=2,
                   bind_target_indices=[1, 4],
                   bind_target_constants=['tex', 'gbuffer0'],
                   shader_context='blur_edge_pass/blur_edge_pass/blur_edge_pass_x',
                   viewport_scale=bpy.data.worlds['Arm'].generate_ssao_texture_scale)
    make_quad_pass(stages, node_group, node, target_index=1,
                   bind_target_indices=[2, 4],
                   bind_target_constants=['tex', 'gbuffer0'],
                   shader_context='blur_edge_pass/blur_edge_pass/blur_edge_pass_y')
    assets.add(build_node_trees.assets_path + 'noise8.png')
    assets.add_embedded_data('noise8.png')

def make_apply_ssao_pass(stages, node_group, node):
    make_quad_pass(stages, node_group, node, target_index=2,
                   bind_target_indices=[4, 5],
                   bind_target_constants=['gbufferD', 'gbuffer0'],
                   shader_context='ssao_pass/ssao_pass/ssao_pass')
    make_quad_pass(stages, node_group, node, target_index=3,
                   bind_target_indices=[2, 5],
                   bind_target_constants=['tex', 'gbuffer0'],
                   shader_context='blur_edge_pass/blur_edge_pass/blur_edge_pass_x')
    make_quad_pass(stages, node_group, node, target_index=1,
                   bind_target_indices=[3, 5],
                   bind_target_constants=['tex', 'gbuffer0'],
                   shader_context='blur_edge_pass/blur_edge_pass/blur_edge_pass_y_blend')
    assets.add(build_node_trees.assets_path + 'noise8.png')
    assets.add_embedded_data('noise8.png')

def make_ssao_reproject_pass(stages, node_group, node):
    make_quad_pass(stages, node_group, node, target_index=1,
                   bind_target_indices=[3, 4, 2, 5],
                   bind_target_constants=['gbufferD', 'gbuffer0', 'slast', 'sveloc'],
                   shader_context='ssao_reproject_pass/ssao_reproject_pass/ssao_reproject_pass')
    make_quad_pass(stages, node_group, node, target_index=2,
                   bind_target_indices=[1, 4],
                   bind_target_constants=['tex', 'gbuffer0'],
                   shader_context='blur_edge_pass/blur_edge_pass/blur_edge_pass_x')
    make_quad_pass(stages, node_group, node, target_index=1,
                   bind_target_indices=[2, 4],
                   bind_target_constants=['tex', 'gbuffer0'],
                   shader_context='blur_edge_pass/blur_edge_pass/blur_edge_pass_y')
    assets.add(build_node_trees.assets_path + 'noise8.png')
    assets.add_embedded_data('noise8.png')

def make_smaa_pass(stages, node_group, node):
    stage = {}
    stage['params'] = []
    make_set_target(stage, node_group, node, target_index=2)
    stages.append(stage)

    stage = {}
    stage['params'] = []
    make_clear_target(stage, color_val=[0.0, 0.0, 0.0, 0.0])
    stages.append(stage)

    make_quad_pass(stages, node_group, node, target_index=None,
                   bind_target_indices=[4],
                   bind_target_constants=['colorTex'],
                   shader_context='smaa_edge_detect/smaa_edge_detect/smaa_edge_detect')

    stage = {}
    stage['params'] = []
    make_set_target(stage, node_group, node, target_index=3)
    stages.append(stage)

    stage = {}
    stage['params'] = []
    make_clear_target(stage, color_val=[0.0, 0.0, 0.0, 0.0])
    stages.append(stage)

    make_quad_pass(stages, node_group, node, target_index=None,
                   bind_target_indices=[2],
                   bind_target_constants=['edgesTex'],
                   shader_context='smaa_blend_weight/smaa_blend_weight/smaa_blend_weight')

    make_quad_pass(stages, node_group, node, target_index=1,
                   bind_target_indices=[4, 3, 5],
                   bind_target_constants=['colorTex', 'blendTex', 'sveloc'],
                   shader_context='smaa_neighborhood_blend/smaa_neighborhood_blend/smaa_neighborhood_blend')

    assets.add(build_node_trees.assets_path + 'smaa_area.png')
    assets.add(build_node_trees.assets_path + 'smaa_search.png')
    assets.add_embedded_data('smaa_area.png')
    assets.add_embedded_data('smaa_search.png')

def make_draw_world(stage, node_group, node, dome=True):
    if dome:
        stage['command'] = 'draw_skydome'
    else:
        stage['command'] = 'draw_material_quad'
        # stage['params'].append(wname + '_material/' + wname + '_material/world')
    stage['params'].append('_worldMaterial') # Link to active world
    # Link assets
    if '_EnvClouds' in bpy.data.worlds['Arm'].world_defs:
        assets.add(build_node_trees.assets_path + 'noise256.png')
        assets.add_embedded_data('noise256.png')

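# A minimal usage sketch, assuming callers build the same 'params'/'command'
# stage dicts seen in make_smaa_pass above; 'example_world_stage' is a
# hypothetical helper and 'stages', 'node_group', 'node' stand for whatever the
# surrounding render-path builder provides:
def example_world_stage(stages, node_group, node):
    stage = {}
    stage['params'] = []
    make_draw_world(stage, node_group, node, dome=True)
    # stage now holds {'command': 'draw_skydome', 'params': ['_worldMaterial']}
    stages.append(stage)
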
def write_color_irradiance(base_name, col):
    # Constant color
    irradiance_floats = [col[0], col[1], col[2]]
    for i in range(0, 24):
        irradiance_floats.append(0.0)

    envpath = 'build/compiled/Assets/envmaps'
    if not os.path.exists(envpath):
        os.makedirs(envpath)

    output_file = envpath + '/' + base_name + '_irradiance'

    sh_json = {}
    sh_json['irradiance'] = irradiance_floats
    armutils.write_arm(output_file + '.arm', sh_json)

    assets.add(output_file + '.arm')

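# The 27 floats written above appear to be 9 spherical-harmonic coefficients
# stored as interleaved RGB triples, with only the first (constant) band set
# for a flat color. A hypothetical standalone helper illustrating that layout:
def constant_color_sh(col):
    irradiance = [col[0], col[1], col[2]]  # first coefficient, RGB
    irradiance += [0.0] * 24               # remaining 8 coefficients stay zero
    assert len(irradiance) == 27           # 9 coefficients * 3 channels
    return irradiance
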
def write_output(output):
    # Add datas to khafile
    dir_name = 'world'
    # Append world defs
    wrd = bpy.data.worlds['Arm']
    data_name = 'world' + wrd.world_defs + wrd.rp_defs

    # Reference correct shader context
    dat = output['material_datas'][0]
    dat['shader'] = data_name + '/' + data_name
    assets.add_shader2(dir_name, data_name)

    # Write material json
    path = 'build/compiled/Assets/materials/'
    asset_path = path + dat['name'] + '.arm'
    armutils.write_arm(asset_path, output)
    assets.add(asset_path)

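# Illustration only (hypothetical define strings): the shader context reference
# written above is '<base + defs>/<base + defs>', so with defs '_EnvCol' and
# '_EnvClouds' collected into world_defs/rp_defs it would look like:
#   data_name = 'world' + '_EnvCol' + '_EnvClouds'  # 'world_EnvCol_EnvClouds'
#   dat['shader'] = 'world_EnvCol_EnvClouds/world_EnvCol_EnvClouds'
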
def write_sky_irradiance(base_name):
    wrd = bpy.data.worlds['Arm']
    if wrd.generate_radiance_sky_type == 'Hosek':
        # Hosek spherical harmonics
        irradiance_floats = [
            1.5519331988822218, 2.3352207154503266, 2.997277451988076,
            0.2673894962434794, 0.4305630474135794, 0.11331825259716752,
            -0.04453633521758638, -0.038753175134160295, -0.021302768541875794,
            0.00055858020486499, 0.000371654770334503, 0.000126606145406403,
            -0.000135708721978705, -0.000787399554583089, -0.001550090690860059,
            0.021947399048903773, 0.05453650591711572, 0.08783641266630278,
            0.17053593578630663, 0.14734127083304463, 0.07775404698816404,
            -2.6924363189795e-05, -7.9350169701934e-05, -7.559914435231e-05,
            0.27035455385870993, 0.23122918445556914, 0.12158817295211832]
        for i in range(0, len(irradiance_floats)):
            irradiance_floats[i] /= 2
    else:
        # Fake
        irradiance_floats = [
            0.5282714503101548, 0.6576873502619733, 1.0692444882409775,
            0.17108712865136044, -0.08840906601412168, -0.5016437779078063,
            -0.05123227009753221, -0.06724088656181595, -0.07651659183264257,
            -0.09740705087869408, -0.19569235551561795, -0.3087497307203731,
            0.056717192983076405, 0.1109186355691673, 0.20616582000220154,
            0.013898321643280141, 0.05985657405787638, 0.12638202463080392,
            -0.003224443014484806, 0.013764449325286695, 0.04288850064700093,
            0.1796545401960917, 0.21595731080039757, 0.29144356515614844,
            0.10152875101705996, 0.2651761450155488, 0.4778582813756466]

    envpath = 'build/compiled/Assets/envmaps'
    if not os.path.exists(envpath):
        os.makedirs(envpath)

    output_file = envpath + '/' + base_name + '_irradiance'

    sh_json = {}
    sh_json['irradiance'] = irradiance_floats
    armutils.write_arm(output_file + '.arm', sh_json)

    assets.add(output_file + '.arm')

def make_texture(image_node, tex_name, matname=None):
    wrd = bpy.data.worlds['Arm']
    tex = {}
    tex['name'] = tex_name
    tex['file'] = ''
    image = image_node.image

    if matname == None:
        matname = mat_state.material.name

    if image == None:
        return None

    if image.filepath == '':
        log.warn(matname + '/' + image.name + ' - file path not found')
        return None

    # Reference image name
    tex['file'] = armutils.extract_filename(image.filepath)
    tex['file'] = armutils.safefilename(tex['file'])

    s = tex['file'].rsplit('.', 1)
    if len(s) == 1:
        log.warn(matname + '/' + image.name + ' - file extension required for image name')
        return None
    ext = s[1].lower()
    do_convert = ext != 'jpg' and ext != 'png' and ext != 'hdr' and ext != 'mp4'

    # Convert image
    if do_convert:
        tex['file'] = tex['file'].rsplit('.', 1)[0] + '.jpg'
        # log.warn(matname + '/' + image.name + ' - image format is not (jpg/png/hdr), converting to jpg.')

    if image.packed_file != None:
        # Extract packed data
        unpack_path = armutils.get_fp() + '/build/compiled/Assets/unpacked'
        if not os.path.exists(unpack_path):
            os.makedirs(unpack_path)
        unpack_filepath = unpack_path + '/' + tex['file']
        if do_convert:
            if not os.path.isfile(unpack_filepath):
                armutils.write_image(image, unpack_filepath)
        # Write bytes if size is different or file does not exist yet
        elif os.path.isfile(unpack_filepath) == False or os.path.getsize(unpack_filepath) != image.packed_file.size:
            with open(unpack_filepath, 'wb') as f:
                f.write(image.packed_file.data)
        assets.add(unpack_filepath)
    else:
        if not os.path.isfile(armutils.safe_assetpath(image.filepath)):
            log.warn('Material ' + matname + '/' + image.name + ' - file not found(' + image.filepath + ')')
            return None
        if do_convert:
            converted_path = armutils.get_fp() + '/build/compiled/Assets/unpacked/' + tex['file']
            # TODO: delete cache when file changes
            if not os.path.isfile(converted_path):
                armutils.write_image(image, converted_path)
            assets.add(converted_path)
        else:
            # Link image path to assets
            # TODO: Khamake converts .PNG to .jpg? Convert ext to lowercase on windows
            if armutils.get_os() == 'win':
                s = image.filepath.rsplit('.', 1)
                assets.add(armutils.safe_assetpath(s[0] + '.' + s[1].lower()))
            else:
                assets.add(armutils.safe_assetpath(image.filepath))

    # if image_format != 'RGBA32':
        # tex['format'] = image_format

    interpolation = image_node.interpolation
    aniso = wrd.anisotropic_filtering_state
    if aniso == 'On':
        interpolation = 'Smart'
    elif aniso == 'Off' and interpolation == 'Smart':
        interpolation = 'Linear'

    # TODO: Blender seems to load full images on size request, cache size instead
    powimage = is_pow(image.size[0]) and is_pow(image.size[1])

    # Pow2 required to generate mipmaps
    if powimage == True:
        if interpolation == 'Cubic':
            # Mipmap linear
            tex['mipmap_filter'] = 'linear'
            tex['generate_mipmaps'] = True
        elif interpolation == 'Smart':
            # Mipmap anisotropic
            tex['min_filter'] = 'anisotropic'
            tex['mipmap_filter'] = 'linear'
            tex['generate_mipmaps'] = True
    elif image_node.interpolation == 'Cubic' or image_node.interpolation == 'Smart':
        log.warn(matname + '/' + image.name + ' - power of 2 texture required for ' + image_node.interpolation + ' interpolation')

    if image_node.extension != 'REPEAT': # Extend or clip
        tex['u_addressing'] = 'clamp'
        tex['v_addressing'] = 'clamp'
    else:
        if state.target == 'html5' and powimage == False:
            log.warn(matname + '/' + image.name + ' - non power of 2 texture can not use repeat mode on HTML5 target')
            tex['u_addressing'] = 'clamp'
            tex['v_addressing'] = 'clamp'

    if image.source == 'MOVIE': # Just append movie texture trait for now
        movie_trait = {}
        movie_trait['type'] = 'Script'
        movie_trait['class_name'] = 'armory.trait.internal.MovieTexture'
        movie_trait['parameters'] = [tex['file']]
        for o in mat_state.mat_armusers[mat_state.material]:
            o['traits'].append(movie_trait)
        tex['source'] = 'movie'
        tex['file'] = '' # MovieTexture will load the video

    return tex

def export_data(fp, sdk_path, is_play=False, is_publish=False, in_viewport=False):
    global exporter
    wrd = bpy.data.worlds['Arm']
    print('\nArmory v' + wrd.arm_version)

    # Clean compiled variants if cache is disabled
    if wrd.arm_cache_shaders == False:
        if os.path.isdir('build/html5-resources'):
            shutil.rmtree('build/html5-resources')
        if os.path.isdir('build/krom-resources'):
            shutil.rmtree('build/krom-resources')
        if os.path.isdir('build/window/krom-resources'):
            shutil.rmtree('build/window/krom-resources')
        if os.path.isdir('build/compiled/Shaders'):
            shutil.rmtree('build/compiled/Shaders')
        if os.path.isdir('build/compiled/ShaderDatas'):
            shutil.rmtree('build/compiled/ShaderDatas')
        if os.path.isdir('build/compiled/ShaderRaws'):
            shutil.rmtree('build/compiled/ShaderRaws')
    # Remove shader datas if shaders were deleted
    elif os.path.isdir('build/compiled/Shaders') == False and os.path.isdir('build/compiled/ShaderDatas') == True:
        shutil.rmtree('build/compiled/ShaderDatas')

    raw_shaders_path = sdk_path + 'armory/Shaders/'
    assets_path = sdk_path + 'armory/Assets/'
    export_physics = bpy.data.worlds['Arm'].arm_physics != 'Disabled'
    export_navigation = bpy.data.worlds['Arm'].arm_navigation != 'Disabled'
    assets.reset()

    # Build node trees
    # TODO: cache
    make_logic.build_node_trees()
    active_worlds = set()
    for scene in bpy.data.scenes:
        if scene.game_export and scene.world != None:
            active_worlds.add(scene.world)
    world_outputs = make_world.build_node_trees(active_worlds)
    make_renderpath.build_node_trees(assets_path)
    for wout in world_outputs:
        make_world.write_output(wout)

    # Export scene data
    assets.embedded_data = sorted(list(set(assets.embedded_data)))
    physics_found = False
    navigation_found = False
    ArmoryExporter.compress_enabled = is_publish
    ArmoryExporter.in_viewport = in_viewport
    for scene in bpy.data.scenes:
        if scene.game_export:
            ext = '.zip' if (scene.data_compressed and is_publish) else '.arm'
            asset_path = 'build/compiled/Assets/' + armutils.safe_filename(scene.name) + ext
            exporter.execute(bpy.context, asset_path)
            if physics_found == False and ArmoryExporter.export_physics:
                physics_found = True
            if navigation_found == False and ArmoryExporter.export_navigation:
                navigation_found = True
            assets.add(asset_path)

    if physics_found == False: # Disable physics anyway if no rigid body exported
        export_physics = False
    if navigation_found == False:
        export_navigation = False

    # Write referenced shader variants
    for ref in assets.shader_datas:
        # Data does not exist yet
        if not os.path.isfile(fp + '/' + ref):
            shader_name = ref.split('/')[3] # Extract from 'build/compiled/...'
            defs = make_utils.def_strings_to_array(wrd.world_defs + wrd.rp_defs)
            if shader_name.startswith('compositor_pass'):
                defs += make_utils.def_strings_to_array(wrd.compo_defs)
            compile_shader(raw_shaders_path, shader_name, defs)

    # Reset path
    os.chdir(fp)

    # Copy std shaders
    if not os.path.isdir('build/compiled/Shaders/std'):
        shutil.copytree(raw_shaders_path + 'std', 'build/compiled/Shaders/std')

    # Write compiled.glsl
    write_data.write_compiledglsl()

    # Write khafile.js
    write_data.write_khafilejs(is_play, export_physics, export_navigation, dce_full=is_publish)

    # Write Main.hx - depends on write_khafilejs for writing number of assets
    write_data.write_main(is_play, in_viewport, is_publish)

def parse_rgb(node, socket):
    if node.type == 'GROUP':
        return parse_group(node, socket)
    elif node.type == 'GROUP_INPUT':
        return parse_group_input(node, socket)
    elif node.type == 'ATTRIBUTE':
        # Vcols only for now
        # node.attribute_name
        mat_state.data.add_elem('col', 3)
        return 'vcolor'
    elif node.type == 'RGB':
        return tovec3(socket.default_value)
    elif node.type == 'TEX_BRICK':
        # Pass through
        return tovec3([0.0, 0.0, 0.0])
    elif node.type == 'TEX_CHECKER':
        curshader.add_function(functions.str_tex_checker)
        if node.inputs[0].is_linked:
            co = parse_vector_input(node.inputs[0])
        else:
            co = 'wposition'
        col1 = parse_vector_input(node.inputs[1])
        col2 = parse_vector_input(node.inputs[2])
        scale = parse_value_input(node.inputs[3])
        return 'tex_checker({0}, {1}, {2}, {3})'.format(co, col1, col2, scale)
    elif node.type == 'TEX_ENVIRONMENT':
        # Pass through
        return tovec3([0.0, 0.0, 0.0])
    elif node.type == 'TEX_GRADIENT':
        if node.inputs[0].is_linked:
            co = parse_vector_input(node.inputs[0])
        else:
            co = 'wposition'
        grad = node.gradient_type
        if grad == 'LINEAR':
            f = '{0}.x'.format(co)
        elif grad == 'QUADRATIC':
            f = '0.0'
        elif grad == 'EASING':
            f = '0.0'
        elif grad == 'DIAGONAL':
            f = '({0}.x + {0}.y) * 0.5'.format(co)
        elif grad == 'RADIAL':
            f = 'atan({0}.y, {0}.x) / PI2 + 0.5'.format(co)
        elif grad == 'QUADRATIC_SPHERE':
            f = '0.0'
        elif grad == 'SPHERICAL':
            f = 'max(1.0 - sqrt({0}.x * {0}.x + {0}.y * {0}.y + {0}.z * {0}.z), 0.0)'.format(co)
        return 'vec3(clamp({0}, 0.0, 1.0))'.format(f)
    elif node.type == 'TEX_IMAGE':
        # Already fetched
        if res_var_name(node, node.outputs[1]) in parsed:
            return '{0}.rgb'.format(store_var_name(node))
        tex_name = armutils.safe_source_name(node.name)
        tex = texture.make_texture(node, tex_name)
        if tex != None:
            to_linear = parsing_basecol and not tex['file'].endswith('.hdr')
            return '{0}.rgb'.format(texture_store(node, tex, tex_name, to_linear))
        elif node.image == None: # Empty texture
            tex = {}
            tex['name'] = tex_name
            tex['file'] = ''
            return '{0}.rgb'.format(texture_store(node, tex, tex_name, True))
        else:
            tex_store = store_var_name(node) # Pink color for missing texture
            curshader.write('vec4 {0} = vec4(1.0, 0.0, 1.0, 1.0);'.format(tex_store))
            return '{0}.rgb'.format(tex_store)
    elif node.type == 'TEX_MAGIC':
        # Pass through
        return tovec3([0.0, 0.0, 0.0])
    elif node.type == 'TEX_MUSGRAVE':
        # Fall back to noise
        curshader.add_function(functions.str_tex_noise)
        if node.inputs[0].is_linked:
            co = parse_vector_input(node.inputs[0])
        else:
            co = 'wposition'
        scale = parse_value_input(node.inputs[1])
        # detail = parse_value_input(node.inputs[2])
        # distortion = parse_value_input(node.inputs[3])
        return 'vec3(tex_noise_f({0} * {1}))'.format(co, scale)
    elif node.type == 'TEX_NOISE':
        curshader.add_function(functions.str_tex_noise)
        if node.inputs[0].is_linked:
            co = parse_vector_input(node.inputs[0])
        else:
            co = 'wposition'
        scale = parse_value_input(node.inputs[1])
        # detail = parse_value_input(node.inputs[2])
        # distortion = parse_value_input(node.inputs[3])
        # Slow..
        return 'vec3(tex_noise({0} * {1}), tex_noise({0} * {1} + 0.33), tex_noise({0} * {1} + 0.66))'.format(co, scale)
    elif node.type == 'TEX_POINTDENSITY':
        # Pass through
        return tovec3([0.0, 0.0, 0.0])
    elif node.type == 'TEX_SKY':
        # Pass through
        return tovec3([0.0, 0.0, 0.0])
    elif node.type == 'TEX_VORONOI':
        curshader.add_function(functions.str_tex_voronoi)
        assets.add(armutils.get_sdk_path() + '/armory/Assets/' + 'noise64.png')
        assets.add_embedded_data('noise64.png')
        curshader.add_uniform('sampler2D snoise', link='_noise64')
        if node.inputs[0].is_linked:
            co = parse_vector_input(node.inputs[0])
        else:
            co = 'wposition'
        scale = parse_value_input(node.inputs[1])
        if node.coloring == 'INTENSITY':
            return 'vec3(tex_voronoi({0} / {1}).a)'.format(co, scale)
        else: # CELLS
            return 'tex_voronoi({0} / {1}).rgb'.format(co, scale)
    elif node.type == 'TEX_WAVE':
        # Pass through
        return tovec3([0.0, 0.0, 0.0])
    elif node.type == 'BRIGHTCONTRAST':
        out_col = parse_vector_input(node.inputs[0])
        bright = parse_value_input(node.inputs[1])
        contr = parse_value_input(node.inputs[2])
        curshader.add_function(\
"""vec3 brightcontrast(const vec3 col, const float bright, const float contr) {
    float a = 1.0 + contr;
    float b = bright - contr * 0.5;
    return max(a * col + b, 0.0);
}
""")
        return 'brightcontrast({0}, {1}, {2})'.format(out_col, bright, contr)
    elif node.type == 'GAMMA':
        out_col = parse_vector_input(node.inputs[0])
        gamma = parse_value_input(node.inputs[1])
        return 'pow({0}, vec3({1}))'.format(out_col, gamma)
    elif node.type == 'HUE_SAT':
        # hue = parse_value_input(node.inputs[0])
        # sat = parse_value_input(node.inputs[1])
        # val = parse_value_input(node.inputs[2])
        # fac = parse_value_input(node.inputs[3])
        out_col = parse_vector_input(node.inputs[4])
        # curshader.add_function(\
# """vec3 hue_sat(const float hue, const float sat, const float val, const float fac, const vec3 col) {
# }
# """)
        return out_col
    elif node.type == 'INVERT':
        fac = parse_value_input(node.inputs[0])
        out_col = parse_vector_input(node.inputs[1])
        return 'mix({0}, vec3(1.0) - ({0}), {1})'.format(out_col, fac)
    elif node.type == 'MIX_RGB':
        fac = parse_value_input(node.inputs[0])
        fac_var = node_name(node.name) + '_fac'
        curshader.write('float {0} = {1};'.format(fac_var, fac))
        col1 = parse_vector_input(node.inputs[1])
        col2 = parse_vector_input(node.inputs[2])
        blend = node.blend_type
        if blend == 'MIX':
            out_col = 'mix({0}, {1}, {2})'.format(col1, col2, fac_var)
        elif blend == 'ADD':
            out_col = 'mix({0}, {0} + {1}, {2})'.format(col1, col2, fac_var)
        elif blend == 'MULTIPLY':
            out_col = 'mix({0}, {0} * {1}, {2})'.format(col1, col2, fac_var)
        elif blend == 'SUBTRACT':
            out_col = 'mix({0}, {0} - {1}, {2})'.format(col1, col2, fac_var)
        elif blend == 'SCREEN':
            out_col = '(vec3(1.0) - (vec3(1.0 - {2}) + {2} * (vec3(1.0) - {1})) * (vec3(1.0) - {0}))'.format(col1, col2, fac_var)
        elif blend == 'DIVIDE':
            out_col = '(vec3((1.0 - {2}) * {0} + {2} * {0} / {1}))'.format(col1, col2, fac_var)
        elif blend == 'DIFFERENCE':
            out_col = 'mix({0}, abs({0} - {1}), {2})'.format(col1, col2, fac_var)
        elif blend == 'DARKEN':
            out_col = 'min({0}, {1} * {2})'.format(col1, col2, fac_var)
        elif blend == 'LIGHTEN':
            out_col = 'max({0}, {1} * {2})'.format(col1, col2, fac_var)
        elif blend == 'OVERLAY':
            out_col = 'mix({0}, {1}, {2})'.format(col1, col2, fac_var) # Revert to mix
        elif blend == 'DODGE':
            out_col = 'mix({0}, {1}, {2})'.format(col1, col2, fac_var) # Revert to mix
        elif blend == 'BURN':
            out_col = 'mix({0}, {1}, {2})'.format(col1, col2, fac_var) # Revert to mix
        elif blend == 'HUE':
            out_col = 'mix({0}, {1}, {2})'.format(col1, col2, fac_var) # Revert to mix
        elif blend == 'SATURATION':
            out_col = 'mix({0}, {1}, {2})'.format(col1, col2, fac_var) # Revert to mix
        elif blend == 'VALUE':
            out_col = 'mix({0}, {1}, {2})'.format(col1, col2, fac_var) # Revert to mix
        elif blend == 'COLOR':
            out_col = 'mix({0}, {1}, {2})'.format(col1, col2, fac_var) # Revert to mix
        elif blend == 'SOFT_LIGHT':
            out_col = '((1.0 - {2}) * {0} + {2} * ((vec3(1.0) - {0}) * {1} * {0} + {0} * (vec3(1.0) - (vec3(1.0) - {1}) * (vec3(1.0) - {0}))));'.format(col1, col2, fac)
        elif blend == 'LINEAR_LIGHT':
            out_col = 'mix({0}, {1}, {2})'.format(col1, col2, fac_var) # Revert to mix
            # out_col = '({0} + {2} * (2.0 * ({1} - vec3(0.5))))'.format(col1, col2, fac_var)
        if node.use_clamp:
            return 'clamp({0}, vec3(0.0), vec3(1.0))'.format(out_col)
        else:
            return out_col
    elif node.type == 'CURVE_RGB':
        # Pass through
        return parse_vector_input(node.inputs[1])
    elif node.type == 'BLACKBODY':
        # Pass constant
        return tovec3([0.84, 0.38, 0.0])
    elif node.type == 'VALTORGB': # ColorRamp
        fac = parse_value_input(node.inputs[0])
        interp = node.color_ramp.interpolation
        elems = node.color_ramp.elements
        if len(elems) == 1:
            return tovec3(elems[0].color)
        if interp == 'CONSTANT':
            fac_var = node_name(node.name) + '_fac'
            curshader.write('float {0} = {1};'.format(fac_var, fac))
            # Get index
            out_i = '0'
            for i in range(1, len(elems)):
                out_i += ' + ({0} > {1} ? 1 : 0)'.format(fac_var, elems[i].position)
            # Write cols array
            cols_var = node_name(node.name) + '_cols'
            curshader.write('vec3 {0}[{1}];'.format(cols_var, len(elems)))
            for i in range(0, len(elems)):
                curshader.write('{0}[{1}] = vec3({2}, {3}, {4});'.format(cols_var, i, elems[i].color[0], elems[i].color[1], elems[i].color[2]))
            return '{0}[{1}]'.format(cols_var, out_i)
        else: # Linear, .. - 2 elems only, end pos assumed to be 1
            # float f = clamp((pos - start) * (1.0 / (1.0 - start)), 0.0, 1.0);
            return 'mix({0}, {1}, clamp(({2} - {3}) * (1.0 / (1.0 - {3})), 0.0, 1.0))'.format(tovec3(elems[0].color), tovec3(elems[1].color), fac, elems[0].position)
    elif node.type == 'COMBHSV':
        # vec3 hsv2rgb(vec3 c) {
        #     vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
        #     vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
        #     return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
        # }
        # vec3 rgb2hsv(vec3 c) {
        #     vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
        #     vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
        #     vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));
        #     float d = q.x - min(q.w, q.y);
        #     float e = 1.0e-10;
        #     return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
        # }
        # Pass constant
        return tovec3([0.0, 0.0, 0.0])
    elif node.type == 'COMBRGB':
        r = parse_value_input(node.inputs[0])
        g = parse_value_input(node.inputs[1])
        b = parse_value_input(node.inputs[2])
        return 'vec3({0}, {1}, {2})'.format(r, g, b)
    elif node.type == 'WAVELENGTH':
        # Pass constant
        return tovec3([0.0, 0.27, 0.19])

def parse_value(node, socket):
    if node.type == 'GROUP':
        if node.node_tree.name.startswith('Armory PBR'):
            # Displacement
            if socket == node.outputs[1]:
                res = parse_value_input(node.inputs[10])
                if node.inputs[11].is_linked or node.inputs[11].default_value != 1.0:
                    res = "({0} * {1})".format(res, parse_value_input(node.inputs[11]))
                return res
            else:
                return None
        else:
            return parse_group(node, socket)
    elif node.type == 'GROUP_INPUT':
        return parse_group_input(node, socket)
    elif node.type == 'ATTRIBUTE':
        # Pass time till drivers are implemented
        if node.attribute_name == 'time':
            curshader.add_uniform('float time', link='_time')
            return 'time'
        else:
            return '0.0'
    elif node.type == 'CAMERA':
        # View Z Depth
        if socket == node.outputs[1]:
            return 'gl_FragCoord.z'
        # View Distance
        else:
            return 'length(eyeDir)'
    elif node.type == 'FRESNEL':
        ior = parse_value_input(node.inputs[0])
        #nor = parse_vectorZ_input(node.inputs[1])
        return 'pow(1.0 - dotNV, 7.25 / {0})'.format(ior) # max(dotNV, 0.0)
    elif node.type == 'NEW_GEOMETRY':
        if socket == node.outputs[6]: # Backfacing
            return '0.0'
        elif socket == node.outputs[7]: # Pointiness
            return '0.0'
    elif node.type == 'HAIR_INFO':
        # Is Strand
        # Intercept
        # Thickness
        return '0.5'
    elif node.type == 'LAYER_WEIGHT':
        blend = parse_value_input(node.inputs[0])
        # nor = parse_vector_input(node.inputs[1])
        if socket == node.outputs[0]: # Fresnel
            return 'clamp(pow(1.0 - dotNV, (1.0 - {0}) * 10.0), 0.0, 1.0)'.format(blend)
        elif socket == node.outputs[1]: # Facing
            return '((1.0 - dotNV) * {0})'.format(blend)
    elif node.type == 'LIGHT_PATH':
        if socket == node.outputs[0]: # Is Camera Ray
            return '1.0'
        elif socket == node.outputs[1]: # Is Shadow Ray
            return '0.0'
        elif socket == node.outputs[2]: # Is Diffuse Ray
            return '1.0'
        elif socket == node.outputs[3]: # Is Glossy Ray
            return '1.0'
        elif socket == node.outputs[4]: # Is Singular Ray
            return '0.0'
        elif socket == node.outputs[5]: # Is Reflection Ray
            return '0.0'
        elif socket == node.outputs[6]: # Is Transmission Ray
            return '0.0'
        elif socket == node.outputs[7]: # Ray Length
            return '0.0'
        elif socket == node.outputs[8]: # Ray Depth
            return '0.0'
        elif socket == node.outputs[9]: # Transparent Depth
            return '0.0'
        elif socket == node.outputs[10]: # Transmission Depth
            return '0.0'
    elif node.type == 'OBJECT_INFO':
        if socket == node.outputs[0]: # Object Index
            return '0.0'
        elif socket == node.outputs[1]: # Material Index
            return '0.0'
        elif socket == node.outputs[2]: # Random
            return '0.0'
    elif node.type == 'PARTICLE_INFO':
        if socket == node.outputs[0]: # Index
            return '0.0'
        elif socket == node.outputs[1]: # Age
            return '0.0'
        elif socket == node.outputs[2]: # Lifetime
            return '0.0'
        elif socket == node.outputs[4]: # Size
            return '0.0'
    elif node.type == 'VALUE':
        return tovec1(node.outputs[0].default_value)
    elif node.type == 'WIREFRAME':
        #node.use_pixel_size
        # size = parse_value_input(node.inputs[0])
        return '0.0'
    elif node.type == 'TEX_BRICK':
        return '0.0'
    elif node.type == 'TEX_CHECKER':
        # TODO: do not recompute when color socket is also connected
        curshader.add_function(functions.str_tex_checker)
        if node.inputs[0].is_linked:
            co = parse_vector_input(node.inputs[0])
        else:
            co = 'wposition'
        col1 = parse_vector_input(node.inputs[1])
        col2 = parse_vector_input(node.inputs[2])
        scale = parse_value_input(node.inputs[3])
        return 'tex_checker({0}, {1}, {2}, {3}).r'.format(co, col1, col2, scale)
    elif node.type == 'TEX_GRADIENT':
        return '0.0'
    elif node.type == 'TEX_IMAGE':
        # Already fetched
        if res_var_name(node, node.outputs[0]) in parsed:
            return '{0}.a'.format(store_var_name(node))
        tex_name = armutils.safe_source_name(node.name)
        tex = texture.make_texture(node, tex_name)
        if tex != None:
            return '{0}.a'.format(texture_store(node, tex, tex_name))
        else:
            tex_store = store_var_name(node) # Pink color for missing texture
            curshader.write('vec4 {0} = vec4(1.0, 0.0, 1.0, 1.0);'.format(tex_store))
            return '{0}.a'.format(tex_store)
    elif node.type == 'TEX_MAGIC':
        return '0.0'
    elif node.type == 'TEX_MUSGRAVE':
        # Fall back to noise
        curshader.add_function(functions.str_tex_noise)
        if node.inputs[0].is_linked:
            co = parse_vector_input(node.inputs[0])
        else:
            co = 'wposition'
        scale = parse_value_input(node.inputs[1])
        # detail = parse_value_input(node.inputs[2])
        # distortion = parse_value_input(node.inputs[3])
        return 'tex_noise_f({0} * {1})'.format(co, scale)
    elif node.type == 'TEX_NOISE':
        curshader.add_function(functions.str_tex_noise)
        if node.inputs[0].is_linked:
            co = parse_vector_input(node.inputs[0])
        else:
            co = 'wposition'
        scale = parse_value_input(node.inputs[1])
        # detail = parse_value_input(node.inputs[2])
        # distortion = parse_value_input(node.inputs[3])
        return 'tex_noise({0} * {1})'.format(co, scale)
    elif node.type == 'TEX_POINTDENSITY':
        return '0.0'
    elif node.type == 'TEX_VORONOI':
        curshader.add_function(functions.str_tex_voronoi)
        assets.add(armutils.get_sdk_path() + '/armory/Assets/' + 'noise64.png')
        assets.add_embedded_data('noise64.png')
        curshader.add_uniform('sampler2D snoise', link='_noise64')
        if node.inputs[0].is_linked:
            co = parse_vector_input(node.inputs[0])
        else:
            co = 'wposition'
        scale = parse_value_input(node.inputs[1])
        if node.coloring == 'INTENSITY':
            return 'tex_voronoi({0} * {1}).a'.format(co, scale)
        else: # CELLS
            return 'tex_voronoi({0} * {1}).r'.format(co, scale)
    elif node.type == 'TEX_WAVE':
        return '0.0'
    elif node.type == 'LIGHT_FALLOFF':
        # Constant, linear, quadratic
        # Shaders default to quadratic for now
        return '1.0'
    elif node.type == 'NORMAL':
        nor = parse_vector_input(node.inputs[0])
        return 'dot({0}, {1})'.format(tovec3(node.outputs[0].default_value), nor)
    elif node.type == 'VALTORGB': # ColorRamp
        return '1.0'
    elif node.type == 'MATH':
        val1 = parse_value_input(node.inputs[0])
        val2 = parse_value_input(node.inputs[1])
        op = node.operation
        if op == 'ADD':
            out_val = '({0} + {1})'.format(val1, val2)
        elif op == 'SUBTRACT':
            out_val = '({0} - {1})'.format(val1, val2)
        elif op == 'MULTIPLY':
            out_val = '({0} * {1})'.format(val1, val2)
        elif op == 'DIVIDE':
            out_val = '({0} / {1})'.format(val1, val2)
        elif op == 'SINE':
            out_val = 'sin({0})'.format(val1)
        elif op == 'COSINE':
            out_val = 'cos({0})'.format(val1)
        elif op == 'TANGENT':
            out_val = 'tan({0})'.format(val1)
        elif op == 'ARCSINE':
            out_val = 'asin({0})'.format(val1)
        elif op == 'ARCCOSINE':
            out_val = 'acos({0})'.format(val1)
        elif op == 'ARCTANGENT':
            out_val = 'atan({0})'.format(val1)
        elif op == 'POWER':
            out_val = 'pow({0}, {1})'.format(val1, val2)
        elif op == 'LOGARITHM':
            out_val = 'log({0})'.format(val1)
        elif op == 'MINIMUM':
            out_val = 'min({0}, {1})'.format(val1, val2)
        elif op == 'MAXIMUM':
            out_val = 'max({0}, {1})'.format(val1, val2)
        elif op == 'ROUND':
            # out_val = 'round({0})'.format(val1)
            out_val = 'floor({0} + 0.5)'.format(val1)
        elif op == 'LESS_THAN':
            out_val = 'float({0} < {1})'.format(val1, val2)
        elif op == 'GREATER_THAN':
            out_val = 'float({0} > {1})'.format(val1, val2)
        elif op == 'MODULO':
            # out_val = 'float({0} % {1})'.format(val1, val2)
            out_val = 'mod({0}, {1})'.format(val1, val2)
        elif op == 'ABSOLUTE':
            out_val = 'abs({0})'.format(val1)
        if node.use_clamp:
            return 'clamp({0}, 0.0, 1.0)'.format(out_val)
        else:
            return out_val
    elif node.type == 'RGBTOBW':
        col = parse_vector_input(node.inputs[0])
        return '((({0}.r * 0.3 + {0}.g * 0.59 + {0}.b * 0.11) / 3.0) * 2.5)'.format(col)
    elif node.type == 'SEPHSV':
        return '0.0'
    elif node.type == 'SEPRGB':
        col = parse_vector_input(node.inputs[0])
        if socket == node.outputs[0]:
            return '{0}.r'.format(col)
        elif socket == node.outputs[1]:
            return '{0}.g'.format(col)
        elif socket == node.outputs[2]:
            return '{0}.b'.format(col)
    elif node.type == 'SEPXYZ':
        vec = parse_vector_input(node.inputs[0])
        if socket == node.outputs[0]:
            return '{0}.x'.format(vec)
        elif socket == node.outputs[1]:
            return '{0}.y'.format(vec)
        elif socket == node.outputs[2]:
            return '{0}.z'.format(vec)
    elif node.type == 'VECT_MATH':
        vec1 = parse_vector_input(node.inputs[0])
        vec2 = parse_vector_input(node.inputs[1])
        op = node.operation
        if op == 'DOT_PRODUCT':
            return 'dot({0}, {1})'.format(vec1, vec2)
        else:
            return '0.0'

def make_ssao_reproject_pass(stages, node_group, node):
    make_quad_pass(stages, node_group, node, target_index=1,
        bind_target_indices=[3, 4, 2, 5],
        bind_target_constants=['gbufferD', 'gbuffer0', 'slast', 'sveloc'],
        shader_context='ssao_reproject_pass/ssao_reproject_pass/ssao_reproject_pass')
    make_quad_pass(stages, node_group, node, target_index=2,
        bind_target_indices=[1, 4],
        bind_target_constants=['tex', 'gbuffer0'],
        shader_context='blur_edge_pass/blur_edge_pass/blur_edge_pass_x')
    make_quad_pass(stages, node_group, node, target_index=1,
        bind_target_indices=[2, 4],
        bind_target_constants=['tex', 'gbuffer0'],
        shader_context='blur_edge_pass/blur_edge_pass/blur_edge_pass_y')

    assets.add(build_node_trees.assets_path + 'noise8.png')
    assets.add_embedded_data('noise8.png')
def add_irr_assets(output_file_irr):
    assets.add(output_file_irr + '.arm')
def add_rad_assets(output_file_rad, rad_format, num_mips):
    assets.add(output_file_rad + '.' + rad_format)
    for i in range(0, num_mips):
        assets.add(output_file_rad + '_' + str(i) + '.' + rad_format)
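# Illustrative only: a hypothetical helper (not part of the exporter) that just
# mirrors the name expansion done by add_rad_assets() above, so the registered
# asset list is easy to see without touching the assets module.
def _rad_asset_names(output_file_rad, rad_format, num_mips):
    names = [output_file_rad + '.' + rad_format]
    for i in range(0, num_mips):
        names.append(output_file_rad + '_' + str(i) + '.' + rad_format)
    return names

# _rad_asset_names('envmap_radiance', 'hdr', 2) ->
# ['envmap_radiance.hdr', 'envmap_radiance_0.hdr', 'envmap_radiance_1.hdr']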
def build_node_tree(world):
    output = {}
    dat = {}
    output['material_datas'] = [dat]
    dat['name'] = armutils.safe_filename(world.name) + '_material'
    context = {}
    dat['contexts'] = [context]
    context['name'] = 'world'
    context['bind_constants'] = []
    context['bind_textures'] = []

    bpy.data.worlds['Arm'].world_defs = ''

    # Traverse world node tree
    output_node = nodes.get_node_by_type(world.node_tree, 'OUTPUT_WORLD')
    if output_node != None:
        parse_world_output(world, output_node, context)

    # Clear to color if no texture or sky is provided
    wrd = bpy.data.worlds['Arm']
    if '_EnvSky' not in wrd.world_defs and '_EnvTex' not in wrd.world_defs:
        if '_EnvImg' not in wrd.world_defs:
            wrd.world_defs += '_EnvCol'
        # Irradiance json file name
        world.world_envtex_name = world.name
        world.world_envtex_irr_name = world.name
        write_probes.write_color_irradiance(world.name, world.world_envtex_color)

    # Clouds enabled
    if wrd.generate_clouds:
        wrd.world_defs += '_EnvClouds'

    # Percentage closer soft shadows
    if wrd.generate_pcss_state == 'On':
        wrd.world_defs += '_PCSS'
        sdk_path = armutils.get_sdk_path()
        assets.add(sdk_path + 'armory/Assets/noise64.png')
        assets.add_embedded_data('noise64.png')

    # Screen-space ray-traced shadows
    if wrd.generate_ssrs:
        wrd.world_defs += '_SSRS'

    # Alternative models
    if wrd.diffuse_model == 'Oren Nayar':
        wrd.world_defs += '_OrenNayar'

    # TODO: Lamp texture test..
    if wrd.generate_lamp_texture != '':
        bpy.data.worlds['Arm'].world_defs += '_LampColTex'

    if not wrd.generate_lamp_falloff:
        bpy.data.worlds['Arm'].world_defs += '_NoLampFalloff'

    voxelgi = False
    for cam in bpy.data.cameras:
        if cam.is_probe:
            wrd.world_defs += '_Probes'
        if cam.rp_shadowmap == 'None':
            wrd.world_defs += '_NoShadows'
        if cam.rp_voxelgi:
            voxelgi = True
    if voxelgi:
        assets.add_khafile_def('arm_voxelgi')
        wrd.world_defs += '_VoxelGI'
        wrd.world_defs += '_Rad' # Always do radiance for voxels
        wrd.world_defs += '_Irr'

    # Area lamps
    for lamp in bpy.data.lamps:
        if lamp.type == 'AREA':
            wrd.world_defs += '_PolyLight'
            break

    # Data will be written after render path has been processed to gather all defines
    return output
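# Illustrative only: the rough shape of the dict build_node_tree(world) returns,
# before the render path defines are gathered and the data is written out.
# 'World' is a placeholder name, not taken from a real scene.
_example_world_output = {
    'material_datas': [{
        'name': 'World_material',
        'contexts': [{
            'name': 'world',
            'bind_constants': [],  # filled by parse_world_output(), e.g. a 'sunDirection' constant for sky
            'bind_textures': [],   # filled with an 'envmap' texture entry when an environment image is used
        }]
    }]
}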
def parse_color(world, node, context, envmap_strength_const):
    wrd = bpy.data.worlds['Arm']

    # Env map included
    if node.type == 'TEX_ENVIRONMENT' and node.image != None:
        image = node.image
        filepath = image.filepath

        if image.packed_file == None and not os.path.isfile(armutils.safe_assetpath(filepath)):
            log.warn(world.name + ' - unable to open ' + image.filepath)
            return

        tex = {}
        context['bind_textures'].append(tex)
        tex['name'] = 'envmap'
        tex['u_addressing'] = 'clamp'
        tex['v_addressing'] = 'clamp'

        # Reference image name
        tex['file'] = armutils.extract_filename(image.filepath)
        tex['file'] = armutils.safe_filename(tex['file'])
        base = tex['file'].rsplit('.', 1)
        ext = base[1].lower()

        if ext == 'hdr':
            target_format = 'HDR'
        else:
            target_format = 'JPEG'
        do_convert = ext != 'hdr' and ext != 'jpg'
        if do_convert:
            if ext == 'exr':
                tex['file'] = base[0] + '.hdr'
                target_format = 'HDR'
            else:
                tex['file'] = base[0] + '.jpg'
                target_format = 'JPEG'

        if image.packed_file != None:
            # Extract packed data
            unpack_path = armutils.get_fp() + '/build/compiled/Assets/unpacked'
            if not os.path.exists(unpack_path):
                os.makedirs(unpack_path)
            unpack_filepath = unpack_path + '/' + tex['file']
            filepath = unpack_filepath

            if do_convert:
                if not os.path.isfile(unpack_filepath):
                    armutils.write_image(image, unpack_filepath, file_format=target_format)
            elif os.path.isfile(unpack_filepath) == False or os.path.getsize(unpack_filepath) != image.packed_file.size:
                with open(unpack_filepath, 'wb') as f:
                    f.write(image.packed_file.data)

            assets.add(unpack_filepath)
        else:
            if do_convert:
                converted_path = armutils.get_fp() + '/build/compiled/Assets/unpacked/' + tex['file']
                filepath = converted_path
                # TODO: delete cache when file changes
                if not os.path.isfile(converted_path):
                    armutils.write_image(image, converted_path, file_format=target_format)
                assets.add(converted_path)
            else:
                # Link image path to assets
                assets.add(armutils.safe_assetpath(image.filepath))

        # Generate prefiltered envmaps
        world.world_envtex_name = tex['file']
        world.world_envtex_irr_name = tex['file'].rsplit('.', 1)[0]
        disable_hdr = target_format == 'JPEG'

        mip_count = world.world_envtex_num_mips
        mip_count = write_probes.write_probes(filepath, disable_hdr, mip_count, generate_radiance=wrd.generate_radiance)
        world.world_envtex_num_mips = mip_count

        # Append envtex define
        bpy.data.worlds['Arm'].world_defs += '_EnvTex'
        # Append LDR define
        if disable_hdr:
            bpy.data.worlds['Arm'].world_defs += '_EnvLDR'
        # Append radiance define
        if wrd.generate_irradiance and wrd.generate_radiance:
            bpy.data.worlds['Arm'].world_defs += '_Rad'

    # Static image background
    elif node.type == 'TEX_IMAGE':
        bpy.data.worlds['Arm'].world_defs += '_EnvImg'
        tex = {}
        context['bind_textures'].append(tex)
        tex['name'] = 'envmap'
        # No repeat for now
        tex['u_addressing'] = 'clamp'
        tex['v_addressing'] = 'clamp'

        image = node.image
        filepath = image.filepath

        if image.packed_file != None:
            # Extract packed data
            filepath = '/build/compiled/Assets/unpacked'
            unpack_path = armutils.get_fp() + filepath
            if not os.path.exists(unpack_path):
                os.makedirs(unpack_path)
            unpack_filepath = unpack_path + '/' + image.name
            if os.path.isfile(unpack_filepath) == False or os.path.getsize(unpack_filepath) != image.packed_file.size:
                with open(unpack_filepath, 'wb') as f:
                    f.write(image.packed_file.data)
            assets.add(unpack_filepath)
        else:
            # Link image path to assets
            assets.add(armutils.safe_assetpath(image.filepath))

        # Reference image name
        tex['file'] = armutils.extract_filename(image.filepath)
        tex['file'] = armutils.safe_filename(tex['file'])

    # Append sky define
    elif node.type == 'TEX_SKY':
        # Match to cycles
        envmap_strength_const['float'] *= 0.1

        bpy.data.worlds['Arm'].world_defs += '_EnvSky'
        # Append sky properties to material
        const = {}
        const['name'] = 'sunDirection'
        sun_direction = [node.sun_direction[0], node.sun_direction[1], node.sun_direction[2]]
        sun_direction[1] *= -1 # Fix Y orientation
        const['vec3'] = list(sun_direction)
        context['bind_constants'].append(const)

        world.world_envtex_sun_direction = sun_direction
        world.world_envtex_turbidity = node.turbidity
        world.world_envtex_ground_albedo = node.ground_albedo

        # Irradiance json file name
        world.world_envtex_irr_name = world.name
        write_probes.write_sky_irradiance(world.name)

        # Radiance
        if wrd.generate_radiance_sky and wrd.generate_radiance and wrd.generate_irradiance:
            bpy.data.worlds['Arm'].world_defs += '_Rad'
            if wrd.generate_radiance_sky_type == 'Hosek':
                hosek_path = 'armory/Assets/hosek/'
            else:
                hosek_path = 'armory/Assets/hosek_fake/'
            sdk_path = armutils.get_sdk_path()
            # Use fake maps for now
            assets.add(sdk_path + hosek_path + 'hosek_radiance.hdr')
            for i in range(0, 8):
                assets.add(sdk_path + hosek_path + 'hosek_radiance_' + str(i) + '.hdr')

            world.world_envtex_name = 'hosek'
            world.world_envtex_num_mips = 8
def make_texture(image_node, tex_name):
    wrd = bpy.data.worlds["Arm"]
    tex = {}
    tex["name"] = tex_name
    tex["file"] = ""
    image = image_node.image
    matname = mat_state.material.name

    if image == None:
        return None

    if image.filepath == "":
        log.warn(matname + "/" + image.name + " - file path not found")
        return None

    # Reference image name
    tex["file"] = armutils.extract_filename(image.filepath)
    tex["file"] = armutils.safe_filename(tex["file"])
    s = tex["file"].rsplit(".", 1)

    if len(s) == 1:
        log.warn(matname + "/" + image.name + " - file extension required for image name")
        return None

    ext = s[1].lower()
    do_convert = ext != "jpg" and ext != "png" and ext != "hdr"

    # Convert image
    if do_convert:
        tex["file"] = tex["file"].rsplit(".", 1)[0] + ".jpg"
        # log.warn(matname + '/' + image.name + ' - image format is not (jpg/png/hdr), converting to jpg.')

    if image.packed_file != None:
        # Extract packed data
        unpack_path = armutils.get_fp() + "/build/compiled/Assets/unpacked"
        if not os.path.exists(unpack_path):
            os.makedirs(unpack_path)
        unpack_filepath = unpack_path + "/" + tex["file"]

        if do_convert:
            if not os.path.isfile(unpack_filepath):
                armutils.write_image(image, unpack_filepath)
        # Write bytes if size is different or file does not exist yet
        elif os.path.isfile(unpack_filepath) == False or os.path.getsize(unpack_filepath) != image.packed_file.size:
            with open(unpack_filepath, "wb") as f:
                f.write(image.packed_file.data)

        assets.add(unpack_filepath)
    else:
        # if not os.path.isfile(image.filepath):
        #     log.warn(matname + '/' + image.name + ' - file not found')
        #     return tex
        if do_convert:
            converted_path = armutils.get_fp() + "/build/compiled/Assets/unpacked/" + tex["file"]
            # TODO: delete cache when file changes
            if not os.path.isfile(converted_path):
                armutils.write_image(image, converted_path)
            assets.add(converted_path)
        else:
            # Link image path to assets
            assets.add(armutils.safe_assetpath(image.filepath))

    # if image_format != 'RGBA32':
    #     tex['format'] = image_format

    interpolation = image_node.interpolation
    aniso = wrd.anisotropic_filtering_state
    if aniso == "On":
        interpolation = "Smart"
    elif aniso == "Off" and interpolation == "Smart":
        interpolation = "Linear"

    # TODO: Blender seems to load full images on size request, cache size instead
    powimage = is_pow(image.size[0]) and is_pow(image.size[1])

    # Pow2 required to generate mipmaps
    if powimage == True:
        if interpolation == "Cubic": # Mipmap linear
            tex["mipmap_filter"] = "linear"
            tex["generate_mipmaps"] = True
        elif interpolation == "Smart": # Mipmap anisotropic
            tex["min_filter"] = "anisotropic"
            tex["mipmap_filter"] = "linear"
            tex["generate_mipmaps"] = True
    elif image_node.interpolation == "Cubic" or image_node.interpolation == "Smart":
        log.warn(matname + "/" + image.name + " - power of 2 texture required for " + image_node.interpolation + " interpolation")

    if image_node.extension != "REPEAT": # Extend or clip
        tex["u_addressing"] = "clamp"
        tex["v_addressing"] = "clamp"
    else:
        if state.target == "html5" and powimage == False:
            log.warn(matname + "/" + image.name + " - non power of 2 texture can not use repeat mode on HTML5 target")
            tex["u_addressing"] = "clamp"
            tex["v_addressing"] = "clamp"

    # if image.source == 'MOVIE': # Just append movie texture trait for now
    #     movie_trait = {}
    #     movie_trait['type'] = 'Script'
    #     movie_trait['class_name'] = 'armory.trait.internal.MovieTexture'
    #     movie_trait['parameters'] = [tex['file']]
    #     for o in self.materialToGameObjectDict[material]:
    #         o['traits'].append(movie_trait)
    #     tex['source'] = 'movie'
    #     tex['file'] = '' # MovieTexture will load the video

    return tex
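# make_texture() above relies on an is_pow() helper that is not included in
# this section; a minimal sketch of the assumed behaviour (the usual
# power-of-two bit test), named _is_pow_sketch here so it cannot shadow the
# real helper.
def _is_pow_sketch(num):
    # Powers of two (1, 2, 4, 8, ...) have exactly one bit set
    return num != 0 and (num & (num - 1)) == 0

# _is_pow_sketch(1024) -> True, _is_pow_sketch(1000) -> False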