Esempio n. 1
0
def get_meta_data(path):
    """Read the raster's corner coordinates and return them in three frames.

    Returns [xy_pts, pix_pts, geo_pts]: raster cell indices, projected
    coordinates (via the geotransform), and geographic coordinates (via
    an OSR transform to the dataset's geographic CS), one entry per corner.
    """
    FilePaths().log_events("Read IMG Meta Data\n")
    dataset = gdal.Open(path.get_import_file_name())
    gtrn = dataset.GetGeoTransform()
    srs = osr.SpatialReference(dataset.GetProjectionRef())
    geo_srs = srs.CloneGeogCS()
    to_geo = osr.CoordinateTransformation(srs, geo_srs)

    # The four corners of the raster in cell (column, row) coordinates.
    corners = (
        (0., 0.),
        (0, dataset.RasterYSize),
        (dataset.RasterXSize, dataset.RasterYSize),
        (dataset.RasterXSize, 0),
    )

    xy_pts, pix_pts, geo_pts = [], [], []
    for col, row in corners:
        # Apply the affine geotransform: cell index -> projected coords.
        px = gtrn[0] + gtrn[1] * col + gtrn[2] * row
        py = gtrn[3] + gtrn[4] * col + gtrn[5] * row
        xy_pts.append([col, row])
        pix_pts.append([px, py])
        geo_pts.append(to_geo.TransformPoint(px, py)[:2])
    FilePaths().log_events("XY Points: " + str(xy_pts) + "\n")
    FilePaths().log_events("Pixel Points: " + str(pix_pts) + "\n")
    FilePaths().log_events("Geo Points: " + str(geo_pts) + "\n")
    return [xy_pts, pix_pts, geo_pts]
def main():
    """Entry point: assemble the rendered still images into a movie.

    Reads CLI args: argv[5] = directory of stills, argv[6] = movie output path.
    """
    FilePaths().log_events("ANIMATE STILL IMAGES")
    img_dir = sys.argv[5]
    FilePaths().log_events("Image Path: " + img_dir)
    out_dir = sys.argv[6]
    FilePaths().log_events("Output Path: " + out_dir)
    AnimateScene(img_dir, out_dir).animate()
Esempio n. 3
0
def do_create_scene(scene, split):
    """Run the scene-setup steps in order: camera, lamp, keyframes, Cycles.

    `split` is accepted for interface compatibility but not used here.
    """
    FilePaths().log_events("Create Scene\n")
    setup_steps = (
        scene.camera_path,
        scene.make_camera,
        scene.create_lamp,
        scene.key_frame_camera,
        scene.set_camera_orientation,
        scene.set_cycles_options,
        scene.set_end_frame,
    )
    for step in setup_steps:
        step()
    FilePaths().log_events("Splitting Terrain\n")
Esempio n. 4
0
def do_import(in_obj, blend_config, binmode):
    """Import the HiRISE DTM into a fresh blend file and set up its material.

    `binmode` selects the import resolution (e.g. "BIN2"/"BIN6"); the DTM is
    imported with a 0.01 scale factor.
    """
    in_obj.clear_blend_file()
    terrain_name = "terrain"
    blend_config.set_terrain(terrain_name)

    FilePaths().log_events("Import DTM\n")
    in_obj.import_hirise_img(binmode, 0.01)
    FilePaths().log_events("Create Material/Texture\n")
    in_obj.set_material_option()
    in_obj.select_object()
Esempio n. 5
0
def main():
    """Entry point: build the job directory tree, create the blend file,
    render the frames via Hadoop, assemble the movie and notify the user.

    Expected CLI args: argv[1] = CZML json, argv[2] = job directory,
    argv[4] = random job id.
    """
    FilePaths().log_events("MAIN PROGRAM\n")
    FilePaths().log_events("System Args: " + str(sys.argv) + "\n")
    job_dir = sys.argv[2]

    # Absolute paths making up the per-job directory structure.
    FilePaths().log_events("Creating Directory Structure\n")
    job_hadoop = job_dir + "/hadoop"
    job_hadoop_in = job_hadoop + "/input"
    job_hadoop_out = job_hadoop + "/output"  # created by Hadoop itself, not here
    job_temp = job_dir + "/temp"
    job_assets = job_dir + "/assets"

    # Name of the blend file
    blend_file = sys.argv[4] + ".blend"
    # For future implementation: user-supplied texture.
    texture_file = None
    dtm_file = "my_image.IMG"

    # Create the directory structure for the current job.
    # exist_ok=True makes creation idempotent, replacing the repeated
    # isdir()/makedirs() pairs (which were also race-prone).
    for label, directory in (
        ("Current Job: ", job_dir),
        ("Hadoop Job: ", job_hadoop),
        ("Hadoop Input: ", job_hadoop_in),
        ("Current Job Temp File: ", job_temp),
        ("Current Job Assets: ", job_assets),
    ):
        os.makedirs(directory, exist_ok=True)
        FilePaths().log_events(label + directory + "\n")

    make_blend_file(job_dir, blend_file, str(texture_file), dtm_file, sys.argv[1])
    render_scenes(job_hadoop)
    animate_movie(job_dir, sys.argv[4])

    # Notify the web front end that the job is complete.
    FilePaths().log_events("Send Email to User\n")
    r = requests.post("http://et-etb10c-x02:8281/completed", str(sys.argv[4]))
Esempio n. 6
0
def make_blend_file(job_dir, blend_file, texture_file, dtm_file, json):
    """Launch Blender in background mode to build the scene .blend file.

    The arguments after ``--`` are forwarded to the job.py script in order:
    json, job_dir, blend_file, texture_file, dtm_file.
    NOTE(review): the ``json`` parameter here is the CZML argument string
    (argv[1] of the caller), not the stdlib json module, which it shadows
    inside this function.
    """
    FilePaths().log_events("CREATE SCENE\n")

    blender = FilePaths().get_blender_exec()
    FilePaths().log_events("Blender Exececutable: " + blender + "\n")
    script = FilePaths().get_abs_path_project() + "job.py"
    FilePaths().log_events("Script File: " + script)

    # Build the argument vector explicitly and run without a shell:
    # the original shell=True command string broke on paths containing
    # spaces and was open to shell metacharacter injection.
    cmd = [blender, "-b", "-P", script, "--",
           json, job_dir, blend_file, texture_file, dtm_file]
    FilePaths().log_events("Creating Scene: " + " ".join(cmd) + "\n")
    sub = subprocess.Popen(cmd)
    sub.communicate()
Esempio n. 7
0
def do_render(path, config, split, rid):
    """Write one Hadoop input file per render job, each naming a frame range.

    Each input_<n>.txt line is "start end rid blend_file " and is consumed
    by the streaming mapper. `split` is accepted for interface compatibility
    but not used here.
    """
    FilePaths().log_events("Preprocess Render Jobs\n")
    input_dir = path.get_cur_working_dir() + "/hadoop/input/"  # renamed: don't shadow builtin `input`
    frame_count = config.get_end_frame()
    # Frames per job, capped by the configured render count.
    frame_step = min(config.get_end_frame(), path.get_render_count())
    start = end = 1
    job_num = 0
    while end < frame_count:
        end = start + frame_step
        job_file = rid + ".blend"
        # `with` guarantees the file is closed even if the write raises.
        with open(input_dir + "input_" + str(job_num) + ".txt", 'w') as f:
            f.write(
                str(start) + " " + str(end) + " " + str(rid) + " " +
                str(job_file) + " ")
        FilePaths().log_events("Job: " + str(job_num) + ", File: " +
                               str(job_file) + ", Start Frame: " + str(start) +
                               ", End Frame: " + str(end) + "\n")

        start = end + 1
        job_num += 1
Esempio n. 8
0
def animate_movie(job_dir, rid):
    """Run Blender headless to stitch the stills in <job_dir>/temp into a movie.

    Output goes to <final_output_dir>/<rid>/.
    """
    FilePaths().log_events("ANIMATE SCENE\n")
    blender = FilePaths().get_blender_exec()
    FilePaths().log_events("Blender Executable: " + blender + "\n")
    output = FilePaths().get_final_output_dir() + rid + "/"
    FilePaths().log_events("Movie Output Location: " + output + "\n")

    # Explicit argv + shell=False: the original shell=True command string
    # broke on paths containing spaces or shell metacharacters.
    cmd = [blender, "-b", "-P",
           FilePaths().get_abs_path_project() + "animate_scene.py",
           "--", job_dir + "/temp/", output]

    FilePaths().log_events("Execute Animation: " + " ".join(cmd) + "\n")
    sub = subprocess.Popen(cmd)
    sub.communicate()
Esempio n. 9
0
def render_scenes(hadoop_in):
    """Launch the Hadoop streaming job that renders the scene frames.

    `hadoop_in` is the job's hadoop directory; its /input/ and /output/
    subdirectories are passed to the streaming jar along with the project's
    mapper.py and reducer.py scripts.
    """
    FilePaths().log_events("RENDER SCENE\n")
    hadoop = FilePaths().get_hadoop_exec()
    FilePaths().log_events("Hadoop Executable: " + hadoop + "\n")
    hadoop_streaming = FilePaths().get_hadoop_streaming()
    FilePaths().log_events("Hadoop Streaming jar: " + hadoop_streaming + "\n")

    # Explicit argv + shell=False avoids quoting problems in paths
    # (the original shell=True command string broke on spaces).
    cmd = [
        hadoop, "jar", hadoop_streaming,
        "-input", hadoop_in + "/input/",
        "-output", hadoop_in + "/output/",
        "-mapper", FilePaths().get_abs_path_project() + "mapper.py",
        "-reducer", FilePaths().get_abs_path_project() + "reducer.py",
    ]

    FilePaths().log_events("Execute Hadoop Process: " + " ".join(cmd) + "\n")
    sub = subprocess.Popen(cmd)
    sub.communicate()
Esempio n. 10
0
#!/usr/bin/env python
"""Hadoop streaming mapper.

Each stdin line describes one render job as "start end rid blend_file";
for each, launch Blender on the job's blend file with blender_reduce.py.
"""

import sys
import subprocess
from jpl_conf import FilePaths

main_scene = FilePaths().get_job_dir()
blender = FilePaths().get_blender_exec()
script = FilePaths().get_abs_path_project() + "blender_reduce.py"

for input_data in sys.stdin:
    start, end, rid, file_name = input_data.strip().split()
    # The blend file lives under <job_dir>/<rid>/assets/.
    job_file = main_scene + rid + "/assets/" + file_name
    # List-form argv, shell=False: the original shell=True string broke on
    # paths with spaces and was open to shell injection from job data.
    sub = subprocess.Popen([blender, job_file, "-b", "-P", script, "--",
                            str(start), str(end), str(rid)])
    sub.communicate()
Esempio n. 11
0
 def wrap(*args):
     """Timing wrapper: log the wrapped call's duration, forward its result."""
     time1 = time.time()
     ret = f(*args)
     time2 = time.time()
     FilePaths().log_events('TIMING------: %s function took %0.3f ms' % (f.__name__, (time2 - time1)*1000.0) + "\n")
     # Bug fix: the original dropped the wrapped function's return value,
     # making every decorated function return None.
     return ret
Esempio n. 12
0
def main(json=None):
    """Build the Blender scene from the CZML job description.

    Reads CLI args: argv[5] = CZML json path, argv[6] = job working dir,
    argv[7] = output blend file name, argv[9] = input DTM file name.
    NOTE(review): the `json` parameter is never used — the CZML path is
    taken from sys.argv[5] instead; presumably a leftover. Confirm with callers.
    """
    # Parse JSON input into point, angle and sun_data
    json_parse = CZML_Parser(sys.argv[5])
    point, angle = json_parse.blenderCamera()
    FilePaths().log_events("JSON Camera Data Points: " + str(point) + "\n" +
                           "JSON Camera Data Angles: " + str(angle) + "\n")
    sun_data = json_parse.sundata()
    FilePaths().log_events("JSON Sun Data: " + str(sun_data) + "\n")
    # Convert Sun Data to usable points of azimuth and zenith
    sun_pos = unitize(sun_data)
    sun_ori = generate_sun(sun_data)
    FilePaths().log_events("Converted Sun Position: " + str(sun_pos) + "\n" +
                           "Converted Sun Angle: " + str(sun_ori) + "\n")

    # Set Filename variables
    out_file = sys.argv[7]
    FilePaths().log_events("Received Output File Name: " + str(sys.argv[7]) +
                           "\n")
    in_file = sys.argv[9]
    FilePaths().log_events("Received Input File Name: " + str(sys.argv[9]) +
                           "\n")
    text_file = json_parse.texture_file()
    FilePaths().log_events("Received Texture File: " + text_file + "\n")
    img_binmode = json_parse.render_quality()

    # The parser reports a missing texture as the literal string "None".
    if text_file == "None":
        text_file = None

    FilePaths().log_events("Received Texture File Name: " + str(sys.argv[8]) +
                           "\n")

    # Create Class Objects
    FilePaths().log_events("Creating Class Objects --> ")
    file_path = FilePaths(in_file, out_file, text_file)
    blend_config = Blender_Config_Options()
    meta_data = get_meta_data(file_path)
    my_importer = Importer(file_path, blend_config)
    my_scene = BuildScene(blend_config, file_path, meta_data,
                          [point, angle, sun_ori, sun_pos])
    FilePaths().log_events("Complete\n")

    # Set the current working directory for this job
    FilePaths().log_events("Setting Current Job Working Directory: " +
                           str(sys.argv[6]))
    file_path.set_cur_working_dir(sys.argv[6])

    # Map the user-facing quality names to HiRISE bin modes.
    if img_binmode == "low":
        img_binmode = "BIN12-FAST"
    elif img_binmode == "medium":
        img_binmode = "BIN6"
    elif img_binmode == "high":
        img_binmode = "BIN2"

    file_path.set_binmode(img_binmode)

    # Convert Camera locations to blender coordinates
    FilePaths().log_events(
        "Converting JSON Points to Pixel Coordinates ----> ")
    user_points_converted = []
    for pt in point:
        convert = my_scene.geo_2_pix(float(pt[1]), float(pt[2]), float(pt[3]))
        user_points_converted.append(convert)
    FilePaths().log_events("Complete\n" + str(user_points_converted) + "\n")

    # Execute Class Functions
    do_import(my_importer, blend_config, img_binmode)
    do_create_scene(my_scene, my_importer)

    # Save all the options into a blend file
    my_importer.save_scene(out_file)

    # Job id for the render pass is the output file name minus its extension.
    do_render(file_path, blend_config, my_importer,
              out_file[0:out_file.find('.')])
    def animate(self):
        """Load the rendered .png frames into the sequencer and render the movie.

        Filters self.lst down to .png files, adds them as an image strip in
        a SEQUENCE_EDITOR area, then renders the animation to self.out_dir.
        """
        # Filter the file list down to .png frames.
        # Bug fix: the original kept a manual counter but wrote `c = +1`
        # (assignment of +1, not an increment), so after the first pass the
        # extension of the wrong list entry was tested for every item.
        candidates = [item for item in self.lst
                      if os.path.splitext(item)[1] == ".png"]

        FilePaths().log_events("Number of Images: " + str(len(candidates)) +
                               "\n")

        candidates.sort()

        # The sequencer operator expects a list of {"name": ...} dicts.
        frame_files = [{"name": name} for name in candidates]
        n = len(frame_files)

        def find_sequencer_area():
            # Search every screen for a sequence-editor area to run the op in.
            screens = [bpy.context.screen] + list(bpy.data.screens)
            for screen in screens:
                for area in screen.areas:
                    if area.type == 'SEQUENCE_EDITOR':
                        return area

            # Fallback: return the last area examined (preserves the original
            # behavior; raises NameError if there were no areas at all).
            return area

        a = bpy.ops.sequencer.image_strip_add({'area': find_sequencer_area()},
                                              directory=self.in_dir,
                                              filter_blender=False,
                                              filter_image=True,
                                              filter_movie=False,
                                              filter_python=False,
                                              filter_font=False,
                                              filter_sound=False,
                                              filter_text=False,
                                              filter_btx=False,
                                              filter_collada=False,
                                              filter_folder=True,
                                              filemode=9,
                                              relative_path=False,
                                              frame_start=0,
                                              frame_end=n - 1,
                                              sort_method='FILE_SORT_ALPHA',
                                              channel=1,
                                              replace_sel=True,
                                              files=frame_files)

        # Configure the scene output and render the animation.
        stripname = frame_files[0].get("name")
        bpy.data.scenes["Scene"].frame_end = n
        bpy.data.scenes["Scene"].render.image_settings.file_format = 'H264'
        bpy.data.scenes["Scene"].render.filepath = self.out_dir
        bpy.ops.render.render(animation=True)

        # Diagnostic to check whether the images were loaded.
        FilePaths().log_events(
            str(bpy.data.scenes["Scene"].sequence_editor.sequences[stripname])
            + "\n")
        FilePaths().log_events(
            str(
                dir(bpy.data.scenes["Scene"].sequence_editor.
                    sequences[stripname])) + "\n")
def main():
    """Render the frame range assigned to this distributed render task.

    Reads the last three CLI args: start frame, end frame, random job id.
    Configures Cycles for fast (low-sample) rendering, then writes each
    frame in [start, end] as a PNG under <job_dir>/<rid>/temp/.
    """
    blender_options = Blender_Config_Options()
    # Hoist the repeated bpy.data.scenes["Scene"] lookup.
    scn = bpy.data.scenes["Scene"]

    # Render engine and colour management.
    scn.render.engine = blender_options.get_render_engine()
    scn.view_settings.view_transform = blender_options.get_view_render_color()

    # Rendering resolution and border/crop options.
    scn.render.resolution_x = blender_options.get_render_res_x()
    scn.render.resolution_y = blender_options.get_render_res_y()
    scn.render.resolution_percentage = blender_options.get_render_res_percent()
    scn.render.use_border = blender_options.get_use_border()
    scn.render.use_crop_to_border = blender_options.get_crop_to_border()

    # Render performance (tile sizes).
    scn.render.tile_x = blender_options.get_render_tile_x()
    scn.render.tile_y = blender_options.get_render_tile_y()

    # Cycles sampling and light-path settings, tuned for speed over quality.
    scn.cycles.seed = 0
    scn.cycles.samples = 16
    scn.cycles.preview_samples = 16
    scn.cycles_curves.use_curves = True
    scn.cycles_curves.cull_backfacing = True
    scn.cycles.max_bounces = 8
    scn.cycles.min_bounces = 4
    scn.cycles.diffuse_bounces = 0
    scn.cycles.glossy_bounces = 1
    scn.cycles.transmission_bounces = 2
    scn.cycles.volume_bounces = 0
    scn.cycles.use_transparent_shadows = True
    scn.cycles.caustics_reflective = False
    scn.cycles.caustics_refractive = False
    scn.render.use_motion_blur = False
    scn.cycles.debug_use_spatial_splits = True
    scn.render.use_simplify = True
    scn.render.simplify_subdivision_render = 1
    scn.cycles.use_camera_cull = True

    # Sun lamp settings.
    sun = bpy.data.lamps["MySun"]
    sun.shadow_soft_size = blender_options.get_shadow_soft_size()
    sun.cycles.max_bounces = 16
    sun.cycles.cast_shadow = False
    sun.cycles.use_multiple_importance_sampling = False

    FilePaths().log_events("RENDER JOB\n")
    FilePaths().log_events("Args: " + str(sys.argv) + "\n")

    # Renamed from `input` to avoid shadowing the builtin.
    job_args = sys.argv[-3:]

    FilePaths().log_events("Input: " + str(job_args) + "\n")

    start, end, rid = int(job_args[0]), int(job_args[1]), str(job_args[2])
    max_frame = int(bpy.context.scene.frame_end)
    FilePaths().log_events("Maximum Frame: " + str(max_frame) + "\n")
    FilePaths().log_events("Start Frame: " + str(start) + "\n")
    FilePaths().log_events("End Frame: " + str(end) + "\n")
    FilePaths().log_events("Random ID: " + str(rid) + "\n")

    file_path = FilePaths()

    bpy.context.scene.camera = bpy.data.objects['MyCamera']
    # Get the scene context to render
    scene = bpy.context.scene

    # Directory path to store rendered frames
    fp = file_path.get_job_dir() + str(rid) + "/temp/"
    FilePaths().log_events("Rendered Stills Location: " + fp + "\n")

    # Define render file format
    scene.render.image_settings.file_format = 'PNG'  # set output format to .png
    FilePaths().log_events("Begin Rendering\n")
    while start <= end:
        # Zero-pad the frame index so alphabetical order equals frame order.
        rendered = 'part' + str(start).zfill(
            math.ceil(math.log(float(max_frame), 10)) + 1)
        scene.frame_set(int(start))
        scene.render.filepath = fp + rendered
        FilePaths().log_events("Rendering Frame: " + str(start) + " ---: " +
                               str(fp + rendered) + "\n")
        start += 1
        bpy.ops.render.render(write_still=True)

    FilePaths().log_events("Rendering Job Complete\n")