Example #1
def have_data(parent, dirs, is_levels=False):
    """Checks that the directories expected to have input images are non-empty.

    Args:
        parent: class instance
        dirs (list[str]): Directories to be verified.
        is_levels (bool, optional): Whether or not the levels are being used
            instead of full-size images (usually only relevant in AWS renders).

    Returns:
        dict[str, bool]: Map of each directory and whether or not it is non-empty.
    """
    have_data = {}
    for dir in dirs:
        dir_key = dir
        if is_levels:
            # We need level 0 for thresholds
            dir = posixpath.join(dir, "level_0")
        log(glog.green(f"Looking for valid images in {dir}..."))
        sample_frame = None
        if parent.s3_sample_frame and (
            dir == parent.path_video_color
            or dir.startswith(parent.path_video_color_levels)
        ):
            sample_frame = parent.s3_sample_frame

        have_data[dir_key] = check_image_existence(dir, sample_frame) != ""
        if not have_data[dir_key]:
            log(glog.yellow(f"No valid images found in {dir}"))
    return have_data
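A minimal usage sketch (not from the module): have_data only reads the s3_sample_frame and path_video_color* attributes of parent, so a stub object is enough for illustration. The paths and attribute values below are hypothetical, and the module's own helpers (check_image_existence, log, glog) are assumed to be importable.

from types import SimpleNamespace

# Hypothetical stub standing in for the real class instance.
parent_stub = SimpleNamespace(
    s3_sample_frame=None,
    path_video_color="/cache/video/color",
    path_video_color_levels="/cache/video/color_levels",
)
status = have_data(parent_stub, ["/cache/video/color", "/cache/bg/color"])
missing = [d for d, ok in status.items() if not ok]  # directories without valid images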
Example #2
def download_frame_s3(parent, dir_s3, dir_local, frames_s3_names, is_levels=False):
    """Downloads a single sample frame tar from S3 and unpacks it locally.

    Args:
        parent: class instance
        dir_s3 (str): S3 directory containing the frame tars.
        dir_local (str): Local directory the tars are downloaded and unpacked into.
        frames_s3_names (list[str]): Frame names available in S3.
        is_levels (bool, optional): Whether the directory holds levels rather than
            full-size images.
    """
    frame_s3_name_first = frames_s3_names[0]
    s3_sample_frame = parent.s3_sample_frame
    if s3_sample_frame:
        if s3_sample_frame in frames_s3_names:
            frame_s3_name = s3_sample_frame
        else:
            log(glog.yellow(f"Cannot find {s3_sample_frame} in {frames_s3_names}"))
            return
    else:
        frame_s3_name = frame_s3_name_first

    fn = f"{frame_s3_name}.tar"
    if is_levels:
        recursive = True
        levels = parent.aws_util.s3_ls(dir_s3, run_silently=not verbose)
        levels = list(filter(None, levels))  # removes empty results from ls
        t = "levels"
        srcs = [posixpath.join(dir_s3, level, fn) for level in levels]
        dsts = [posixpath.join(dir_local, level, fn) for level in levels]
    else:
        recursive = False
        t = "full-size"
        srcs = [posixpath.join(dir_s3, fn)]
        dsts = [posixpath.join(dir_local, fn)]
    exclude = None
    include = None

    print(glog.green(f"Downloading {fn} {t} from {dir_s3}..."))
    for src, dst in zip(srcs, dsts):
        parent.aws_util.s3_cp(
            src, dst, exclude, include, recursive=False, run_silently=not verbose
        )
    unpack_tars(parent, dir_local, recursive)
Example #3
def close_section_logs():
    """Cleanup method to release file handles on logging files."""
    for s in sections:
        if "log_reader" in dir(s):
            if FLAGS.verbose:
                print(glog.green(f"Closing {s.log_reader.log_file}"))
            s.log_reader.close_log_file()
Example #4
def unpack_tars(parent, dir_local, recursive=True):
    """Extracts and deletes the .tar files directly in dir_local, or in its
    immediate subdirectories (e.g. level_N/) when recursive.
    """
    needle = "*/" if recursive else "*"
    tar_files = list(glob.iglob(f"{dir_local}/*{needle}*.tar", recursive=False))
    for tar_file in tar_files:
        log(glog.green(f"Unpacking {tar_file}..."))
        with tarfile.open(tar_file) as t:
            t.extractall(os.path.dirname(tar_file))
        os.remove(tar_file)
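A usage sketch for the two layouts unpack_tars handles; note that the parent argument is not actually referenced inside the function. The cache paths below are made up.

# Full-size layout: tars sit directly in dir_local.
unpack_tars(parent, "/cache/video/color", recursive=False)

# Levels layout: tars sit one directory down (level_0/, level_1/, ...),
# which is what the recursive glob pattern targets.
unpack_tars(parent, "/cache/video/color_levels", recursive=True)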
Example #5
    def closeEvent(self, event):
        """Callback event handler for the UI being closed.

        Args:
            event (QEvent): Caught instance of the closing event.
        """
        print(glog.green("Closing app..."))
        close_section_logs()
        event.accept()
Example #6
def set_full_size_widths(parent):
    """Records the local full-size frame width for the bg and video color paths.

    Falls back to parent.rig_width if the width cannot be determined locally.
    """
    camera_ref = parent.cameras[0]
    for t in ["bg", "video"]:
        p = getattr(parent, f"path_{t}_color", None)
        p_local = os.path.join(p, camera_ref)
        full_size_width = get_local_frame_width(p_local)
        if full_size_width < 0:
            full_size_width = parent.rig_width
        setattr(parent, f"{t}_full_size_width", full_size_width)
        print(glog.green(f"Local {t} full-size width: {full_size_width}"))
Example #7
def get_rigs(parent):
    """Gets filenames corresponding to the project rigs.

    Args:
        parent: class instance

    Returns:
        list[str]: Rig filenames (assumed to be named *.json).
    """
    log(glog.green(f"Looking for rigs in {parent.path_rigs}..."))
    ps = list(glob.iglob(f"{parent.path_rigs}/**.json", recursive=False))
    if len(ps) == 0:
        log(glog.yellow(f"No rigs found in {parent.path_rigs}"))
    return ps
Example #8
def get_local_frame_names(dir):
    """Finds all the frames in a directory.

    Args:
        dir (str): Path to a local directory.

    Returns:
        list[str]: Sorted list of frame names in the directory. If an invalid directory
            is passed in, an empty result is returned.
    """
    if os.path.isdir(dir):
        log(glog.green(f"Looking for local frames in {dir}"))
        frames = list_only_visible_files(dir)
        return sorted(get_stem(f) for f in frames if is_frame(f))
    return []
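The helpers get_stem and is_frame are not shown here; hypothetical sketches consistent with how frame names are used elsewhere in these examples (numeric stems such as 000123, see resize_local_frame), purely for illustration:

import os


def get_stem_sketch(filename):
    """Hypothetical: "000123.png" -> "000123"."""
    return os.path.splitext(os.path.basename(filename))[0]


def is_frame_sketch(filename):
    """Hypothetical: a frame file is assumed to have a purely numeric stem."""
    return get_stem_sketch(filename).isdigit()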
Example #9
    def configure_farm(self):
        """Sets up credentials in the terminal for an AWS render."""
        project_address = Address(self.project_root)
        self.is_aws = project_address.protocol == "s3"
        self.is_lan = project_address.protocol == "smb"
        if self.is_aws:
            print(glog.green("Configuring AWS parameters..."))
            self.aws_util = AWSUtil(
                config.DOCKER_AWS_CREDENTIALS, s3_url=self.project_root
            )
            self.aws_util.configure_shell(run_silently=not self.verbose)
            fe = self.dlg.gb_file_explorer
            fe.setTitle(f"{fe.title()} (cache)")

            kube_workers = self.aws_util.ec2_get_kube_worker_instances()
            common.set_aws_workers(kube_workers)
Example #10
def print_frame_range(parent, suffix):
    """Displays frame range.

    Args:
        parent: class instance
        suffix (str): Suffix of the frame list attribute to read (frames_<suffix>);
            also shown in the logged output.
    """
    ff = getattr(parent, f"frames_{suffix}", None)
    if ff is None:
        return
    elif len(ff) == 0:
        frame_range = ""
    elif len(ff) == 1:
        frame_range = f"{ff[0]}"
    else:
        frame_range = f"{ff[0]}, {ff[-1]}"
    log(glog.green(f"Frames ({suffix}): [{frame_range}]"))
Example #11
def update_frame_names(
    parent, data_types=None, image_types=None, update_local=True, update_s3=True
):
    """Updates frame names for given data types

    Args:
        parent: class instance
        data_types (list[str]): List of data types.
        image_types (list[str]): List of image types.
    """
    global verbose
    verbose = parent.verbose

    log(glog.green("Getting frame names..."))
    glog.check(len(parent.cameras) > 0, "No cameras found!")
    camera_ref = parent.cameras[0]
    if not data_types:
        data_types = ["bg", "video"]
    if not image_types:
        image_types = ["color", "color_levels", "disparity", "disparity_levels", "bin"]
    for t in data_types:
        for d in image_types:
            if t == "bg" and d == "bin":
                continue
            suffix = f"{t}_{d}" if d != "bin" else d
            p = getattr(parent, f"path_{suffix}", None)
            if "_levels" in d:
                p = posixpath.join(p, "level_0")

            if update_local:
                p_local = posixpath.join(p, camera_ref)
                setattr(
                    parent,
                    f"frames_{suffix}",
                    get_frame_names(parent, p_local, is_cache=True),
                )
                print_frame_range(parent, suffix)
            if update_s3 and parent.is_aws:
                # Cached frames are eventually synced to S3, so any frame in the
                # cache should be added to the S3 frames
                frames_s3 = get_frame_names(parent, p, is_cache=False)
                frames_cache = getattr(parent, f"frames_{suffix}", None)
                frames_s3 = sorted(merge_lists(frames_s3, frames_cache))
                setattr(parent, f"frames_{suffix}_s3", frames_s3)
                print_frame_range(parent, f"{suffix}_s3")
Example #12
def get_rigs_s3(parent):
    """Downloads a calibrated rig from S3 if it exists.

    Args:
        parent: class instance

    Returns:
        str: Local filename of the calibrated rig.
    """
    path_rigs_s3 = posixpath.join(parent.project_root, "rigs")

    log(glog.green(f"Downloading rigs from {path_rigs_s3}..."))
    parent.aws_util.s3_cp(
        f"{path_rigs_s3}/", f"{parent.path_rigs}/", run_silently=not verbose
    )

    # If there are rigs in S3 they should now be downloaded to local directory
    return get_rigs(parent)
Example #13
def resize_local_frame(parent, dir_full, dir_level, rig_ref):
    """Creates level images for a single local frame by resizing its full-size images."""
    glog.check(
        len(parent.cameras) > 0,
        f"No cameras found. Cannot resize local frame {dir_full}",
    )
    dir_cam = posixpath.join(dir_full, parent.cameras[0])
    frames = list_only_visible_files(dir_cam)
    glog.check_gt(len(frames), 0, f"No frames found in {dir_cam}")
    if parent.s3_sample_frame and dir_full == parent.path_video_color:
        frame_name = parent.s3_sample_frame
    else:
        frame_name, _ = os.path.splitext(sorted(frames)[0])
    frame_num = int(frame_name)

    log(glog.green(f"Resizing full-size frame {frame_name} in {dir_full}..."))
    with open(rig_ref, "r") as f:
        rig = json.load(f)
        resize_frames(dir_full, dir_level, rig, frame_num, frame_num)
Example #14
def get_s3_frame_names(parent, dir):
    """Finds all the frames in an S3 directory.

    Args:
        parent: class instance
        dir (str): Path to the S3 directory being scanned.

    Returns:
        list[str]: Sorted list of frame names in the directory.
    """
    if not dir.startswith("s3://"):
        path_project_s3 = parent.project_root
        dir_s3 = dir.replace(parent.path_project, path_project_s3)
    else:
        dir_s3 = dir
    log(glog.green(f"Looking for S3 frames in {dir_s3}"))
    frames = parent.aws_util.s3_ls(dir_s3, run_silently=not verbose)
    frames = [f for f in frames if f.endswith(".tar")]
    return sorted(get_stem(f) for f in frames if is_frame(f))
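The local-to-S3 rewrite above simply swaps the local project prefix for the S3 project root; an illustration with made-up paths:

path_project = "/cache/myproject"          # hypothetical parent.path_project
project_root = "s3://mybucket/myproject"   # hypothetical parent.project_root
dir_local = "/cache/myproject/video/color"
dir_s3 = dir_local.replace(path_project, project_root)
# -> "s3://mybucket/myproject/video/color"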
Example #15
def verify(parent, save_frame_ranges=True):
    """Performs all validation on data. Warnings are displayed if an unexpected structure
    is encountered.
    """
    global verbose
    verbose = parent.verbose
    if not verbose:
        print(glog.green("\nVerifying data (may take a few seconds)..."))

    # Look for a rig
    rig_fns = get_rigs(parent)
    if not rig_fns and parent.is_aws:  # no local rigs, check S3
        rig_fns = get_rigs_s3(parent)
    glog.check(len(rig_fns) > 0, "Cannot launch UI without any rig")
    rig_ref = rig_fns[0]
    parent.cameras = get_cameras(parent, rig_ref)
    parent.rig_width = get_rig_width(parent, rig_ref)

    # We need full-size images if we want to (re-)calibrate
    have_full_color = have_color(parent)
    if not all(have_full_color.values()) and parent.is_aws:  # no local color, check S3
        if parent.s3_ignore_fullsize_color:
            log(glog.yellow(f"Ignoring full-size color image downloads from S3..."))
        else:
            have_full_color = get_data_s3(parent, have_full_color)

    # We have a rig, but we need color levels to run thresholds for depth
    # estimation
    have_level_color = have_color(parent, is_levels=True)

    if not all(have_level_color.values()) and parent.is_aws:  # no local color, check S3
        have_level_color = get_data_s3(parent, have_level_color, is_levels=True)

    # Check what color types have full-size but not level color
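    # (this relies on have_color returning directories in the same order for the
    # full-size and levels calls, so zipping the keys pairs each level directory
    # with its full-size counterpart)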
    map_level_full = dict(zip(have_level_color, have_full_color))
    for dir_level, has_level_color in have_level_color.items():
        if not has_level_color:
            log(glog.yellow(f"No level colors in {dir_level}"))
            dir_full = map_level_full[dir_level]
            if not have_full_color[dir_full]:
                log(
                    glog.yellow(
                        f"No full-size colors in {dir_full}. Cannot create levels"
                    )
                )
                continue
            else:
                resize_local_frame(parent, dir_full, dir_level, rig_ref)

    have_level_color = have_color(parent, is_levels=True)
    if not have_level_color[parent.path_bg_color_levels]:
        log(glog.yellow("No background frames found. Cannot render background"))

    if not have_level_color[parent.path_video_color_levels]:
        log(glog.yellow("No video frames found. Cannot render video"))

    if not any(have_level_color.values()) and not any(have_full_color.values()):
        glog.check(False, "No colors. Cannot calibrate")

    # Download disparities from S3
    if parent.is_aws:
        download_s3_disparities(parent)

    # Get frames for color, color levels, disparity (background and video)
    if save_frame_ranges:
        update_frame_names(parent)