Example #1
def apply_patch(ip, file_path, user='******', password='******'):
    file_name = os.path.basename(file_path)
    patch_name, patch_version = file_name.split('_')[0:2]
    # Check if patch has already been installed
    ssh_client = RemoteClient(ip, user, password)
    patch_info = get_patch_info(ssh_client, patch_version)
    if patch_info and patch_info['Installed'] == 'Yes':
        LOG.info('Patch %s has already been installed; skipping installation.',
                 file_name)
        return patch_info
    if not patch_info:
        LOG.info('Adding patch %s' % file_name)
        remote_path = os.path.join('/tmp', file_name)
        ssh_client.scp(file_path, '/tmp')
        ssh_client.run('viopatch add -l %s' % remote_path,
                       sudo=True,
                       raise_error=True)
        # Clean up the patch file in case the OMS disk becomes full
        ssh_client.run('rm -f %s' % remote_path)
    patch_info = get_patch_info(ssh_client, patch_version)
    if not patch_info:
        raise NotSupportedError('Failed to add patch %s' % file_name)
    LOG.info('Installing patch %s' % file_name)
    ssh_client.run('viopatch install --patch %s --version %s --as-infra' %
                   (patch_name, patch_version),
                   sudo=True,
                   raise_error=True,
                   feed_input='Y')
    patch_info = get_patch_info(ssh_client, patch_version)
    if patch_info['Installed'] != 'Yes':
        LOG.error('Failed to install patch %s' % patch_info)
        raise NotCompletedError('Failed to install patch %s.' % file_name)
    LOG.info('Successfully installed patch %s' % file_name)
    return patch_info
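The filename parsing above assumes patch files are named '<patch>_<version>_...'; a minimal standalone sketch of that step (the file path below is hypothetical):

import os

# Hypothetical patch file path; only the '<name>_<version>_...' naming convention matters here.
file_path = '/downloads/viopatch_4.1.5.12345_all.deb'
file_name = os.path.basename(file_path)
patch_name, patch_version = file_name.split('_')[0:2]
print(patch_name, patch_version)   # -> viopatch 4.1.5.12345
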
Example #2
def config_image(config_parser, p_vip, user_name, password, tenant_name):
    # Get the default image.
    session = get_session(p_vip=p_vip,
                          username=user_name,
                          password=password,
                          project_name=tenant_name,
                          domain_name=DEFAULT_DOMAIN_ID)
    glance = GlanceClient(version='2', session=session)
    count = 0
    default_image = None
    alt_image = None
    for image in glance.images.list():
        count += 1
        if image.name == IMAGE_NAME_4_1:
            default_image = image
        else:
            alt_image = image
    if count == 0:
        raise NotSupportedError('At least 1 image in glance is required.')
    default_image = default_image or alt_image
    LOG.info('Use image %s as default image in tempest', default_image.name)
    alt_image = alt_image or default_image
    LOG.info('Use image %s as alternate image in tempest', alt_image.name)
    config_parser.set('compute', 'image_ref', default_image.id)
    config_parser.set('compute', 'image_ref_alt', alt_image.id)
    config_parser.set('heat_plugin', 'image_ref', default_image.id)
    config_parser.set('heat_plugin', 'minimal_image_ref', default_image.id)
Example #3
def config_compute(config_parser, private_vip, user_name, password,
                   tenant_name, endpoint_type='internalURL',
                   min_compute_nodes=1):
    auth_url = get_auth_url(private_vip)
    nova = nova_client.Client('2', user_name, password, tenant_name, auth_url,
                              insecure=True, endpoint_type=endpoint_type)
    # Get the default image.
    images = nova.images.list()
    if len(images) == 0:
        raise NotSupportedError('At least 1 image in glance is required.')
    default_image = images[0]
    # Iterate over a copy so the list is not mutated while being traversed.
    for image in images[:]:
        if image.name == IMAGE_NAME:
            default_image = image
            images.remove(image)
            break
    LOG.info('Use image %s as default image in tempest', default_image.name)
    alt_image = images[0] if len(images) > 0 else default_image
    LOG.info('Use image %s as alternate image in tempest', alt_image.name)
    config_parser.set('compute', 'image_ref', default_image.id)
    config_parser.set('compute', 'image_ref_alt', alt_image.id)
    # Create the flavors
    m1 = create_if_not_exist(nova.flavors, 'flavor', FLAVOR1_NAME, ram=512,
                             vcpus=1, disk=10, is_public=True)
    config_parser.set('compute', 'flavor_ref', m1.id)
    config_parser.set('orchestration', 'instance_type', FLAVOR1_NAME)
    m2 = create_if_not_exist(nova.flavors, 'flavor', FLAVOR2_NAME, ram=1024,
                             vcpus=2, disk=10, is_public=True)
    config_parser.set('compute', 'flavor_ref_alt', m2.id)
    config_parser.set('compute', 'min_compute_nodes', min_compute_nodes)
    config_parser.set('compute-feature-enabled', 'pause', 'false')
Example #4
def deploy_vapp(vc_host,
                vc_user,
                vc_password,
                dc,
                cluster,
                ds,
                network,
                ova_path,
                ntp_server=None,
                viouser_pwd='vmware',
                log_path=None,
                ip=None,
                netmask=None,
                gateway=None,
                dns=None,
                ovf_tool_path=None):
    if not ovf_tool_path:
        ovf_tool_path = get_ovf_tool_path()
        if not ovf_tool_path:
            ovf_tool_path = DEFAULT_LOCAL_OVF_TOOL_PATH
    if not os.path.isfile(ovf_tool_path):
        LOG.error('ovftool not found.')
        raise NotSupportedError('ovftool not found')
    if not log_path:
        log_path = os.getcwd()

    ntp_config = '--prop:ntpServer=%s ' % ntp_server if ntp_server else ''
    dns_config = '--prop:vami.DNS.management-server=%s ' % dns if dns else ''
    # deploy ova and poweron vm
    # TODO: implement deploy with dhcp
    cmd = ('"%s" --X:"logFile"="%s/deploy_oms.log" '
           '--vService:"installation"='
           '"com.vmware.vim.vsm:extension_vservice" '
           '--acceptAllEulas --noSSLVerify --powerOn '
           '--datastore=%s '
           '-dm=thin '
           '--net:"VIO Management Server Network"="%s" '
           '--prop:vami.ip0.management-server=%s '
           '--prop:vami.netmask0.management-server=%s '
           '--prop:vami.gateway.management-server=%s '
           '%s '
           '--prop:viouser_passwd=%s '
           '%s %s vi://%s:%s@%s/%s/host/%s'
           '' % (ovf_tool_path, log_path, ds, network, ip, netmask, gateway,
                 dns_config, viouser_pwd, ntp_config, ova_path, vc_user,
                 vc_password, vc_host, dc, cluster))
    LOG.info('Start to deploy management server.')
    # LOG.info(cmd)
    # exit_code = os.system(cmd)
    exit_code = shell.local(cmd)[0]
    if exit_code:
        LOG.warning('Failed to deploy vApp. Retry deploying after 3 minutes.')
        time.sleep(60 * 3)
        shell.local(cmd, raise_error=True)
    wait_for_mgmt_service(ip, vc_user, vc_password)
    LOG.info('Successfully deployed management server.')
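A reduced sketch of the command assembly above, showing how the optional NTP and DNS properties collapse to empty strings when they are not supplied (all values are placeholders, not a real deployment):

# Placeholder values for illustration only.
ntp_server = None
dns = '10.0.0.2'

ntp_config = '--prop:ntpServer=%s ' % ntp_server if ntp_server else ''
dns_config = '--prop:vami.DNS.management-server=%s ' % dns if dns else ''

cmd = ('ovftool --acceptAllEulas %s%s vi://user:pass@vc.example.com/dc/host/cluster'
       % (dns_config, ntp_config))
print(cmd)   # the NTP property is omitted, the DNS property is included
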
Example #5
    def setPhoto(self, image):
        """
        Convert a color or grayscale image to a pixmap and assign it to the photo object.

        :param image: Image to be displayed. The image is assumed to be a color or grayscale
                      image with data type uint8 or uint16.
        :return: -
        """

        # Indicate that an image is being loaded.
        self.image_loading_busy = True

        # Convert the image into uint8 format. If the frame type is uint16, values correspond to
        # 16bit resolution.
        if image.dtype == uint16:
            image_uint8 = (image / 256.).astype(uint8)
        elif image.dtype == uint8:
            image_uint8 = image.astype(uint8)
        else:
            raise NotSupportedError(
                "Attempt to set a photo in frame viewer with type neither"
                " uint8 nor uint16")

        self.shape_y = image_uint8.shape[0]
        self.shape_x = image_uint8.shape[1]

        # Normalize the frame brightness.
        image_uint8 = normalize(image_uint8,
                                None,
                                alpha=0,
                                beta=255,
                                norm_type=NORM_MINMAX)

        # The image is monochrome:
        if len(image_uint8.shape) == 2:
            qt_image = QtGui.QImage(image_uint8, self.shape_x, self.shape_y,
                                    self.shape_x,
                                    QtGui.QImage.Format_Grayscale8)
        # The image is RGB color.
        else:
            qt_image = QtGui.QImage(image_uint8, self.shape_x, self.shape_y,
                                    3 * self.shape_x,
                                    QtGui.QImage.Format_RGB888)
        pixmap = QtGui.QPixmap(qt_image)

        if pixmap and not pixmap.isNull():
            self._empty = False
            self._photo.setPixmap(pixmap)
        else:
            self._empty = True
            self._photo.setPixmap(QtGui.QPixmap())

        # Release the image loading flag.
        self.image_loading_busy = False
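The dtype branch above reduces 16-bit frames to 8 bits by dividing by 256; a minimal numpy sketch of that mapping (the pixel values are arbitrary):

import numpy as np

# Arbitrary 16-bit pixel values; dividing by 256 maps 0..65535 onto 0..255.
frame_16bit = np.array([[0, 256, 65535]], dtype=np.uint16)
frame_8bit = (frame_16bit / 256.).astype(np.uint8)
print(frame_8bit)   # -> [[  0   1 255]]
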
Example #6
    def frame_score(self):
        """
        Compute the frame quality values and normalize them such that the best value is 1.

        :return: -
        """

        if self.configuration.rank_frames_method == "xy gradient":
            method = Miscellaneous.local_contrast
        elif self.configuration.rank_frames_method == "Laplace":
            method = Miscellaneous.local_contrast_laplace
        elif self.configuration.rank_frames_method == "Sobel":
            method = Miscellaneous.local_contrast_sobel
        else:
            raise NotSupportedError("Ranking method " +
                                    self.configuration.rank_frames_method +
                                    " not supported")

        # For all frames compute the quality with the selected method.
        if method != Miscellaneous.local_contrast_laplace:
            for frame_index in range(self.frames.number):
                frame = self.frames.frames_mono_blurred(frame_index)
                if self.progress_signal is not None and frame_index % self.signal_step_size == 1:
                    self.progress_signal.emit(
                        "Rank all frames",
                        int((frame_index / self.number) * 100.))
                self.frame_ranks.append(
                    method(frame, self.configuration.rank_frames_pixel_stride))
        else:
            for frame_index in range(self.frames.number):
                frame = self.frames.frames_mono_blurred_laplacian(frame_index)
                # self.frame_ranks.append(mean((frame - frame.mean())**2))
                if self.progress_signal is not None and frame_index % self.signal_step_size == 1:
                    self.progress_signal.emit(
                        "Rank all frames",
                        int((frame_index / self.number) * 100.))
                self.frame_ranks.append(meanStdDev(frame)[1][0][0])

        if self.progress_signal is not None:
            self.progress_signal.emit("Rank all frames", 100)
        # Sort the frame indices in descending order of quality.
        self.quality_sorted_indices = sorted(range(len(self.frame_ranks)),
                                             key=self.frame_ranks.__getitem__,
                                             reverse=True)

        # Set the index of the best frame, and normalize all quality values.
        self.frame_ranks_max_index = self.quality_sorted_indices[0]
        self.frame_ranks_max_value = self.frame_ranks[
            self.frame_ranks_max_index]
        self.frame_ranks /= self.frame_ranks_max_value
Example #7
def get_patch_info(output, patch_version):
    lines = output.split('\n')[2:]
    for line in lines:
        if line.strip():
            items = line.split()
            if patch_version == items[1]:
                LOG.debug('Found patch info: %s' % line)
                return {
                    'Name': items[0],
                    'Version': items[1],
                    'Type': items[2],
                    'Installed': items[-1]
                }
    raise NotSupportedError('Patch %s not added' % patch_version)
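A standalone sketch of the same column parsing; the sample output below is hypothetical and only mirrors the layout the function assumes (two header lines, then whitespace-separated Name, Version and Type columns with an Installed flag in the last column):

# Hypothetical 'viopatch list' style output; the real formatting may differ.
sample_output = ("Name       Version      Type   Installed\n"
                 "---------  -----------  -----  ---------\n"
                 "viopatch   4.1.5.12345  infra  Yes\n")

for line in sample_output.split('\n')[2:]:
    if line.strip():
        items = line.split()
        print({'Name': items[0], 'Version': items[1],
               'Type': items[2], 'Installed': items[-1]})
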
Example #8
    def __init__(self, parent_gui, frame, message, signal_finished):
        """
        Initialization of the widget.

        :param parent_gui: Parent GUI object
        :param frame: Background image on which the patch is superimposed. Usually, the mean frame
                      is used for this purpose.
        :param message: Message to tell the user what to do.
        :param signal_finished: Qt signal with signature (int, int, int, int) sending the
                                coordinate bounds (y_low, y_high, x_low, x_high) of the patch
                                selected, or (0, 0, 0, 0) if unsuccessful.
        """

        super(RectangularPatchEditorWidget, self).__init__(parent_gui)
        self.setupUi(self)

        self.parent_gui = parent_gui

        # Convert the frame into uint8 format. If the frame type is uint16 or int32,
        # values correspond to 16bit resolution.
        if frame.dtype == uint16 or frame.dtype == int32:
            self.frame = (frame / 256).astype(uint8)
        elif frame.dtype == uint8:
            self.frame = frame
        else:
            raise NotSupportedError(
                "Attempt to set a photo in frame viewer with type neither"
                " uint8, uint16 nor int32")

        self.message = message
        self.signal_finished = signal_finished

        self.viewer = RectangularPatchEditor(self.frame)
        self.verticalLayout.insertWidget(0, self.viewer)

        self.messageLabel.setText(self.message)
        self.messageLabel.setStyleSheet('color: red')
        self.buttonBox.accepted.connect(self.done)
        self.buttonBox.rejected.connect(self.reject)
        self.shape_y = None
        self.shape_x = None
        self.y_low = None
        self.y_high = None
        self.x_low = None
        self.x_high = None

    def align_frames(self):
        """
        Compute the displacement of all frames relative to the sharpest frame using the alignment
        rectangle.

        :return: -
        """

        if self.configuration.align_frames_mode == "Surface":
            # For "Surface" mode the alignment rectangle has to be selected first.
            if self.x_low_opt is None:
                raise WrongOrderingError(
                    "Method 'align_frames' is called before 'select_alignment_rect'"
                )

            # From the sharpest frame cut out the alignment rectangle. The shifts of all other
            # frames will be computed relative to this patch.
            self.reference_window = self.frames.frames_mono_blurred(
                self.frame_ranks_max_index)[
                    self.y_low_opt:self.y_high_opt,
                    self.x_low_opt:self.x_high_opt].astype(int32)
            self.reference_window_shape = self.reference_window.shape

        elif self.configuration.align_frames_mode == "Planet":
            # For "Planetary" mode compute the center of gravity for the reference image.
            cog_reference_y, cog_reference_x = AlignFrames.center_of_gravity(
                self.frames.frames_mono_blurred(self.frame_ranks_max_index))

        else:
            raise NotSupportedError("Frame alignment mode '" +
                                    self.configuration.align_frames_mode +
                                    "' not supported")

        # Initialize a list which for each frame contains the shifts in y and x directions.
        self.frame_shifts = [None] * self.frames.number

        # Initialize lists with info on failed frames.
        self.dev_r_list = []
        self.failed_index_list = []

        # Initialize a counter of processed frames for progress bar signalling. It is set to one
        # because in the loop below the optimal frame is not counted.
        number_processed = 1

        # Loop over all frames. Begin with the sharpest (reference) frame
        for idx in chain(reversed(range(self.frame_ranks_max_index + 1)),
                         range(self.frame_ranks_max_index,
                               self.frames.number)):

            if idx == self.frame_ranks_max_index:
                # For the sharpest frame the displacement is 0 because it is used as the reference.
                self.frame_shifts[idx] = [0, 0]
                # Initialize two variables which keep the shift values of the previous step as
                # the starting point for the next step. This reduces the search radius if frames are
                # drifting.
                dy_min_cum = dx_min_cum = 0

            # For all other frames: Compute the global shift, using the "blurred" monochrome image.
            else:
                # After every "signal_step_size"th frame, send a progress signal to the main GUI.
                if self.progress_signal is not None and number_processed % self.signal_step_size == 1:
                    self.progress_signal.emit(
                        "Align all frames",
                        int((number_processed / self.frames.number) * 100.))

                frame = self.frames.frames_mono_blurred(idx)

                if self.configuration.align_frames_mode == "Planet":
                    # In Planetary mode the shift of the "center of gravity" of the image is
                    # computed. This algorithm cannot fail.
                    cog_frame = AlignFrames.center_of_gravity(frame)
                    self.frame_shifts[idx] = [
                        cog_reference_y - cog_frame[0],
                        cog_reference_x - cog_frame[1]
                    ]
                    number_processed += 1
                    continue

                # In "Surface" mode three alignment algorithms can be chosen from. In each case
                # the result is the shift vector [dy_min, dx_min]. The second and third algorithm
                # do a local search. It can fail if within the search radius no minimum is found.
                # The first algorithm (cross-correlation) can fail as well, but in this case there
                # is no indication that this happened.
                elif self.configuration.align_frames_method == "Translation":
                    # The shift is computed with cross-correlation. Cut out the alignment patch and
                    # compute its translation relative to the reference.
                    frame_window = self.frames.frames_mono_blurred(
                        idx)[self.y_low_opt:self.y_high_opt,
                             self.x_low_opt:self.x_high_opt]
                    self.frame_shifts[idx] = Miscellaneous.translation(
                        self.reference_window, frame_window,
                        self.reference_window_shape)
                    continue

                elif self.configuration.align_frames_method == "RadialSearch":
                    # Spiral out from the shift position of the previous frame and search for the
                    # local optimum.
                    [dy_min, dx_min], dev_r = Miscellaneous.search_local_match(
                        self.reference_window,
                        frame,
                        self.y_low_opt - dy_min_cum,
                        self.y_high_opt - dy_min_cum,
                        self.x_low_opt - dx_min_cum,
                        self.x_high_opt - dx_min_cum,
                        self.configuration.align_frames_search_width,
                        self.configuration.align_frames_sampling_stride,
                        sub_pixel=False)
                elif self.configuration.align_frames_method == "SteepestDescent":
                    # Spiral out from the shift position of the previous frame and search for the
                    # local optimum.
                    [dy_min, dx_min
                     ], dev_r = Miscellaneous.search_local_match_gradient(
                         self.reference_window, frame,
                         self.y_low_opt - dy_min_cum,
                         self.y_high_opt - dy_min_cum,
                         self.x_low_opt - dx_min_cum,
                         self.x_high_opt - dx_min_cum,
                         self.configuration.align_frames_search_width,
                         self.configuration.align_frames_sampling_stride,
                         self.dev_table)
                else:
                    raise NotSupportedError("Frame alignment method " +
                                            self.configuration.align_frames_method +
                                            " not supported")

                # Update the cumulative shift values to be used as starting point for the
                # next frame.
                dy_min_cum += dy_min
                dx_min_cum += dx_min
                self.frame_shifts[idx] = [dy_min_cum, dx_min_cum]

                # In "Surface" mode shift computation can fail if no minimum is found within
                # the pre-defined search radius.
                if len(dev_r) > 2 and dy_min == 0 and dx_min == 0:
                    self.failed_index_list.append(idx)
                    self.dev_r_list.append(dev_r)
                    continue

                # If the alignment window gets too close to a frame edge, move it away from
                # that edge by half the border width. First check if the reference window still
                # fits into the shifted frame.
                if self.shape[0] - abs(
                        dy_min_cum) - 2 * self.configuration.align_frames_search_width - \
                        self.configuration.align_frames_border_width < \
                        self.reference_window_shape[0] or self.shape[1] - abs(
                        dx_min_cum) - 2 * self.configuration.align_frames_search_width - \
                        self.configuration.align_frames_border_width < \
                        self.reference_window_shape[1]:
                    raise ArgumentError(
                        "Frame stabilization window does not fit into"
                        " intersection")
                new_reference_window = False
                # Start with the lower y edge.
                while self.y_low_opt - dy_min_cum < \
                        self.configuration.align_frames_search_width + \
                        self.configuration.align_frames_border_width / 2:
                    self.y_low_opt += ceil(
                        self.configuration.align_frames_border_width / 2.)
                    self.y_high_opt += ceil(
                        self.configuration.align_frames_border_width / 2.)
                    new_reference_window = True
                # Now the upper y edge.
                while self.y_high_opt - dy_min_cum > self.shape[
                    0] - self.configuration.align_frames_search_width - \
                        self.configuration.align_frames_border_width / 2:
                    self.y_low_opt -= ceil(
                        self.configuration.align_frames_border_width / 2.)
                    self.y_high_opt -= ceil(
                        self.configuration.align_frames_border_width / 2.)
                    new_reference_window = True
                # Now the lower x edge.
                while self.x_low_opt - dx_min_cum < \
                        self.configuration.align_frames_search_width + \
                        self.configuration.align_frames_border_width / 2:
                    self.x_low_opt += ceil(
                        self.configuration.align_frames_border_width / 2.)
                    self.x_high_opt += ceil(
                        self.configuration.align_frames_border_width / 2.)
                    new_reference_window = True
                # Now the upper x edge.
                while self.x_high_opt - dx_min_cum > self.shape[
                    1] - self.configuration.align_frames_search_width - \
                        self.configuration.align_frames_border_width / 2:
                    self.x_low_opt -= ceil(
                        self.configuration.align_frames_border_width / 2.)
                    self.x_high_opt -= ceil(
                        self.configuration.align_frames_border_width / 2.)
                    new_reference_window = True
                # If the window was moved, update the "reference_window".
                if new_reference_window:
                    self.reference_window = self.frames.frames_mono_blurred(
                        self.frame_ranks_max_index)[
                            self.y_low_opt:self.y_high_opt,
                            self.x_low_opt:self.x_high_opt].astype(int32)

                number_processed += 1

        if self.progress_signal is not None:
            self.progress_signal.emit("Align all frames", 100)

        # Compute the shape of the area contained in all frames in the form [[y_low, y_high],
        # [x_low, x_high]]
        self.intersection_shape = [[
            max(b[0] for b in self.frame_shifts),
            min(b[0] for b in self.frame_shifts) + self.shape[0]
        ],
                                   [
                                       max(b[1] for b in self.frame_shifts),
                                       min(b[1] for b in self.frame_shifts) +
                                       self.shape[1]
                                   ]]

        if len(self.failed_index_list) > 0:
            raise InternalError("No valid shift computed for " +
                                str(len(self.failed_index_list)) +
                                " frames: " + str(self.failed_index_list))
Example #10
def config_identity(config_parser,
                    p_vip,
                    admin_user_name,
                    admin_pwd,
                    admin_tenant_name,
                    creds_provider,
                    default_user_name=None,
                    default_pwd=None,
                    alt_user_name=None,
                    alt_pwd=None):
    uri_v3 = get_auth_url(p_vip, 'v3')
    uri_v2 = get_auth_url(p_vip)
    keystone = get_keystone_client(p_vip=p_vip,
                                   username=admin_user_name,
                                   password=admin_pwd,
                                   project_name=admin_tenant_name,
                                   domain_name=DEFAULT_DOMAIN_ID)
    config_parser.set('identity', 'uri_v3', uri_v3)
    config_parser.set('identity', 'uri', uri_v2)
    config_parser.set('identity', 'auth_version', 'v3')
    config_parser.set('auth', 'admin_project_name', admin_tenant_name)
    config_parser.set('auth', 'admin_password', admin_pwd)
    config_parser.set('auth', 'admin_username', admin_user_name)
    # config heat_plugin
    auth_url = get_auth_url2(p_vip, 'v3')
    config_parser.set('heat_plugin', 'auth_url', auth_url)
    config_parser.set('heat_plugin', 'auth_version', '3')
    config_parser.set('heat_plugin', 'admin_username', admin_user_name)
    config_parser.set('heat_plugin', 'admin_password', admin_pwd)
    # Create tempest test role
    test_role = create_if_not_exist(keystone.roles, 'role', ROLE_NAME)
    config_parser.set('auth', 'tempest_roles', ROLE_NAME)
    # Both the SQL backend and the LDAP backend use the 'Default' domain.
    config_parser.set('auth', 'admin_domain_name', 'Default')
    default_domain = keystone.domains.get(DEFAULT_DOMAIN_ID)
    default_tenant = create_if_not_exist(keystone.projects,
                                         'project',
                                         TENANT_NAME,
                                         domain=default_domain)
    # config_parser.set('heat_plugin', 'username', 'admin')
    # config_parser.set('heat_plugin', 'password', 'vmware')
    # config_parser.set('heat_plugin', 'project_name', 'admin')
    # config_parser.set('heat_plugin', 'user_domain_name', 'Default')
    # config_parser.set('heat_plugin', 'project_domain_name', 'Default')
    try:
        heat_user = create_if_not_exist(keystone.users,
                                        'user',
                                        'demo',
                                        password='******',
                                        default_project=default_tenant)
        grant_role_on_project(keystone, default_tenant, heat_user, test_role)
        config_parser.set('heat_plugin', 'username', 'admin')
        config_parser.set('heat_plugin', 'password', 'vmware')
        config_parser.set('heat_plugin', 'project_name', 'admin')
        config_parser.set('heat_plugin', 'user_domain_name', 'Default')
        config_parser.set('heat_plugin', 'project_domain_name', 'Default')
    except Exception:
        config_parser.set('heat_plugin', 'username', 'admin')
        config_parser.set('heat_plugin', 'password', 'vmware')
        config_parser.set('heat_plugin', 'project_name', 'admin')
        config_parser.set('heat_plugin', 'user_domain_name', 'local')
        config_parser.set('heat_plugin', 'project_domain_name', 'local')
    if creds_provider in [LEGACY_PROVIDER, PRE_PROVISIONED_PROVIDER]:
        # Create default tenant and user
        default_domain = keystone.domains.get(DEFAULT_DOMAIN_ID)
        default_tenant = create_if_not_exist(keystone.projects,
                                             'project',
                                             TENANT_NAME,
                                             domain=default_domain)
        default_user = create_if_not_exist(keystone.users,
                                           'user',
                                           default_user_name,
                                           password=default_pwd,
                                           tenant_id=default_tenant.id)

        grant_role_on_project(keystone, default_tenant, default_user,
                              test_role)
        # Create the alternate tenant and user
        alt_tenant = create_if_not_exist(keystone.projects,
                                         'project',
                                         ALT_TENANT_NAME,
                                         domain=default_domain)
        alt_user = create_if_not_exist(keystone.users,
                                       'user',
                                       alt_user_name,
                                       password=alt_pwd,
                                       tenant_id=alt_tenant.id)

        grant_role_on_project(keystone, alt_tenant, alt_user, test_role)
        if LEGACY_PROVIDER == creds_provider:
            # Legacy provider can only be used before Newton release.
            config_parser.set('identity', 'tenant_name', TENANT_NAME)
            config_parser.set('identity', 'username', default_user_name)
            config_parser.set('identity', 'password', default_pwd)
            config_parser.set('identity', 'alt_tenant_name', ALT_TENANT_NAME)
            config_parser.set('identity', 'alt_username', alt_user_name)
            config_parser.set('identity', 'alt_password', alt_pwd)
        elif PRE_PROVISIONED_PROVIDER == creds_provider:
            accounts = list()
            accounts.append(
                add_account(default_user_name,
                            default_pwd,
                            TENANT_NAME,
                            default_tenant.id,
                            roles=[ROLE_NAME]))
            accounts.append(
                add_account(alt_user_name,
                            alt_pwd,
                            ALT_TENANT_NAME,
                            alt_tenant.id,
                            roles=[ROLE_NAME]))
            project_id = None
            for project in keystone.projects.list():
                if project.name == admin_tenant_name \
                        and project.domain_id == DEFAULT_DOMAIN_ID:
                    project_id = project.id
                    break
            if not project_id:
                raise NotFoundError('Project %s not found' % admin_tenant_name)
            accounts.append(
                add_account(admin_user_name,
                            admin_pwd,
                            admin_tenant_name,
                            project_id,
                            roles=['admin']))
            test_accounts_file = os.path.join(os.getcwd(), TEMPEST_DIR,
                                              'etc/accounts.yaml')
            with open(test_accounts_file, 'w') as fh:
                yaml.dump(accounts,
                          fh,
                          default_flow_style=False,
                          default_style=False,
                          indent=2,
                          encoding='utf-8',
                          allow_unicode=True)
            config_parser.set('auth', 'test_accounts_file', test_accounts_file)
        config_parser.set('auth', 'use_dynamic_credentials', 'false')
        config_parser.set('auth', 'create_isolated_networks', 'false')
    elif creds_provider == DYNAMIC_PROVIDER:
        config_parser.set('auth', 'use_dynamic_credentials', 'true')
        config_parser.set('auth', 'create_isolated_networks', 'false')
    else:
        raise NotSupportedError('Unsupported credentials provider: %s' %
                                creds_provider)
    # Create role for object storage
    create_if_not_exist(keystone.roles, 'role', STORAGE_ROLE_NAME)
    config_parser.set('object-storage', 'operator_role', STORAGE_ROLE_NAME)
Example #11
    def setPhoto(self, image, overlay_exclude_mark=False):
        """
        Convert a color or grayscale image to a pixmap and assign it to the photo object.

        :param image: Image to be displayed. The image is assumed to be a color or grayscale
                      image with data type uint8 or uint16.
        :param overlay_exclude_mark: If True, overlay a crossed-out red circle in the upper left
                                     corner of the image.
        :return: -
        """

        # Indicate that an image is being loaded.
        self.image_loading_busy = True

        # Convert the image into uint8 format. If the frame type is uint16, values correspond to
        # 16bit resolution.
        if image.dtype == uint16:
            image_uint8 = (image / 256.).astype(uint8)
        elif image.dtype == uint8:
            image_uint8 = image.astype(uint8)
        else:
            raise NotSupportedError(
                "Attempt to set a photo in frame viewer with type neither"
                " uint8 nor uint16")

        self.shape_y = image_uint8.shape[0]
        self.shape_x = image_uint8.shape[1]

        # Normalize the frame brightness.
        image_uint8 = normalize(image_uint8,
                                None,
                                alpha=0,
                                beta=255,
                                norm_type=NORM_MINMAX)

        # Overlay a crossed-out red circle in the upper left image corner.
        if overlay_exclude_mark:
            if len(image_uint8.shape) == 2:
                image_uint8 = cvtColor(image_uint8, COLOR_GRAY2RGB)

            # The position and size of the mark are relative to the image size.
            pos_x = int(round(self.shape_x / 10))
            pos_y = int(round(self.shape_y / 10))
            diameter = int(round(min(pos_x, pos_y) / 4))
            circle(image_uint8, (pos_x, pos_y), diameter, (255, 0, 0), 2)
            line(image_uint8, (pos_x - diameter, pos_y + diameter),
                 (pos_x + diameter, pos_y - diameter), (255, 0, 0), 2)

        # The image is monochrome:
        if len(image_uint8.shape) == 2:
            qt_image = QtGui.QImage(image_uint8, self.shape_x, self.shape_y,
                                    self.shape_x,
                                    QtGui.QImage.Format_Grayscale8)
        # The image is RGB color.
        else:
            qt_image = QtGui.QImage(image_uint8, self.shape_x, self.shape_y,
                                    3 * self.shape_x,
                                    QtGui.QImage.Format_RGB888)
        pixmap = QtGui.QPixmap(qt_image)

        if pixmap and not pixmap.isNull():
            self._empty = False
            self._photo.setPixmap(pixmap)
        else:
            self._empty = True
            self._photo.setPixmap(QtGui.QPixmap())

        # Release the image loading flag.
        self.image_loading_busy = False
Example #12
    def frame_score(self):
        """
        Compute the frame quality values and normalize them such that the best value is 1.

        :return: -
        """

        if self.configuration.rank_frames_method == "xy gradient":
            method = Miscellaneous.local_contrast
        elif self.configuration.rank_frames_method == "Laplace":
            method = Miscellaneous.local_contrast_laplace
        elif self.configuration.rank_frames_method == "Sobel":
            method = Miscellaneous.local_contrast_sobel
        else:
            raise NotSupportedError("Ranking method " + self.configuration.rank_frames_method +
                                    " not supported")

        # Reset frames index translation, if active.
        if self.frames.index_translation_active:
            self.frames.reset_index_translation()

        # For all frames compute the quality with the selected method.
        if method != Miscellaneous.local_contrast_laplace:
            for frame_index in range(self.number_original):
                frame = self.frames.frames_mono_blurred(frame_index)
                if self.progress_signal is not None and frame_index % self.signal_step_size == 1:
                    self.progress_signal.emit("Rank all frames",
                                              int(round(10*frame_index / self.number_original) * 10))
                if self.configuration.frames_normalization:
                    self.frame_ranks_original.append(
                        method(frame, self.configuration.rank_frames_pixel_stride) /
                        self.frames.average_brightness(frame_index))
                else:
                    self.frame_ranks_original.append(
                        method(frame, self.configuration.rank_frames_pixel_stride))
        else:
            for frame_index in range(self.number_original):
                frame = self.frames.frames_mono_blurred_laplacian(frame_index)
                # self.frame_ranks.append(mean((frame - frame.mean())**2))
                if self.progress_signal is not None and frame_index % self.signal_step_size == 1:
                    self.progress_signal.emit("Rank all frames",
                                              int(round(10*frame_index / self.number_original) * 10))
                if self.configuration.frames_normalization:
                    self.frame_ranks_original.append(meanStdDev(frame)[1][0][0] /
                        self.frames.average_brightness(frame_index))
                else:
                    self.frame_ranks_original.append(meanStdDev(frame)[1][0][0])

        # Sort the frame indices in descending order of quality.
        self.quality_sorted_indices_original = sorted(range(self.number_original),
                                             key=self.frame_ranks_original.__getitem__, reverse=True)

        # Compute the inverse index list: For each frame the rank_index is the corresponding index
        # in the sorted frame_ranks list.
        self.rank_indices_original = [self.quality_sorted_indices_original.index(index) for index in
                             range(self.number_original)]

        if self.progress_signal is not None:
            self.progress_signal.emit("Rank all frames", 100)

        # Set the index of the best frame, and normalize all quality values.
        self.frame_ranks_max_index_original = self.quality_sorted_indices_original[0]
        self.frame_ranks_max_value_original = self.frame_ranks_original[self.frame_ranks_max_index_original]
        self.frame_ranks_original /= self.frame_ranks_max_value_original

        # Keep the original ranking data and prepare for index translation. The translation can be
        # reset later, and the original ranking be re-established.
        self.number = self.number_original
        self.frame_ranks = self.frame_ranks_original
        self.quality_sorted_indices = self.quality_sorted_indices_original
        self.rank_indices = self.rank_indices_original
        self.frame_ranks_max_index = self.frame_ranks_max_index_original
        self.frame_ranks_max_value = self.frame_ranks_max_value_original
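A standalone sketch of the rank bookkeeping used above: sorting frame indices by descending quality and building the inverse index list (the quality values are made up):

# Hypothetical quality values for three frames.
frame_ranks = [0.4, 1.0, 0.7]

# Frame indices sorted by descending quality: frame 1 is best, then frame 2, then frame 0.
quality_sorted_indices = sorted(range(len(frame_ranks)),
                                key=frame_ranks.__getitem__, reverse=True)

# Inverse list: for each frame, its position in the quality ranking.
rank_indices = [quality_sorted_indices.index(i) for i in range(len(frame_ranks))]

print(quality_sorted_indices, rank_indices)   # -> [1, 2, 0] [2, 0, 1]
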
Example #13
    def average_frame(self, average_frame_number=None, color=False):
        """
        Compute an averaged frame from the best (monochrome) frames.

        :param average_frame_number: Number of best frames to be averaged. If None, the number is
                                     computed from the configuration parameter
                                      "align_frames_average_frame_percent"
        :param color: If True, compute an average of the original (color) images. Otherwise use the
                      monochrome frame versions.
        :return: The averaged frame
        """

        if self.intersection_shape is None:
            raise WrongOrderingError(
                "Method 'average_frame' is called before 'align_frames'")

        # Compute global offsets of the current frame relative to the intersection frame. Start
        # with initializing lists which for each frame give the dy and dx displacements between
        # the reference frame and the current frame.
        self.dy = [
            self.intersection_shape[0][0] - self.frame_shifts[idx][0]
            for idx in range(self.frames.number)
        ]
        self.dx = [
            self.intersection_shape[1][0] - self.frame_shifts[idx][1]
            for idx in range(self.frames.number)
        ]

        # If the number of frames is not specified explicitly, compute it from configuration.
        if average_frame_number is not None:
            self.average_frame_number = average_frame_number
        else:
            self.average_frame_number = max(
                ceil(self.frames.number *
                     self.configuration.align_frames_average_frame_percent /
                     100.), 1)

        shifts = [
            self.frame_shifts[i]
            for i in self.quality_sorted_indices[:self.average_frame_number]
        ]

        # Create an empty numpy buffer. The first and second dimensions are the y and x
        # coordinates. For color frames add a third dimension. Add all frames to the buffer.
        if color:
            self.mean_frame = zeros([
                self.intersection_shape[0][1] - self.intersection_shape[0][0],
                self.intersection_shape[1][1] - self.intersection_shape[1][0],
                3
            ],
                                    dtype=float32)
            for idx in range(self.average_frame_number):
                self.mean_frame += self.frames.frames(self.quality_sorted_indices[idx]) \
                    [self.intersection_shape[0][0] - shifts[idx][0]:
                    self.intersection_shape[0][1] - shifts[idx][0],
                    self.intersection_shape[1][0] - shifts[idx][1]:
                    self.intersection_shape[1][1] - shifts[idx][1], :]
        else:
            self.mean_frame = zeros([
                self.intersection_shape[0][1] - self.intersection_shape[0][0],
                self.intersection_shape[1][1] - self.intersection_shape[1][0]
            ],
                                    dtype=float32)
            for idx in range(self.average_frame_number):
                self.mean_frame += self.frames.frames_mono(self.quality_sorted_indices[idx]) \
                    [self.intersection_shape[0][0] - shifts[idx][0]:
                    self.intersection_shape[0][1] - shifts[idx][0],
                    self.intersection_shape[1][0] - shifts[idx][1]:
                    self.intersection_shape[1][1] - shifts[idx][1]]

        # Compute the mean frame by dividing by the number of frames, and convert values to 16bit.
        if self.frames.dt0 == uint8:
            scaling = 256. / self.average_frame_number
        elif self.frames.dt0 == uint16:
            scaling = 1. / self.average_frame_number
        else:
            raise NotSupportedError(
                "Attempt to compute the average frame from images with type"
                " neither uint8 nor uint16")

        self.mean_frame = (self.mean_frame * scaling).astype(int32)

        return self.mean_frame
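A standalone sketch of the scaling at the end of average_frame: summing N uint8 frames and multiplying by 256/N rescales the mean to the 16-bit range (for uint16 input the factor is 1/N); the frames below are synthetic:

import numpy as np

# Four synthetic 8-bit frames with a constant pixel value of 200.
n_frames = 4
frames_8bit = [np.full((2, 2), 200, dtype=np.uint8) for _ in range(n_frames)]

mean_frame = np.zeros((2, 2), dtype=np.float32)
for frame in frames_8bit:
    mean_frame += frame
mean_frame = (mean_frame * (256. / n_frames)).astype(np.int32)
print(mean_frame)   # -> every pixel 51200, i.e. the 8-bit mean (200) rescaled to 16 bit
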
Example #14
    def align_frames(self):
        """
        Compute the displacement of all frames relative to the sharpest frame using the alignment
        rectangle.

        :return: -
        """

        if self.configuration.align_frames_mode == "Surface":
            # For "Surface" mode the alignment rectangle has to be selected first.
            if self.x_low_opt is None:
                raise WrongOrderingError(
                    "Method 'align_frames' is called before 'select_alignment_rect'"
                )

            # From the sharpest frame cut out the alignment rectangle. The shifts of all other
            # frames will be computed relative to this patch.
            if self.configuration.align_frames_method == "MultiLevelCorrelation":
                # MultiLevelCorrelation uses two reference windows with different resolution. Also,
                # please note that the data type is float32 in this case.
                reference_frame = self.frames.frames_mono_blurred(
                    self.frame_ranks_max_index).astype(float32)
                self.reference_window = reference_frame[
                    self.y_low_opt:self.y_high_opt,
                    self.x_low_opt:self.x_high_opt]
                # For the first phase a box with half the resolution is constructed.
                self.reference_window_first_phase = self.reference_window[::2, ::2]
            else:
                # For all other methods, the reference window is of type int32.
                reference_frame = self.frames.frames_mono_blurred(
                    self.frame_ranks_max_index).astype(int32)
                self.reference_window = reference_frame[
                    self.y_low_opt:self.y_high_opt,
                    self.x_low_opt:self.x_high_opt]

            self.reference_window_shape = self.reference_window.shape

        elif self.configuration.align_frames_mode == "Planet":
            # For "Planetary" mode compute the center of gravity for the reference image.
            cog_reference_y, cog_reference_x = AlignFrames.center_of_gravity(
                self.frames.frames_mono_blurred(self.frame_ranks_max_index))

        else:
            raise NotSupportedError("Frame alignment mode '" +
                                    self.configuration.align_frames_mode +
                                    "' not supported")

        # Initialize a list which for each frame contains the shifts in y and x directions.
        self.frame_shifts = [None] * self.frames.number

        # Initialize a counter of processed frames for progress bar signalling. It is set to one
        # because in the loop below the optimal frame is not counted.
        number_processed = 1

        # Loop over all frames. Begin with the sharpest (reference) frame
        for idx in chain(reversed(range(self.frame_ranks_max_index + 1)),
                         range(self.frame_ranks_max_index,
                               self.frames.number)):

            if idx == self.frame_ranks_max_index:
                # For the sharpest frame the displacement is 0 because it is used as the reference.
                self.frame_shifts[idx] = [0, 0]
                # Initialize two variables which keep the shift values of the previous step as
                # the starting point for the next step. This reduces the search radius if frames are
                # drifting.
                dy_min_cum = dx_min_cum = 0

            # For all other frames: Compute the global shift, using the "blurred" monochrome image.
            else:
                # After every "signal_step_size"th frame, send a progress signal to the main GUI.
                if self.progress_signal is not None and number_processed % self.signal_step_size == 1:
                    self.progress_signal.emit(
                        "Align all frames",
                        int(
                            round(10 * number_processed / self.frames.number) *
                            10))

                frame = self.frames.frames_mono_blurred(idx)

                # In Planetary mode the shift of the "center of gravity" of the image is computed.
                # This algorithm cannot fail.
                if self.configuration.align_frames_mode == "Planet":

                    cog_frame = AlignFrames.center_of_gravity(frame)
                    self.frame_shifts[idx] = [
                        cog_reference_y - cog_frame[0],
                        cog_reference_x - cog_frame[1]
                    ]

                # In Surface mode various methods can be used to measure the shift from one frame
                # to the next. The method "Translation" is special: Using phase correlation it is
                # the only method not based on a local search algorithm. It is treated differently
                # here because it does not require a re-shifting of the alignment patch.
                elif self.configuration.align_frames_method == "Translation":
                    # The shift is computed with cross-correlation. Cut out the alignment patch and
                    # compute its translation relative to the reference.
                    frame_window = self.frames.frames_mono_blurred(
                        idx)[self.y_low_opt:self.y_high_opt,
                             self.x_low_opt:self.x_high_opt]
                    self.frame_shifts[idx] = Miscellaneous.translation(
                        self.reference_window, frame_window,
                        self.reference_window_shape)

                # Now treat all "Surface" mode cases using local search algorithms. In each case
                # the result is the shift vector [dy_min, dx_min]. The search can fail (if within
                # the search radius no optimum is found). If that happens for at least one frame,
                # an exception is raised. The workflow thread then tries again using another
                # alignment patch.
                else:

                    if self.configuration.align_frames_method == "MultiLevelCorrelation":
                        # The shift is computed in two phases: First on a coarse pixel grid,
                        # and then on the original grid in a small neighborhood around the optimum
                        # found in the first phase.
                        shift_y_local_first_phase, shift_x_local_first_phase, \
                        success_first_phase, shift_y_local_second_phase, \
                        shift_x_local_second_phase, success_second_phase = \
                            Miscellaneous.multilevel_correlation(
                            self.reference_window_first_phase, frame,
                            self.configuration.frames_gauss_width,
                            self.reference_window, self.y_low_opt - dy_min_cum,
                                                          self.y_high_opt - dy_min_cum,
                                                          self.x_low_opt - dx_min_cum,
                                                          self.x_high_opt - dx_min_cum,
                            self.configuration.align_frames_search_width,
                            weight_matrix_first_phase=None)

                        success = success_first_phase and success_second_phase
                        if success:
                            [dy_min, dx_min] = [
                                shift_y_local_first_phase +
                                shift_y_local_second_phase,
                                shift_x_local_first_phase +
                                shift_x_local_second_phase
                            ]

                    elif self.configuration.align_frames_method == "RadialSearch":
                        # Spiral out from the shift position of the previous frame and search for the
                        # local optimum.
                        [dy_min,
                         dx_min], dev_r = Miscellaneous.search_local_match(
                             self.reference_window,
                             frame,
                             self.y_low_opt - dy_min_cum,
                             self.y_high_opt - dy_min_cum,
                             self.x_low_opt - dx_min_cum,
                             self.x_high_opt - dx_min_cum,
                             self.configuration.align_frames_search_width,
                             self.configuration.align_frames_sampling_stride,
                             sub_pixel=False)

                        # The search was not successful if a zero shift was reported after more
                        # than two search cycles.
                        success = len(dev_r) <= 2 or dy_min != 0 or dx_min != 0

                    elif self.configuration.align_frames_method == "SteepestDescent":
                        # Spiral out from the shift position of the previous frame and search for the
                        # local optimum.
                        [dy_min, dx_min
                         ], dev_r = Miscellaneous.search_local_match_gradient(
                             self.reference_window, frame,
                             self.y_low_opt - dy_min_cum,
                             self.y_high_opt - dy_min_cum,
                             self.x_low_opt - dx_min_cum,
                             self.x_high_opt - dx_min_cum,
                             self.configuration.align_frames_search_width,
                             self.configuration.align_frames_sampling_stride,
                             self.dev_table)

                        # The search was not successful if a zero shift was reported after more
                        # than two search cycles.
                        success = len(dev_r) <= 2 or dy_min != 0 or dx_min != 0

                    else:
                        raise NotSupportedError(
                            "Frame alignment method " +
                            self.configuration.align_frames_method +
                            " not supported")

                    # If the local search was unsuccessful, quit the frame loop with an error.
                    if not success:
                        raise InternalError("frame " + str(idx))

                    # Update the cumulative shift values to be used as starting point for the
                    # next frame.
                    dy_min_cum += dy_min
                    dx_min_cum += dx_min
                    self.frame_shifts[idx] = [dy_min_cum, dx_min_cum]

                    # If the alignment window gets too close to a frame edge, move it away from
                    # that edge by half the border width. First check if the reference window still
                    # fits into the shifted frame.
                    if self.shape[0] - abs(
                            dy_min_cum) - 2 * self.configuration.align_frames_search_width - \
                            self.configuration.align_frames_border_width < \
                            self.reference_window_shape[0] or self.shape[1] - abs(
                            dx_min_cum) - 2 * self.configuration.align_frames_search_width - \
                            self.configuration.align_frames_border_width < \
                            self.reference_window_shape[1]:
                        raise ArgumentError(
                            "Frame stabilization window does not fit into"
                            " intersection")

                    new_reference_window = False
                    # Start with the lower y edge.
                    while self.y_low_opt - dy_min_cum < \
                            self.configuration.align_frames_search_width + \
                            self.configuration.align_frames_border_width / 2:
                        self.y_low_opt += ceil(
                            self.configuration.align_frames_border_width / 2.)
                        self.y_high_opt += ceil(
                            self.configuration.align_frames_border_width / 2.)
                        new_reference_window = True
                    # Now the upper y edge.
                    while self.y_high_opt - dy_min_cum > self.shape[0] - \
                            self.configuration.align_frames_search_width - \
                            self.configuration.align_frames_border_width / 2:
                        self.y_low_opt -= ceil(
                            self.configuration.align_frames_border_width / 2.)
                        self.y_high_opt -= ceil(
                            self.configuration.align_frames_border_width / 2.)
                        new_reference_window = True
                    # Now the lower x edge.
                    while self.x_low_opt - dx_min_cum < \
                            self.configuration.align_frames_search_width + \
                            self.configuration.align_frames_border_width / 2:
                        self.x_low_opt += ceil(
                            self.configuration.align_frames_border_width / 2.)
                        self.x_high_opt += ceil(
                            self.configuration.align_frames_border_width / 2.)
                        new_reference_window = True
                    # Now the upper x edge.
                    while self.x_high_opt - dx_min_cum > self.shape[1] - \
                            self.configuration.align_frames_search_width - \
                            self.configuration.align_frames_border_width / 2:
                        self.x_low_opt -= ceil(
                            self.configuration.align_frames_border_width / 2.)
                        self.x_high_opt -= ceil(
                            self.configuration.align_frames_border_width / 2.)
                        new_reference_window = True

                    # If the window was moved, update the "reference window(s)".
                    if new_reference_window:
                        if self.configuration.align_frames_method == "MultiLevelCorrelation":
                            self.reference_window = reference_frame[
                                self.y_low_opt:self.y_high_opt,
                                self.x_low_opt:self.x_high_opt]
                            # For the first phase a box with half the resolution is constructed.
                            self.reference_window_first_phase = \
                                self.reference_window[::2, ::2]
                        else:
                            self.reference_window = reference_frame[
                                self.y_low_opt:self.y_high_opt,
                                self.x_low_opt:self.x_high_opt]

                # This frame is processed, go to next one.
                number_processed += 1

        if self.progress_signal is not None:
            self.progress_signal.emit("Align all frames", 100)

        # Compute the shape of the area contained in all frames in the form [[y_low, y_high],
        # [x_low, x_high]]
        self.intersection_shape = [
            [max(b[0] for b in self.frame_shifts),
             min(b[0] for b in self.frame_shifts) + self.shape[0]],
            [max(b[1] for b in self.frame_shifts),
             min(b[1] for b in self.frame_shifts) + self.shape[1]]]
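
        # Worked example with hypothetical numbers: for frame_shifts = [[0, 0], [3, -2], [-1, 4]]
        # and shape = (100, 200) the intersection becomes [[3, 99], [4, 198]], i.e. rows 3..98
        # and columns 4..197 are contained in every shifted frame.
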
    def compute_frame_qualities(self):
        """
        For each alignment point compute a ranking of best frames. Store the list in the
        alignment point dictionary with the key 'best_frame_indices'.

        Consider the special case that sampled-down Laplacians have been stored for frame ranking.
        In this case they can be re-used for ranking the boxes around alignment points (but only
        if "Laplace" has been selected for alignment point ranking).

        :return: -
        """

        # If the user has entered a value for the number of frames, use it.
        if self.configuration.alignment_points_frame_number is not None:
            self.stack_size = self.configuration.alignment_points_frame_number
        # Otherwise compute the stack size from the given percentage. Take at least one frame.
        else:
            self.stack_size = max(
                int(
                    ceil(self.frames.number *
                         self.configuration.alignment_points_frame_percent /
                         100.)), 1)
        # Select the ranking method.
        if self.configuration.alignment_points_rank_method == "xy gradient":
            method = Miscellaneous.local_contrast
        elif self.configuration.alignment_points_rank_method == "Laplace":
            method = Miscellaneous.local_contrast_laplace
        elif self.configuration.alignment_points_rank_method == "Sobel":
            method = Miscellaneous.local_contrast_sobel
        else:
            raise NotSupportedError(
                "Ranking method " +
                self.configuration.alignment_points_rank_method +
                " not supported")

        # Compute the frequency of progress signals in the computational loop.
        if self.progress_signal is not None:
            self.signal_loop_length = max(self.frames.number, 1)
            self.signal_step_size = max(round(self.frames.number / 10), 1)

        # Initialize a list which for each AP contains the qualities of all frames at this point.
        for alignment_point in self.alignment_points:
            alignment_point['frame_qualities'] = []

        if self.configuration.rank_frames_method != "Laplace" or \
                self.configuration.alignment_points_rank_method != "Laplace":
            # There are no stored Laplacians, or they cannot be used for the specified method.
            # Cycle through all frames and alignment points:
            for frame_index in range(self.frames.number):
                frame = self.frames.frames_mono_blurred(frame_index)

                # After every "signal_step_size"th frame, send a progress signal to the main GUI.
                if self.progress_signal is not None and frame_index % self.signal_step_size == 1:
                    self.progress_signal.emit(
                        "Rank frames at APs",
                        int((frame_index / self.signal_loop_length) * 100.))

                for ap_index, alignment_point in enumerate(
                        self.alignment_points):
                    # Compute patch bounds within the current frame.
                    y_low = max(
                        0, alignment_point['patch_y_low'] +
                        self.align_frames.dy[frame_index])
                    y_high = min(
                        self.frames.shape[0], alignment_point['patch_y_high'] +
                        self.align_frames.dy[frame_index])
                    x_low = max(
                        0, alignment_point['patch_x_low'] +
                        self.align_frames.dx[frame_index])
                    x_high = min(
                        self.frames.shape[1], alignment_point['patch_x_high'] +
                        self.align_frames.dx[frame_index])
                    # Compute the frame quality and append it to the list for this alignment point.
                    alignment_point['frame_qualities'].append(
                        method(frame[y_low:y_high, x_low:x_high],
                               self.configuration.alignment_points_rank_pixel_stride))
        else:
            # Sampled-down Laplacians of all blurred frames have been computed in
            # "frames.frames_mono_blurred_laplacian". Cut out boxes around alignment points from
            # those objects, rather than computing new Laplacians. Cycle through all frames and
            # alignment points. Use the blurred monochrome image for ranking.
            for frame_index in range(self.frames.number):
                frame = self.frames.frames_mono_blurred_laplacian(frame_index)

                # After every "signal_step_size"th frame, send a progress signal to the main GUI.
                if self.progress_signal is not None and frame_index % self.signal_step_size == 1:
                    self.progress_signal.emit(
                        "Rank frames at APs",
                        int((frame_index / self.signal_loop_length) * 100.))

                for ap_index, alignment_point in enumerate(
                        self.alignment_points):
                    # Compute patch bounds within the current frame.
                    y_low = int(
                        max(
                            0, alignment_point['patch_y_low'] +
                            self.align_frames.dy[frame_index]) /
                        self.configuration.align_frames_sampling_stride)
                    y_high = int(
                        min(
                            self.frames.shape[0],
                            alignment_point['patch_y_high'] +
                            self.align_frames.dy[frame_index]) /
                        self.configuration.align_frames_sampling_stride)
                    x_low = int(
                        max(
                            0, alignment_point['patch_x_low'] +
                            self.align_frames.dx[frame_index]) /
                        self.configuration.align_frames_sampling_stride)
                    x_high = int(
                        min(
                            self.frames.shape[1],
                            alignment_point['patch_x_high'] +
                            self.align_frames.dx[frame_index]) /
                        self.configuration.align_frames_sampling_stride)
                    # Compute the frame quality and append it to the list for this alignment point.
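                    # "meanStdDev" (OpenCV) returns a (mean, stddev) pair of arrays; the
                    # expression [1][0][0] extracts the scalar standard deviation of the
                    # Laplacian patch, which serves as the local sharpness measure here.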
                    alignment_point['frame_qualities'].append(
                        meanStdDev(frame[y_low:y_high, x_low:x_high])[1][0][0])

        if self.progress_signal is not None:
            self.progress_signal.emit("Rank frames at APs", 100)

        # Initialize the alignment point lists for all frames.
        self.frames.reset_alignment_point_lists()
        # For each alignment point sort the computed quality ranks in descending order.
        for alignment_point_index, alignment_point in enumerate(
                self.alignment_points):
            # Truncate the list to the number of frames to be stacked for each alignment point.
            alignment_point['best_frame_indices'] = sorted(
                range(len(alignment_point['frame_qualities'])),
                key=alignment_point['frame_qualities'].__getitem__,
                reverse=True)[:self.stack_size]
            # Add this alignment point to the AP lists of those frames where the AP is to be used.
            for frame_index in alignment_point['best_frame_indices']:
                self.frames.used_alignment_points[frame_index].append(
                    alignment_point_index)
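
The 'best_frame_indices' selection above is essentially an argsort of the per-frame quality
values of each alignment point. A minimal, self-contained sketch of that idiom, using made-up
quality values and stack size (not values taken from the module):

frame_qualities = [0.42, 0.81, 0.17, 0.93, 0.77]  # hypothetical per-frame quality values
stack_size = 3                                     # hypothetical number of frames to stack

# Frame indices sorted by descending quality, truncated to the stack size.
best_frame_indices = sorted(range(len(frame_qualities)),
                            key=frame_qualities.__getitem__,
                            reverse=True)[:stack_size]
print(best_frame_indices)  # [3, 1, 4]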
Example #16
0
def config_identity(config_parser, private_vip, admin_user_name, admin_pwd,
                    admin_tenant_name, creds_provider, default_user_name=None,
                    default_pwd=None, alt_user_name=None, alt_pwd=None):
    uri_v3 = get_auth_url(private_vip, 'v3')
    uri_v2 = get_auth_url(private_vip)
    keystone = get_keystone_client(private_vip=private_vip,
                                   username=admin_user_name,
                                   password=admin_pwd,
                                   project_name=admin_tenant_name,
                                   domain_name=DEFAULT_DOMAIN_ID)
    admin_tenant = get_entity(keystone.projects, 'project', admin_tenant_name)
    config_parser.set('identity', 'uri_v3', uri_v3)
    config_parser.set('identity', 'uri', uri_v2)
    config_parser.set('identity', 'auth_version', 'v2')
    config_parser.set('auth', 'admin_tenant_name', admin_tenant_name)
    config_parser.set('auth', 'admin_password', admin_pwd)
    config_parser.set('auth', 'admin_username', admin_user_name)
    # Create tempest test role
    test_role = create_if_not_exist(keystone.roles, 'role', ROLE_NAME)
    config_parser.set('auth', 'tempest_roles', ROLE_NAME)
    # The admin domain is 'Default' for both the SQL and the LDAP backend.
    config_parser.set('auth', 'admin_domain_name', 'Default')
    if creds_provider in [LEGACY_PROVIDER, PRE_PROVISIONED_PROVIDER]:
        # Create default tenant and user
        default_domain = keystone.domains.get(DEFAULT_DOMAIN_ID)
        default_tenant = create_if_not_exist(keystone.projects, 'project',
                                             TENANT_NAME,
                                             domain=default_domain)
        default_user = create_if_not_exist(keystone.users, 'user',
                                           default_user_name,
                                           password=default_pwd,
                                           tenant_id=default_tenant.id)

        grant_role_on_project(keystone, default_tenant, default_user,
                              test_role)
        # Create alternate tenant and user
        alt_tenant = create_if_not_exist(keystone.projects, 'project',
                                         ALT_TENANT_NAME,
                                         domain=default_domain)
        alt_user = create_if_not_exist(keystone.users, 'user', alt_user_name,
                                       password=alt_pwd,
                                       tenant_id=alt_tenant.id)

        grant_role_on_project(keystone, alt_tenant, alt_user, test_role)
        if LEGACY_PROVIDER == creds_provider:
            # Legacy provider can only be used before Newton release.
            config_parser.set('identity', 'tenant_name', TENANT_NAME)
            config_parser.set('identity', 'username', default_user_name)
            config_parser.set('identity', 'password', default_pwd)
            config_parser.set('identity', 'alt_tenant_name', ALT_TENANT_NAME)
            config_parser.set('identity', 'alt_username', alt_user_name)
            config_parser.set('identity', 'alt_password', alt_pwd)
        elif PRE_PROVISIONED_PROVIDER == creds_provider:
            accounts = list()
            accounts.append(add_account(default_user_name, default_pwd,
                                        TENANT_NAME, roles=[ROLE_NAME]))
            accounts.append(add_account(alt_user_name, alt_pwd,
                                        ALT_TENANT_NAME, roles=[ROLE_NAME]))
            accounts.append(add_account(admin_user_name, admin_pwd,
                                        admin_tenant_name, roles=['admin']))
            test_accounts_file = os.path.join(os.getcwd(), TEMPEST_DIR,
                                              'etc/accounts.yaml')
            with open(test_accounts_file, 'w') as fh:
                yaml.dump(accounts, fh, default_flow_style=False,
                          default_style=False, indent=2, encoding='utf-8',
                          allow_unicode=True)
            config_parser.set('auth', 'test_accounts_file', test_accounts_file)
        config_parser.set('auth', 'use_dynamic_credentials', 'false')
        config_parser.set('auth', 'create_isolated_networks', 'false')
    elif creds_provider == DYNAMIC_PROVIDER:
        config_parser.set('auth', 'use_dynamic_credentials', 'true')
        config_parser.set('auth', 'create_isolated_networks', 'false')
    else:
        raise NotSupportedError(
            'Credentials provider %s is not supported' % creds_provider)
    # Create role for object storage
    create_if_not_exist(keystone.roles, 'role', STORAGE_ROLE_NAME)
    config_parser.set('object-storage', 'operator_role', STORAGE_ROLE_NAME)
    # Create role and add it to admin user for heat tempest tests.
    heat_role = get_entity(keystone.roles, 'role', 'heat_stack_owner')
    if not heat_role:
        LOG.info("Create role heat_stack_owner")
        heat_role = keystone.roles.create('heat_stack_owner')
        admin_user = get_entity(keystone.users, 'user', admin_user_name)
        grant_role_on_project(keystone, admin_tenant, admin_user, heat_role)
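
In the PRE_PROVISIONED_PROVIDER branch above, the account entries are serialized into
etc/accounts.yaml for tempest's pre-provisioned credentials provider. The helper "add_account"
is not shown in this example; a rough sketch of what it presumably assembles (key names and
sample values here are assumptions, not taken from the source) could look like this:

import yaml

def add_account(username, password, tenant_name, roles=None):
    # Hypothetical stand-in for the helper used above; the real key names may differ.
    return {'username': username,
            'password': password,
            'tenant_name': tenant_name,
            'roles': roles or []}

accounts = [add_account('demo-user', 'demo-pass', 'demo-tenant', roles=['member'])]
print(yaml.dump(accounts, default_flow_style=False))
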
    def compute_shift_alignment_point(self,
                                      frame_mono_blurred,
                                      frame_index,
                                      alignment_point_index,
                                      de_warp=True):
        """
        Compute the [y, x] pixel shift vector at a given alignment point relative to the mean frame.
        Four different methods can be used to compute the shift values:
        - a subpixel algorithm from "skimage.feature"
        - a phase correlation algorithm (miscellaneous.translation)
        - a local search algorithm (spiralling outwards), see method "search_local_match",
          optionally with subpixel accuracy.
        - a local search algorithm, based on steepest descent, see method
          "search_local_match_gradient". This method is faster than the previous one, but it has no
          subpixel option.

        Be careful with the sign of the local shift values. For the first two methods, a positive
        value means that the current frame has to be shifted in the positive coordinate direction
        in order to make objects in the frame match their counterparts in the reference frame.
        In other words: If the shift is positive, an object in the current frame is located at
        lower pixel coordinates than in the reference frame. This is counter-intuitive, but to
        keep all methods consistent, the same convention is followed by the local search methods
        "search_local_match" and "search_local_match_gradient". There, a pixel box around an
        alignment point in the current frame is moved until the content of the box matches the
        corresponding box in the reference frame. If at this point the box is shifted towards a
        higher coordinate value, this value is returned with a negative sign as the local shift.

        :param frame_mono_blurred: Gaussian-blurred version of the frame with index "frame_index"
        :param frame_index: Index of the selected frame in the list of frames
        :param alignment_point_index: Index of the selected alignment point
        :param de_warp: If True, include local warp shift computation. If False, only apply
                        global frame shift.
        :return: Local shift vector [dy, dx]
        """

        alignment_point = self.alignment_points[alignment_point_index]
        y_low = alignment_point['box_y_low']
        y_high = alignment_point['box_y_high']
        x_low = alignment_point['box_x_low']
        x_high = alignment_point['box_x_high']
        reference_box = alignment_point['reference_box']

        # The offsets dy and dx are caused by two effects: First, the mean frame is smaller
        # than the original frames; it only contains their intersection. Second, the given
        # frame is globally shifted as compared to the mean frame.
        dy = self.align_frames.dy[frame_index]
        dx = self.align_frames.dx[frame_index]

        if de_warp:
            # Use subpixel registration from skimage.feature, with accuracy 1/10 pixels.
            if self.configuration.alignment_points_method == 'Subpixel':
                # Cut out the alignment box from the given frame. Take into account the offsets
                # explained above.
                box_in_frame = frame_mono_blurred[y_low + dy:y_high + dy,
                                                  x_low + dx:x_high + dx]
                shift_pixel, error, diffphase = register_translation(
                    reference_box, box_in_frame, 10, space='real')
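                # Note: "register_translation" lives in "skimage.feature" in older scikit-image
                # releases; newer releases expose the same functionality as
                # "skimage.registration.phase_cross_correlation".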

            # Use a simple phase shift computation (contained in module "miscellaneous").
            elif self.configuration.alignment_points_method == 'CrossCorrelation':
                # Cut out the alignment box from the given frame. Take into account the offsets
                # explained above.
                box_in_frame = frame_mono_blurred[y_low + dy:y_high + dy,
                                                  x_low + dx:x_high + dx]
                shift_pixel = Miscellaneous.translation(
                    reference_box, box_in_frame, box_in_frame.shape)

            # Use a local search (see method "search_local_match" below).
            elif self.configuration.alignment_points_method == 'RadialSearch':
                shift_pixel, dev_r = Miscellaneous.search_local_match(
                    reference_box,
                    frame_mono_blurred,
                    y_low + dy,
                    y_high + dy,
                    x_low + dx,
                    x_high + dx,
                    self.configuration.alignment_points_search_width,
                    self.configuration.alignment_points_sampling_stride,
                    sub_pixel=self.configuration.alignment_points_local_search_subpixel)

            # Use the steepest descent search method.
            elif self.configuration.alignment_points_method == 'SteepestDescent':
                shift_pixel, dev_r = Miscellaneous.search_local_match_gradient(
                    reference_box, frame_mono_blurred, y_low + dy, y_high + dy,
                    x_low + dx, x_high + dx,
                    self.configuration.alignment_points_search_width,
                    self.configuration.alignment_points_sampling_stride,
                    self.dev_table)
            else:
                raise NotSupportedError(
                    "The point shift computation method " +
                    self.configuration.alignment_points_method +
                    " is not implemented")

            # Return the computed shift vector.
            return shift_pixel
        else:
            # If no de-warping is computed, just return the zero vector.
            return [0, 0]
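
The sign convention described in the docstring of "compute_shift_alignment_point" is easy to get
wrong. A small, self-contained numpy sketch with made-up coordinates (not taken from the module)
illustrates it: the feature sits at lower coordinates in the current frame, so the reported shift
is positive, and shifting the frame content by that amount re-aligns it with the reference.

import numpy as np

reference = np.zeros((10, 10))
reference[6, 6] = 1.0        # a bright feature at (y, x) = (6, 6) in the reference frame

frame = np.zeros((10, 10))
frame[4, 5] = 1.0            # the same feature at (4, 5) in the current frame

# The feature is at lower coordinates in the current frame, so the local shift is positive.
shift = [6 - 4, 6 - 5]       # [dy, dx] = [2, 1]

# Shifting the frame content in the positive direction restores the match.
realigned = np.roll(frame, shift, axis=(0, 1))
assert realigned[6, 6] == 1.0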