Example #1
    def check_is_volume_bound_to_pool(self, conn, volumeInstance):
        """Check the space consumed of a volume.

        :param conn: the connection information to the ecom server
        :param volumeInstance: the volume Instance
        :returns: 'True', 'False' or 'Undetermined'
        """
        foundSpaceConsumed = None
        unitnames = conn.References(
            volumeInstance, ResultClass='CIM_AllocatedFromStoragePool',
            Role='Dependent')

        for unitname in unitnames:
            propertiesList = unitname.properties.items()
            for properties in propertiesList:
                if properties[0] == 'EMCBoundToThinStoragePool':
                    cimProperties = properties[1]
                    foundSpaceConsumed = cimProperties.value
                    break
            if foundSpaceConsumed is not None:
                break
        if 'True' in six.text_type(foundSpaceConsumed):
            return 'True'
        elif 'False' in six.text_type(foundSpaceConsumed):
            return 'False'
        else:
            return 'Undetermined'
Example #2
    def assertRaisesRegexp(self, expected_exception, expected_regexp,
                           callable_obj, *args, **kwargs):
        """Assert that the message in a raised exception matches a regexp."""
        try:
            callable_obj(*args, **kwargs)
        except expected_exception as exc_value:
            if isinstance(expected_regexp, six.string_types):
                expected_regexp = re.compile(expected_regexp)

            if isinstance(exc_value.args[0], six.text_type):
                if not expected_regexp.search(six.text_type(exc_value)):
                    raise self.failureException(
                        '"%s" does not match "%s"' %
                        (expected_regexp.pattern, six.text_type(exc_value)))
            else:
                if not expected_regexp.search(str(exc_value)):
                    raise self.failureException(
                        '"%s" does not match "%s"' %
                        (expected_regexp.pattern, str(exc_value)))
        else:
            if hasattr(expected_exception, '__name__'):
                excName = expected_exception.__name__
            else:
                excName = str(expected_exception)
            raise self.failureException("%s not raised" % excName)
Example #3
    def _create_override(self, request_user, subsection_grade_model, **override_data):
        """
        Helper method to create a `PersistentSubsectionGradeOverride` object
        and send a `SUBSECTION_OVERRIDE_CHANGED` signal.
        """
        override = PersistentSubsectionGradeOverride.update_or_create_override(
            requesting_user=request_user,
            subsection_grade_model=subsection_grade_model,
            feature=grades_constants.GradeOverrideFeatureEnum.gradebook,
            **override_data
        )

        set_event_transaction_type(grades_events.SUBSECTION_GRADE_CALCULATED)
        create_new_event_transaction_id()

        recalculate_subsection_grade_v3.apply(
            kwargs=dict(
                user_id=subsection_grade_model.user_id,
                anonymous_user_id=None,
                course_id=text_type(subsection_grade_model.course_id),
                usage_id=text_type(subsection_grade_model.usage_key),
                only_if_higher=False,
                expected_modified_time=to_timestamp(override.modified),
                score_deleted=False,
                event_transaction_id=six.text_type(get_event_transaction_id()),
                event_transaction_type=six.text_type(get_event_transaction_type()),
                score_db_table=grades_constants.ScoreDatabaseTableEnum.overrides,
                force_update_subsections=True,
            )
        )
        # Emit events to let our tracking system know we updated the subsection grade
        grades_events.subsection_grade_calculated(subsection_grade_model)
        return override
Example #4
 def test_thread_group_handles_child_exception(self):
     try:
         with context.ThreadGroup() as tg:
             tg.spawn('raiser1', self._raise_test_exc, 'exc1')
     except ex.ThreadException as te:
         self.assertIn('exc1', six.text_type(te))
         self.assertIn('raiser1', six.text_type(te))
Example #5
    def __init__(self, error=None, path=None, message=None,
                 resource=None):
        if path is None:
            path = []
        elif isinstance(path, six.string_types):
            path = [path]

        if resource is not None and not path:
            path = [resource.stack.t.get_section_name(
                resource.stack.t.RESOURCES), resource.name]
        if isinstance(error, Exception):
            if isinstance(error, StackValidationFailed):
                str_error = error.error
                message = error.error_message
                path = path + error.path
                # This is a hack to avoid the py3 (chained exception)
                # json serialization circular reference error from
                # oslo.messaging.
                self.args = error.args
            else:
                str_error = six.text_type(type(error).__name__)
                message = six.text_type(error)
        else:
            str_error = error

        super(StackValidationFailed, self).__init__(error=str_error, path=path,
                                                    message=message)
Example #6
def print_dict(dct, dict_property="Property", wrap=0):
    """Print a `dict` as a table of two columns.

    :param dct: `dict` to print
    :param dict_property: name of the first column
    :param wrap: wrapping for the second column
    """
    pt = prettytable.PrettyTable([dict_property, 'Value'])
    pt.align = 'l'
    for k, v in six.iteritems(dct):
        # convert dict to str to check length
        if isinstance(v, dict):
            v = six.text_type(v)
        if wrap > 0:
            v = textwrap.fill(six.text_type(v), wrap)
        # if value has a newline, add in multiple rows
        # e.g. fault with stacktrace
        if v and isinstance(v, six.string_types) and r'\n' in v:
            lines = v.strip().split(r'\n')
            col1 = k
            for line in lines:
                pt.add_row([col1, line])
                col1 = ''
        else:
            pt.add_row([k, v])

    if six.PY3:
        print(encodeutils.safe_encode(pt.get_string()).decode())
    else:
        print(encodeutils.safe_encode(pt.get_string()))
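# Usage sketch for print_dict above (not part of the original example); it
# assumes the module-level imports the function relies on (prettytable, six,
# textwrap, oslo_utils.encodeutils). Note the multi-row handling keys off the
# literal two-character sequence "\n" found in serialized fault stack traces,
# not a real newline.
fault_info = {  # hypothetical input
    'status': 'ERROR',
    'fault': 'Traceback (most recent call last):\\nBuildError: no valid host',
}
print_dict(fault_info, dict_property='Field')
# Prints a two-column table; the 'fault' value is split into two rows at the
# literal "\n".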
Example #7
def pool_entries(controller, pool_ctrl, count):
    """Context manager to create several catalogue entries for testing.

    The entries are automatically deleted when the context manager
    goes out of scope.

    :param controller: storage handler
    :type controller: queues.storage.base:CatalogueBase
    :param count: number of entries to create
    :type count: int
    :returns: [(project, queue, pool)]
    :rtype: [(six.text_type, six.text_type, six.text_type)]
    """
    spec = [(u'_', six.text_type(uuid.uuid1()), six.text_type(i))
            for i in range(count)]

    for p, q, s in spec:
        pool_ctrl.create(s, 100, s)
        controller.insert(p, q, s)

    yield spec

    for p, q, s in spec:
        controller.delete(p, q)
        pool_ctrl.delete(s)
Example #8
def display_chunk(chunk):
    # type: (Any) -> unicode
    if isinstance(chunk, (list, tuple)):
        if len(chunk) == 1:
            return text_type(chunk[0])
        return '%s .. %s' % (chunk[0], chunk[-1])
    return text_type(chunk)
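# Usage sketch for display_chunk above (not part of the original source),
# assuming "from six import text_type" at module level:
display_chunk([10, 11, 12])   # -> '10 .. 12'
display_chunk((42,))          # -> '42' (single-element sequence)
display_chunk('appendix')     # -> 'appendix' (passed through text_type)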
Example #9
    def create(self, req, body):
        """Creates a new share group snapshot."""
        context = req.environ['manila.context']

        if not self.is_valid_body(body, 'share_group_snapshot'):
            msg = _("'share_group_snapshot' is missing from the request body.")
            raise exc.HTTPBadRequest(explanation=msg)

        share_group_snapshot = body.get('share_group_snapshot', {})

        share_group_id = share_group_snapshot.get('share_group_id')
        if not share_group_id:
            msg = _("Must supply 'share_group_id' attribute.")
            raise exc.HTTPBadRequest(explanation=msg)
        if not uuidutils.is_uuid_like(share_group_id):
            msg = _("The 'share_group_id' attribute must be a uuid.")
            raise exc.HTTPBadRequest(explanation=six.text_type(msg))

        kwargs = {"share_group_id": share_group_id}
        if 'name' in share_group_snapshot:
            kwargs['name'] = share_group_snapshot.get('name')
        if 'description' in share_group_snapshot:
            kwargs['description'] = share_group_snapshot.get('description')

        try:
            new_snapshot = self.share_group_api.create_share_group_snapshot(
                context, **kwargs)
        except exception.ShareGroupNotFound as e:
            raise exc.HTTPBadRequest(explanation=six.text_type(e))
        except exception.InvalidShareGroup as e:
            raise exc.HTTPConflict(explanation=six.text_type(e))

        return self._view_builder.detail(req, dict(new_snapshot.items()))
Example #10
 def test_validation_error(self):
     target = uuid.uuid4().hex
     attribute = uuid.uuid4().hex
     e = exception.ValidationError(target=target, attribute=attribute)
     self.assertValidJsonRendering(e)
     self.assertIn(target, six.text_type(e))
     self.assertIn(attribute, six.text_type(e))
Example #11
    def test_node_handle_exception(self, mock_warning):
        ex = exception.ResourceStatusError(resource_id='FAKE_ID',
                                           status='FAKE_STATUS',
                                           reason='FAKE_REASON')
        node = nodem.Node('node1', self.profile.id, None, self.context)
        node.store(self.context)
        node._handle_exception(self.context, 'ACTION', 'STATUS', ex)
        db_node = db_api.node_get(self.context, node.id)
        self.assertEqual(node.ERROR, db_node.status)
        self.assertEqual('Profile failed in ACTIOing resource '
                         '(FAKE_ID) due to: %s' % six.text_type(ex),
                         db_node.status_reason)
        self.assertEqual('FAKE_ID', db_node.physical_id)
        mock_warning.assert_called_with(self.context, node, 'ACTION',
                                        'STATUS', six.text_type(ex))

        # Exception happens before physical node creation started.
        ex = exception.ResourceCreationFailure(rtype='stack',
                                               code=400,
                                               message='Bad request')
        node = nodem.Node('node1', self.profile.id, None, self.context)
        node.store(self.context)
        node._handle_exception(self.context, 'CREATE', 'STATUS', ex)
        db_node = db_api.node_get(self.context, node.id)
        self.assertEqual(node.ERROR, db_node.status)
        self.assertEqual('Profile failed in creating node due to: '
                         '%s' % six.text_type(ex), db_node.status_reason)
        self.assertEqual(None, db_node.physical_id)
        mock_warning.assert_called_with(self.context, node, 'CREATE',
                                        'STATUS', six.text_type(ex))
Example #12
    def encode(self):
        """Encode the packet for transmission.

        If the packet contains binary elements, this function returns a list
        of packets where the first is the original packet with placeholders for
        the binary components and the remaining ones the binary attachments.
        """
        encoded_packet = six.text_type(self.packet_type)
        if self.packet_type == BINARY_EVENT or self.packet_type == BINARY_ACK:
            data, attachments = self._deconstruct_binary(self.data)
            encoded_packet += six.text_type(len(attachments)) + '-'
        else:
            data = self.data
            attachments = None
        needs_comma = False
        if self.namespace is not None and self.namespace != '/':
            encoded_packet += self.namespace
            needs_comma = True
        if self.id is not None:
            if needs_comma:
                encoded_packet += ','
                needs_comma = False
            encoded_packet += six.text_type(self.id)
        if data is not None:
            if needs_comma:
                encoded_packet += ','
            encoded_packet += self.json.dumps(data, separators=(',', ':'))
        if attachments is not None:
            encoded_packet = [encoded_packet] + attachments
        return encoded_packet
Example #13
    def __exit__(self, ex_type, ex_value, ex_traceback):
        if not ex_value:
            return True

        if isinstance(ex_value, exception.NotAuthorized):
            msg = six.text_type(ex_value)
            raise Fault(webob.exc.HTTPForbidden(explanation=msg))
        elif isinstance(ex_value, exception.Invalid):
            raise Fault(exception.ConvertedException(
                code=ex_value.code, explanation=six.text_type(ex_value)))
        elif isinstance(ex_value, TypeError):
            exc_info = (ex_type, ex_value, ex_traceback)
            LOG.error(_(
                'Exception handling resource: %s') %
                ex_value, exc_info=exc_info)
            raise Fault(webob.exc.HTTPBadRequest())
        elif isinstance(ex_value, Fault):
            LOG.info(_("Fault thrown: %s"), six.text_type(ex_value))
            raise ex_value
        elif isinstance(ex_value, webob.exc.HTTPException):
            LOG.info(_("HTTP exception thrown: %s"), six.text_type(ex_value))
            raise Fault(ex_value)

        # We didn't handle the exception
        return False
Example #14
    def _new_session(self, username_key=None, **attributes):
        """
        Create a new session and persist it according to its username and token
        values.

        :param attributes: Keyword parameters containing zero or more of
            ``username``, ``token``, and ``tenant_id``.  Any fields that are
            not specified will be filled out automatically.

        :return: A new session with all fields filled out and an expiration
                 time 1 day in the future (according to the clock associated
                 with this :obj:`MimicCore`).
        :rtype: :obj:`Session`
        """
        for key in ['username', 'token', 'tenant_id']:
            if attributes.get(key, None) is None:
                attributes[key] = key + "_" + text_type(uuid4())
                if key == 'tenant_id':
                    # integer tenant IDs - uuid4 ints are too long
                    attributes[key] = text_type(int(uuid4().int % 1e15))

        if 'expires' not in attributes:
            attributes['expires'] = (
                datetime.utcfromtimestamp(self.clock.seconds())
                + timedelta(days=1)
            )

        session = Session(**attributes)
        if username_key is None:
            username_key = session.username
        self._username_to_token[username_key] = session.token
        self._token_to_session[session.token] = session
        self._userid_to_session[session.user_id] = session
        self._tenant_to_session[session.tenant_id] = session
        return session
Example #15
def test_matrix_environments(tmpdir):
    conf = config.Config()

    conf.env_dir = six.text_type(tmpdir.join("env"))

    conf.pythons = ["2.7", "3.4"]
    conf.matrix = {
        "six": ["1.4", None],
        "colorama": ["0.3.1", "0.3.3"]
    }
    environments = list(environment.get_environments(conf))

    assert len(environments) == 2 * 2 * 2

    # Only test the first two environments, since this is so time
    # consuming
    for env in environments[:2]:
        env.create()

        output = env.run(
            ['-c', 'import six, sys; sys.stdout.write(six.__version__)'],
            valid_return_codes=None)
        if 'six' in env._requirements:
            assert output.startswith(six.text_type(env._requirements['six']))

        output = env.run(
            ['-c', 'import colorama, sys; sys.stdout.write(colorama.__version__)'])
        assert output.startswith(six.text_type(env._requirements['colorama']))
Example #16
 def test_signal_wrong_input_and_params_type(self):
     tmpl = template_format.parse(workflow_template_full)
     stack = utils.parse_stack(tmpl)
     rsrc_defns = stack.t.resource_definitions(stack)['create_vm']
     wf = workflow.Workflow('create_vm', rsrc_defns, stack)
     self.mistral.workflows.create.return_value = [
         FakeWorkflow('create_vm')]
     scheduler.TaskRunner(wf.create)()
     details = {'input': '3'}
     err = self.assertRaises(exception.ResourceFailure,
                             scheduler.TaskRunner(wf.signal, details))
     if six.PY3:
         entity = 'class'
     else:
         entity = 'type'
     error_message = ("StackValidationFailed: resources.create_vm: "
                      "Signal data error: Input in"
                      " signal data must be a map, find a <%s 'str'>" %
                      entity)
     self.assertEqual(error_message, six.text_type(err))
     details = {'params': '3'}
     err = self.assertRaises(exception.ResourceFailure,
                             scheduler.TaskRunner(wf.signal, details))
     error_message = ("StackValidationFailed: resources.create_vm: "
                      "Signal data error: Params "
                      "must be a map, find a <%s 'str'>" % entity)
     self.assertEqual(error_message, six.text_type(err))
Example #17
 def wrapper(*args, **kwargs):
     try:
         return method(*args, **kwargs)
     except db_exception.DBDuplicateEntry as e:
         # LOG the exception for debug purposes, do not send the
         # exception details out with the raised Conflict exception
         # as it can contain raw SQL.
         LOG.debug(_conflict_msg, {'conflict_type': conflict_type,
                                   'details': six.text_type(e)})
         raise exception.Conflict(type=conflict_type,
                                  details=_('Duplicate Entry'))
     except db_exception.DBError as e:
         # TODO(blk-u): inspecting inner_exception breaks encapsulation;
         # oslo_db should provide exception we need.
         if isinstance(e.inner_exception, IntegrityError):
             # LOG the exception for debug purposes, do not send the
             # exception details out with the raised Conflict exception
             # as it can contain raw SQL.
             LOG.debug(_conflict_msg, {'conflict_type': conflict_type,
                                       'details': six.text_type(e)})
             # NOTE(morganfainberg): This is really a case where the SQL
             # failed to store the data. This is not something that the
             # user has done wrong. Example would be a ForeignKey is
             # missing; the code that is executed before reaching the
             # SQL writing to the DB should catch the issue.
             raise exception.UnexpectedError(
                 _('An unexpected error occurred when trying to '
                   'store %s') % conflict_type)
         raise
Example #18
    def test_accordion_state(self):
        """
        Verify the accordion remembers which chapter you were last viewing.
        """
        email, password = self.STUDENT_INFO[0]
        self.login(email, password)
        self.enroll(self.course, True)
        self.enroll(self.test_course, True)

        # Now we directly navigate to a section in a chapter other than 'Overview'.
        section_url = reverse(
            'courseware_section',
            kwargs={
                'course_id': text_type(self.course.id),
                'chapter': 'factory_chapter',
                'section': 'factory_section',
            }
        )
        self.assert_request_status_code(200, section_url)

        # And now hitting the courseware tab should redirect to 'factory_chapter'
        url = reverse(
            'courseware',
            kwargs={'course_id': text_type(self.course.id)}
        )
        resp = self.client.get(url)
        self.assertRedirects(resp, section_url)
Example #19
 def change(self, world, making_change=True):
     'Alter the state of the Item to the new (or old) one.'
      # If attributes are missing, indicating that any values work for this
      # modify event, they are set using the values in the world at this
      # point. This allows the event to be undone later with the correct
      # old value put back into place.
     #
     item = world.item[self.direct]
     if not hasattr(self, 'old_value'):
         self.old_value = getattr(item, self.feature)
     # If the event failed, it itself had no consequence in the world.
     # Thus there is nothing to do or reverse.
     if len(self.failed) > 0:
         return
     # Make the change.
     value = (self.old_value, self.new_value)[making_change]
     setattr(item, self.feature, value)
     # Update the item in actors who can perceive this event. Also, check
     # to see if the actor's room became visible and needs an update.
     if making_change:
         for actor in world.concept:
             if (actor in [self.agent, self.direct] or 
                 world.can_see(actor, self.direct)):
                 world.transfer(item, actor, self.end)
             room_tag = text_type(world.room_of(actor))
             if (room_tag in world.concept[actor].item and
                 world.concept[actor].item[room_tag].blanked and
                 world.can_see(actor, room_tag)):
                 world.transfer(world.item[room_tag], actor, self.end)
                 look_at = Sense('examine', actor, 
                                 modality='sight', direct=room_tag)
                 look_at.cause = ':' + text_type(self.id) + ':'
                 self.enlightened.append(look_at)
Example #20
    def _volume_upload_image(self, req, id, body):
        """Uploads the specified volume to image service."""
        context = req.environ['cinder.context']
        params = body['os-volume_upload_image']
        req_version = req.api_version_request

        force = params.get('force', 'False')
        force = strutils.bool_from_string(force, strict=True)

        # Not found exception will be handled at the wsgi level
        volume = self.volume_api.get(context, id)

        context.authorize(policy.UPLOAD_IMAGE_POLICY)
        # check for valid disk-format
        disk_format = params.get("disk_format", "raw")

        image_metadata = {"container_format": params.get(
            "container_format", "bare"),
            "disk_format": disk_format,
            "name": params["image_name"]}

        if volume.encryption_key_id:
            # Clone volume encryption key: the current key cannot
            # be reused because it will be deleted when the volume is
            # deleted.
            # TODO(eharney): Currently, there is no mechanism to remove
            # these keys, because Glance will not delete the key from
            # Barbican when the image is deleted.
            encryption_key_id = self._key_manager.store(
                context,
                self._key_manager.get(context, volume.encryption_key_id))

            image_metadata['cinder_encryption_key_id'] = encryption_key_id

        if req_version >= mv.get_api_version(
                mv.UPLOAD_IMAGE_PARAMS):

            image_metadata['visibility'] = params.get('visibility', 'private')
            image_metadata['protected'] = strutils.bool_from_string(
                params.get('protected', 'False'), strict=True)

            if image_metadata['visibility'] == 'public':
                context.authorize(policy.UPLOAD_PUBLIC_POLICY)

        try:
            response = self.volume_api.copy_volume_to_image(context,
                                                            volume,
                                                            image_metadata,
                                                            force)
        except exception.InvalidVolume as error:
            raise webob.exc.HTTPBadRequest(explanation=error.msg)
        except ValueError as error:
            raise webob.exc.HTTPBadRequest(explanation=six.text_type(error))
        except messaging.RemoteError as error:
            msg = "%(err_type)s: %(err_msg)s" % {'err_type': error.exc_type,
                                                 'err_msg': error.value}
            raise webob.exc.HTTPBadRequest(explanation=msg)
        except Exception as error:
            raise webob.exc.HTTPBadRequest(explanation=six.text_type(error))
        return {'os-volume_upload_image': response}
Example #21
def unicode_key_value_to_string(dictionary):
    """Recursively converts dictionary keys to strings."""
    if not isinstance(dictionary, dict):
        return dictionary
    return dict((six.text_type(k),
                 six.text_type(unicode_key_value_to_string(v)))
                for k, v in dictionary.items())
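# Usage sketch for unicode_key_value_to_string above (not part of the original
# source), assuming "import six" at module level:
unicode_key_value_to_string({'name': 'demo', 'meta': {'count': 1}})
# Keys and leaf values become text; note that a nested dict, after being
# converted recursively, is itself stringified by the outer six.text_type()
# call, e.g. on Python 3: {'name': 'demo', 'meta': "{'count': '1'}"}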
Example #22
 def _save_and_activate_cfg(self, checksum, activate, active_cfg_name):
     body = {"checksum": checksum}
     json_str = json.dumps(body)
     url = self._build_url(rest_constants.PATCH_CFG_SAVE)
     response = self.session.patch(url, data=json_str)
     if response.status_code == 204:
         LOG.info("REST cfg save success")
     else:
         msg = (_("REST cfg save failed: %s")
                % six.text_type(response.text))
         LOG.error(msg)
         raise exception.BrocadeZoningRestException(reason=msg)
      # If activate is True, apply the saved cfg changes to the effective cfg
     if activate:
         checksum = self._get_checksum()
         body = {"checksum": checksum}
         json_str = json.dumps(body)
         url = self._build_url(rest_constants.PATCH_CFG_ENABLE
                               + active_cfg_name)
         response = self.session.patch(url, data=json_str)
         if response.status_code == 204:
             LOG.info("REST cfg activate success: %s", active_cfg_name)
         else:
             msg = (_("REST cfg activate failed: %s")
                    % six.text_type(response.text))
             LOG.error(msg)
             raise exception.BrocadeZoningRestException(reason=msg)
Example #23
    def test_simple(self):
        user = self.create_user()

        org = self.create_organization(owner=user)

        auth_provider = AuthProvider.objects.create(
            organization=org,
            provider='dummy',
        )
        auth_identity = AuthIdentity.objects.create(
            auth_provider=auth_provider,
            ident=user.email,
            user=user,
        )
        auth = Authenticator.objects.create(
            type=available_authenticators(ignore_backup=True)[0].type,
            user=user,
        )

        result = serialize(user, user, DetailedUserSerializer())
        assert result['id'] == six.text_type(user.id)
        assert result['has2fa'] is True
        assert len(result['emails']) == 1
        assert result['emails'][0]['email'] == user.email
        assert result['emails'][0]['is_verified']
        assert 'identities' in result
        assert len(result['identities']) == 1
        assert result['identities'][0]['id'] == six.text_type(auth_identity.id)
        assert result['identities'][0]['name'] == auth_identity.ident
        assert 'authenticators' in result
        assert len(result['authenticators']) == 1
        assert result['authenticators'][0]['id'] == six.text_type(auth.id)
Example #24
    def output_param_to_log(self, storage_protocol):
        essential_inherited_param = ['volume_backend_name', 'volume_driver']
        conf = self.configuration

        msg = basic_lib.set_msg(1, config_group=conf.config_group)
        LOG.info(msg)
        version = self.command.get_comm_version()
        if conf.hitachi_unit_name:
            prefix = 'HSNM2 version'
        else:
            prefix = 'RAID Manager version'
        LOG.info('\t%-35s%s' % (prefix + ': ', six.text_type(version)))
        for param in essential_inherited_param:
            value = conf.safe_get(param)
            LOG.info('\t%-35s%s' % (param + ': ', six.text_type(value)))
        for opt in volume_opts:
            if not opt.secret:
                value = getattr(conf, opt.name)
                LOG.info('\t%-35s%s' % (opt.name + ': ',
                         six.text_type(value)))

        if storage_protocol == 'iSCSI':
            value = getattr(conf, 'hitachi_group_request')
            LOG.info('\t%-35s%s' % ('hitachi_group_request: ',
                     six.text_type(value)))
Example #25
 def copy_sync_data(self, src_ldev, dest_ldev, size):
     src_vol = {'provider_location': six.text_type(src_ldev),
                'id': 'src_vol'}
     dest_vol = {'provider_location': six.text_type(dest_ldev),
                 'id': 'dest_vol'}
     properties = utils.brick_get_connector_properties()
     driver = self.generated_from
     src_info = None
     dest_info = None
     try:
         dest_info = driver._attach_volume(self.context, dest_vol,
                                           properties)
         src_info = driver._attach_volume(self.context, src_vol,
                                          properties)
         volume_utils.copy_volume(src_info['device']['path'],
                                  dest_info['device']['path'], size * 1024,
                                  self.configuration.volume_dd_blocksize)
     finally:
         if dest_info:
             driver._detach_volume(self.context, dest_info,
                                   dest_vol, properties)
         if src_info:
             driver._detach_volume(self.context, src_info,
                                   src_vol, properties)
     self.command.discard_zero_page(dest_ldev)
Example #26
    def unmanage(self, req, id):
        """Unmanage a share."""
        context = req.environ['manila.context']
        authorize(context)

        LOG.info(_LI("Unmanage share with id: %s"), id, context=context)

        try:
            share = self.share_api.get(context, id)
            if share.get('share_server_id'):
                msg = _("Operation 'unmanage' is not supported for shares "
                        "that are created on top of share servers "
                        "(created with share-networks).")
                raise exc.HTTPForbidden(explanation=msg)
            elif share['status'] in constants.TRANSITIONAL_STATUSES:
                msg = _("Share with transitional state can not be unmanaged. "
                        "Share '%(s_id)s' is in '%(state)s' state.") % dict(
                            state=share['status'], s_id=share['id'])
                raise exc.HTTPForbidden(explanation=msg)
            snapshots = self.share_api.db.share_snapshot_get_all_for_share(
                context, id)
            if snapshots:
                msg = _("Share '%(s_id)s' can not be unmanaged because it has "
                        "'%(amount)s' dependent snapshot(s).") % {
                            's_id': id, 'amount': len(snapshots)}
                raise exc.HTTPForbidden(explanation=msg)
            self.share_api.unmanage(context, share)
        except exception.NotFound as e:
            raise exc.HTTPNotFound(explanation=six.text_type(e))
        except (exception.InvalidShare, exception.PolicyNotAuthorized) as e:
            raise exc.HTTPForbidden(explanation=six.text_type(e))

        return webob.Response(status_int=202)
Example #27
    def test_get_network_id_by_label(self):
        """Tests the get_net_id_by_label function."""
        net = mock.MagicMock()
        net.id = str(uuid.uuid4())
        self.nova_client.networks.find.side_effect = [
            net, nova_exceptions.NotFound(404),
            nova_exceptions.NoUniqueMatch()]
        self.assertEqual(net.id,
                         self.nova_plugin.get_net_id_by_label('net_label'))

        exc = self.assertRaises(
            exception.NovaNetworkNotFound,
            self.nova_plugin.get_net_id_by_label, 'idontexist')
        expected = 'The Nova network (idontexist) could not be found'
        self.assertIn(expected, six.text_type(exc))
        exc = self.assertRaises(
            exception.PhysicalResourceNameAmbiguity,
            self.nova_plugin.get_net_id_by_label, 'notUnique')
        expected = ('Multiple physical resources were found '
                    'with name (notUnique)')
        self.assertIn(expected, six.text_type(exc))
        calls = [mock.call(label='net_label'),
                 mock.call(label='idontexist'),
                 mock.call(label='notUnique')]
        self.nova_client.networks.find.assert_has_calls(calls)
Example #28
 def _get_context(self, networks_per_tenant=2, neutron_network_provider=True):
     tenants = {}
     for t_id in range(self.TENANTS_AMOUNT):
         tenants[six.text_type(t_id)] = {"name": six.text_type(t_id)}
         tenants[six.text_type(t_id)]["networks"] = []
         for i in range(networks_per_tenant):
             network = {"id": "fake_net_id_%s" % i}
             if neutron_network_provider:
                 network["subnets"] = ["fake_subnet_id_of_net_%s" % i]
             else:
                 network["cidr"] = "101.0.5.0/24"
             tenants[six.text_type(t_id)]["networks"].append(network)
     users = []
     for t_id in tenants.keys():
         for i in range(self.USERS_PER_TENANT):
             users.append({"id": i, "tenant_id": t_id, "credential": "fake"})
     context = {
         "config": {
             "users": {
                 "tenants": self.TENANTS_AMOUNT,
                 "users_per_tenant": self.USERS_PER_TENANT,
                 "random_user_choice": False,
             },
             consts.SHARE_NETWORKS_CONTEXT_NAME: {"use_share_networks": True, "share_networks": []},
             "network": {"networks_per_tenant": networks_per_tenant, "start_cidr": "101.0.5.0/24"},
         },
         "admin": {"credential": mock.MagicMock()},
         "task": mock.MagicMock(),
         "users": users,
         "tenants": tenants,
     }
     return context
Example #29
def store_add_to_backend(image_id, data, size, store):
    """
    A wrapper around a call to each store's add() method. This gives glance
    a common place to check the output.

    :param image_id: The image id to which data is added
    :param data: The data to be stored
    :param size: The length of the data in bytes
    :param store: The store to which the data is being added
    :return: The url location of the file,
             the size amount of data,
             the checksum of the data,
             the storage system's metadata dictionary for the location
    """
    (location, size, checksum, metadata) = store.add(image_id, data, size)
    if metadata is not None:
        if not isinstance(metadata, dict):
            msg = (_("The storage driver %(store)s returned invalid metadata "
                     "%(metadata)s. This must be a dictionary type") %
                   {'store': six.text_type(store),
                    'metadata': six.text_type(metadata)})
            LOG.error(msg)
            raise BackendException(msg)
        try:
            check_location_metadata(metadata)
        except BackendException as e:
            e_msg = (_("A bad metadata structure was returned from the "
                       "%(store)s storage driver: %(metadata)s.  %(error)s.") %
                     {'store': six.text_type(store),
                      'metadata': six.text_type(metadata),
                      'error': six.text_type(e)})
            LOG.error(e_msg)
            raise BackendException(e_msg)
    return (location, size, checksum, metadata)
Example #30
 def _test_create_image_from_server_wait_until_active_not_found(
         self, wait_for_image_status, compute_images_client,
         servers_client, fault=None):
     # setup mocks
     image_id = uuidutils.generate_uuid()
     fake_image = mock.Mock(response={'location': image_id})
     compute_images_client.create_image.return_value = fake_image
     fake_server = {'id': mock.sentinel.server_id}
     if fault:
         fake_server['fault'] = fault
     servers_client.show_server.return_value = {'server': fake_server}
     # call the utility method
     ex = self.assertRaises(
         exceptions.SnapshotNotFoundException,
         compute_base.BaseV2ComputeTest.create_image_from_server,
         mock.sentinel.server_id, wait_until='active')
     # make our assertions
     if fault:
         self.assertIn(fault, six.text_type(ex))
     else:
         self.assertNotIn(fault, six.text_type(ex))
     if compute_base.BaseV2ComputeTest.is_requested_microversion_compatible(
         '2.35'):
         status = 'ACTIVE'
     else:
         status = 'active'
     wait_for_image_status.assert_called_once_with(
         compute_images_client, image_id, status)
     servers_client.show_server.assert_called_once_with(
         mock.sentinel.server_id)
Example #31
 def __str__(self):
     """Return a string representation of the parameter."""
     if self.hidden():
         return six.text_type('******')
     else:
         return self._rawstring()
Example #32
def _load_log_config(log_config_append):
    try:
        logging.config.fileConfig(log_config_append,
                                  disable_existing_loggers=False)
    except (moves.configparser.Error, KeyError) as exc:
        raise LogConfigError(log_config_append, six.text_type(exc))
Example #33
    def get_tag_value_paginator_for_projects(self,
                                             projects,
                                             environments,
                                             key,
                                             start=None,
                                             end=None,
                                             query=None,
                                             order_by="-last_seen"):
        from sentry.api.paginator import SequencePaginator

        if not order_by == "-last_seen":
            raise ValueError("Unsupported order_by: %s" % order_by)

        snuba_key = snuba.get_snuba_column_name(key)

        conditions = []

        if key in FUZZY_NUMERIC_KEYS:
            converted_query = int(
                query) if query is not None and query.isdigit() else None
            if converted_query is not None:
                conditions.append([
                    snuba_key, ">=", converted_query - FUZZY_NUMERIC_DISTANCE
                ])
                conditions.append([
                    snuba_key, "<=", converted_query + FUZZY_NUMERIC_DISTANCE
                ])
        else:
            if snuba_key in BLACKLISTED_COLUMNS:
                snuba_key = "tags[%s]" % (key, )

            if query:
                conditions.append([snuba_key, "LIKE", u"%{}%".format(query)])
            else:
                conditions.append([snuba_key, "!=", ""])

        filters = {"project_id": projects}
        if environments:
            filters["environment"] = environments

        results = snuba.query(
            start=start,
            end=end,
            groupby=[snuba_key],
            filter_keys=filters,
            aggregations=[
                ["count()", "", "times_seen"],
                ["min", "timestamp", "first_seen"],
                ["max", "timestamp", "last_seen"],
            ],
            conditions=conditions,
            orderby=order_by,
            # TODO: This means they can't actually paginate all TagValues.
            limit=1000,
            arrayjoin=snuba.get_arrayjoin(snuba_key),
            referrer="tagstore.get_tag_value_paginator_for_projects",
        )

        tag_values = [
            TagValue(key=key,
                     value=six.text_type(value),
                     **fix_tag_value_data(data))
            for value, data in six.iteritems(results)
        ]

        desc = order_by.startswith("-")
        score_field = order_by.lstrip("-")
        return SequencePaginator(
            [(int(to_timestamp(getattr(tv, score_field)) * 1000), tv)
             for tv in tag_values],
            reverse=desc,
        )
Example #34
            def sanitize_utf8(s):
                if not isinstance(s, six.text_type):
                    s = six.text_type(s, 'utf-8')

                return utf8_4byte_re.sub(u"\uFFFD", s)
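# The sanitize_utf8 helper above references a module-level utf8_4byte_re that
# is not shown. A standalone reconstruction is sketched below; the exact
# pattern is an assumption (written for Python 3 / wide-unicode builds).
import re
import six

utf8_4byte_re = re.compile(u'[\U00010000-\U0010FFFF]', re.UNICODE)

def sanitize_utf8(s):
    # Decode byte strings, then replace any character outside the Basic
    # Multilingual Plane (4 bytes in UTF-8) with U+FFFD.
    if not isinstance(s, six.text_type):
        s = six.text_type(s, 'utf-8')
    return utf8_4byte_re.sub(u"\uFFFD", s)

print(sanitize_utf8(u'ok \U0001F600'))  # -> 'ok \ufffd'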
Example #35
 def __init__(self, path, overwrite):
     self.path = six.text_type(path)
     self.overwrite = overwrite
Example #36
 def _validate(self, val, context):
     try:
         strutils.bool_from_string(val, strict=True)
     except ValueError as ex:
         raise exception.StackValidationFailed(message=six.text_type(ex))
     self.schema.validate_value(val, context)
Example #37
 def _validate(self, val, context):
     try:
         Schema.str_to_num(val)
     except ValueError as ex:
         raise exception.StackValidationFailed(message=six.text_type(ex))
     self.schema.validate_value(val, context)
Example #38
 def Handle(self, exc):
     code = getattr(exc, 'exit_code', 1) if exc else 0
     message = six.text_type(exc) if exc else None
     return self.HandleReturnCode(code, message)
Example #39
 def get_timezone(self, obj):
     return six.text_type(obj.timezone)
Example #40
    def validate(self, value, model_instance):
        # decrypt secret values so we can validate their contents (i.e.,
        # ssh_key_data format)

        if not isinstance(value, dict):
            return super(CredentialInputField,
                         self).validate(value, model_instance)

        # Backwards compatibility: in prior versions, if you submit `null` for
        # a credential field value, it just considers the value an empty string
        for unset in [
                key for key, v in model_instance.inputs.items() if not v
        ]:
            default_value = model_instance.credential_type.default_for_field(
                unset)
            if default_value is not None:
                model_instance.inputs[unset] = default_value

        decrypted_values = {}
        for k, v in value.items():
            if all([
                    k in model_instance.credential_type.secret_fields,
                    v != '$encrypted$', model_instance.pk
            ]):
                if not isinstance(getattr(model_instance, k),
                                  six.string_types):
                    raise django_exceptions.ValidationError(
                        _('secret values must be of type string, not {}').
                        format(type(v).__name__),
                        code='invalid',
                        params={'value': v},
                    )
                decrypted_values[k] = utils.decrypt_field(model_instance, k)
            else:
                decrypted_values[k] = v

        super(JSONSchemaField, self).validate(decrypted_values, model_instance)
        errors = {}
        for error in Draft4Validator(
                self.schema(model_instance),
                format_checker=self.format_checker).iter_errors(
                    decrypted_values):
            if error.validator == 'pattern' and 'error' in error.schema:
                error.message = six.text_type(
                    error.schema['error']).format(instance=error.instance)
            if error.validator == 'dependencies':
                # replace the default error messaging w/ a better i18n string
                # I wish there was a better way to determine the parameters of
                # this validation failure, but the exception jsonschema raises
                # doesn't include them as attributes (just a hard-coded error
                # string)
                match = re.search(
                    # 'foo' is a dependency of 'bar'
                    "'"  # apostrophe
                    "([^']+)"  # one or more non-apostrophes (first group)
                    "'[\w ]+'"  # one or more words/spaces
                    "([^']+)",  # second group
                    error.message,
                )
                if match:
                    label, extraneous = match.groups()
                    if error.schema['properties'].get(label):
                        label = error.schema['properties'][label]['label']
                    errors[extraneous] = [
                        _('cannot be set unless "%s" is set') % label
                    ]
                    continue
            if 'id' not in error.schema:
                # If the error is not for a specific field, it's specific to
                # `inputs` in general
                raise django_exceptions.ValidationError(
                    error.message,
                    code='invalid',
                    params={'value': value},
                )
            errors[error.schema['id']] = [error.message]

        inputs = model_instance.credential_type.inputs
        for field in inputs.get('required', []):
            if not value.get(field, None):
                errors[field] = [
                    _('required for %s') %
                    (model_instance.credential_type.name)
                ]

        # `ssh_key_unlock` requirements are very specific and can't be
        # represented without complicated JSON schema
        if (model_instance.credential_type.managed_by_tower is True
                and 'ssh_key_unlock'
                in model_instance.credential_type.defined_fields):

            # in order to properly test the necessity of `ssh_key_unlock`, we
            # need to know the real value of `ssh_key_data`; for a payload like:
            # {
            #   'ssh_key_data': '$encrypted$',
            #   'ssh_key_unlock': 'do-you-need-me?',
            # }
            # ...we have to fetch the actual key value from the database
            if model_instance.pk and model_instance.ssh_key_data == '$encrypted$':
                model_instance.ssh_key_data = model_instance.__class__.objects.get(
                    pk=model_instance.pk).ssh_key_data

            if model_instance.has_encrypted_ssh_key_data and not value.get(
                    'ssh_key_unlock'):
                errors['ssh_key_unlock'] = [
                    _('must be set when SSH key is encrypted.')
                ]
            if all([
                    model_instance.ssh_key_data,
                    value.get('ssh_key_unlock'),
                    not model_instance.has_encrypted_ssh_key_data
            ]):
                errors['ssh_key_unlock'] = [
                    _('should not be set when SSH key is not encrypted.')
                ]

        if errors:
            raise serializers.ValidationError({'inputs': errors})
Example #41
 def __unicode__(self):
     enabled_label = "Enabled" if self.enabled else "Not Enabled"
     return u"Course '{}': Persistent Grades {}".format(text_type(self.course_id), enabled_label)
Example #42
    def _draw_text_as_text(self, gc, x, y, s, prop, angle, ismath, mtext=None):
        writer = self.writer

        color = rgb2hex(gc.get_rgb())
        style = {}
        if color != '#000000':
            style['fill'] = color
        if gc.get_alpha() != 1.0:
            style['opacity'] = short_float_fmt(gc.get_alpha())

        if not ismath:
            font = self._get_font(prop)
            font.set_text(s, 0.0, flags=LOAD_NO_HINTING)

            fontsize = prop.get_size_in_points()

            fontfamily = font.family_name
            fontstyle = prop.get_style()

            attrib = {}
            # Must add "px" to workaround a Firefox bug
            style['font-size'] = short_float_fmt(fontsize) + 'px'
            style['font-family'] = six.text_type(fontfamily)
            style['font-style'] = prop.get_style().lower()
            style['font-weight'] = six.text_type(prop.get_weight()).lower()
            attrib['style'] = generate_css(style)

            if mtext and (angle == 0 or mtext.get_rotation_mode() == "anchor"):
                # If text anchoring can be supported, get the original
                # coordinates and add alignment information.

                # Get anchor coordinates.
                transform = mtext.get_transform()
                ax, ay = transform.transform_point(mtext.get_position())
                ay = self.height - ay

                # Don't do vertical anchor alignment. Most applications do not
                # support 'alignment-baseline' yet. Apply the vertical layout
                # to the anchor point manually for now.
                angle_rad = np.deg2rad(angle)
                dir_vert = np.array([np.sin(angle_rad), np.cos(angle_rad)])
                v_offset = np.dot(dir_vert, [(x - ax), (y - ay)])
                ax = ax + v_offset * dir_vert[0]
                ay = ay + v_offset * dir_vert[1]

                ha_mpl_to_svg = {'left': 'start', 'right': 'end',
                                 'center': 'middle'}
                style['text-anchor'] = ha_mpl_to_svg[mtext.get_ha()]

                attrib['x'] = short_float_fmt(ax)
                attrib['y'] = short_float_fmt(ay)
                attrib['style'] = generate_css(style)
                attrib['transform'] = "rotate(%s, %s, %s)" % (
                    short_float_fmt(-angle),
                    short_float_fmt(ax),
                    short_float_fmt(ay))
                writer.element('text', s, attrib=attrib)
            else:
                attrib['transform'] = generate_transform([
                    ('translate', (x, y)),
                    ('rotate', (-angle,))])

                writer.element('text', s, attrib=attrib)

            if rcParams['svg.fonttype'] == 'svgfont':
                fontset = self._fonts.setdefault(font.fname, set())
                for c in s:
                    fontset.add(ord(c))
        else:
            writer.comment(s)

            width, height, descent, svg_elements, used_characters = \
                   self.mathtext_parser.parse(s, 72, prop)
            svg_glyphs = svg_elements.svg_glyphs
            svg_rects = svg_elements.svg_rects

            attrib = {}
            attrib['style'] = generate_css(style)
            attrib['transform'] = generate_transform([
                ('translate', (x, y)),
                ('rotate', (-angle,))])

            # Apply attributes to 'g', not 'text', because we likely
            # have some rectangles as well with the same style and
            # transformation
            writer.start('g', attrib=attrib)

            writer.start('text')

            # Sort the characters by font, and output one tspan for
            # each
            spans = OrderedDict()
            for font, fontsize, thetext, new_x, new_y, metrics in svg_glyphs:
                style = generate_css({
                    'font-size': short_float_fmt(fontsize) + 'px',
                    'font-family': font.family_name,
                    'font-style': font.style_name.lower(),
                    'font-weight': font.style_name.lower()})
                if thetext == 32:
                    thetext = 0xa0 # non-breaking space
                spans.setdefault(style, []).append((new_x, -new_y, thetext))

            if rcParams['svg.fonttype'] == 'svgfont':
                for font, fontsize, thetext, new_x, new_y, metrics in svg_glyphs:
                    fontset = self._fonts.setdefault(font.fname, set())
                    fontset.add(thetext)

            for style, chars in six.iteritems(spans):
                chars.sort()

                same_y = True
                if len(chars) > 1:
                    last_y = chars[0][1]
                    for i in xrange(1, len(chars)):
                        if chars[i][1] != last_y:
                            same_y = False
                            break
                if same_y:
                    ys = six.text_type(chars[0][1])
                else:
                    ys = ' '.join(six.text_type(c[1]) for c in chars)

                attrib = {
                    'style': style,
                    'x': ' '.join(short_float_fmt(c[0]) for c in chars),
                    'y': ys
                    }

                writer.element(
                    'tspan',
                    ''.join(unichr(c[2]) for c in chars),
                    attrib=attrib)

            writer.end('text')

            if len(svg_rects):
                for x, y, width, height in svg_rects:
                    writer.element(
                        'rect',
                        x=short_float_fmt(x),
                        y=short_float_fmt(-y + height),
                        width=short_float_fmt(width),
                        height=short_float_fmt(height)
                        )

            writer.end('g')
Example #43
 def test_convert_image_with_prlimit_fail(self, path, mocked_execute):
     mocked_execute.side_effect = \
         processutils.ProcessExecutionError(exit_code=-9)
     exc = self.assertRaises(exception.InvalidDiskInfo,
                             images.qemu_img_info, '/fake/path')
     self.assertIn('qemu-img aborted by prlimits', six.text_type(exc))
Example #44
def _StrFtime(dt, fmt):
    """Convert strftime exceptions to Datetime Errors."""
    try:
        return dt.strftime(fmt)
    except (AttributeError, OverflowError, TypeError, ValueError) as e:
        raise DateTimeValueError(six.text_type(e))
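# Usage sketch for _StrFtime above (not part of the original source), assuming
# the surrounding module's DateTimeValueError and six import are available:
import datetime

_StrFtime(datetime.datetime(2020, 1, 1), '%Y-%m-%d')  # -> '2020-01-01'

try:
    # A non-datetime argument raises AttributeError, which is re-raised as
    # DateTimeValueError carrying the original message.
    _StrFtime('not a datetime', '%Y')
except DateTimeValueError as e:
    print(six.text_type(e))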
Example #45
 def __unicode__(self):
     return u"Course: {}, enabled: {}".format(six.text_type(self.course_key), self.enabled)
Example #46
def wl2qlc(header, data, filename='', formatter='concept', **keywords):
    """
    Write the basic data of a wordlist to file.
    """
    util.setdefaults(keywords,
                     ignore=['taxa', 'doculects', 'msa'],
                     fileformat='qlc',
                     prettify=True)
    if keywords['ignore'] == 'all':
        keywords['ignore'] = [
            'taxa', 'scorer', 'meta', 'distances', 'doculects', 'msa', 'json'
        ]

    formatter = formatter.upper()
    if not filename:
        filename = rcParams['filename']

    # create output string
    out = '# Wordlist\n' if keywords['prettify'] else ''

    # write meta to file
    meta = keywords.get("meta", {})
    kvpairs = {}
    jsonpairs = {}
    msapairs = {}
    trees = {}
    distances = ''
    taxa = ''
    scorer = ''

    for k, v in meta.items():
        # simple key-value-pairs
        if isinstance(v, (text_type, int)) or k == "tree":
            kvpairs[k] = v
        elif k == 'msa' and k not in keywords['ignore']:
            # go a level deeper, checking for keys
            for ref in v:
                if ref not in msapairs:
                    msapairs[ref] = {}
                for a, b in v[ref].items():
                    msapairs[ref][a] = b
        elif k == 'distances':
            distances = matrix2dst(v, meta['taxa'])
        elif k in ['taxa', 'doculect', 'taxon', 'doculects']:
            # we need to find a better solution here, since it is not nice to
            # have taxa written to json again and again
            pass
        elif k == 'trees' and k not in keywords['ignore']:
            trees = ''
            for key, value in v.items():
                trees += '<tre id="{0}">\n{1}\n</tre>\n'.format(key, value)
        elif k == 'scorer' and k not in keywords['ignore']:
            for key, value in v.items():
                scorer += '<{2} id="{0}">\n{1}</{2}>\n\n'.format(
                    key, scorer2str(value), k)
        else:
            # check whether serialization works
            try:
                json.dumps(v)
                jsonpairs[k] = v
            except TypeError:
                pass

    if kvpairs and 'meta' not in keywords['ignore']:
        out += '\n# META\n' if keywords['prettify'] else ''
        for k, v in sorted(kvpairs.items(), key=lambda x: x[0]):
            out += '@{0}:{1}\n'.format(k, v)
    if taxa and keywords['taxa']:
        out += '\n# TAXA\n<taxa>\n' + taxa + '\n</taxa>\n'
    if jsonpairs and 'json' not in keywords['ignore']:
        out += "@json: " + json.dumps(jsonpairs) + '\n'
    if msapairs and 'msa' not in keywords['ignore']:
        for ref in msapairs:
            out += "\n# MSA reference: {0}\n".format(ref)
            for k, v in msapairs[ref].items():
                if 'consensus' in v:
                    out += '#\n<msa '
                    out += 'id="{0}" ref="{1}" consensus="{2}">\n'.format(
                        k, ref, ' '.join(v['consensus']))
                else:
                    out += '#\n<msa id="{0}" ref="{1}">\n'.format(k, ref)
                outs = msa2str(v, wordlist=True)
                out += outs
                out += "</msa>\n"

    if distances and 'distances' not in keywords['ignore']:
        out += '\n# DISTANCES\n<dst>\n'
        out += distances + '</dst>\n'

    if trees:
        out += '\n# TREES\n' + trees

    if scorer and 'scorer' not in keywords['ignore']:
        out += '\n# SCORER\n' + scorer

    out += '\n# DATA\n' if keywords['prettify'] else ''
    out += 'ID\t' + '\t'.join(header) + '\n'

    # check for gloss in header to create nice output format
    if formatter in header:
        idx = header.index(formatter)
        formatter = None
        sorted_data = sorted(data.keys(), key=lambda x: data[x][idx])
    elif len(formatter.split(',')) == 2:
        idxA, idxB = formatter.split(',')
        idxA = header.index(idxA)
        idxB = header.index(idxB)
        idx = idxA
        formatter = None
        sorted_data = sorted(data.keys(),
                             key=lambda x: (data[x][idxA], data[x][idxB]))
    else:
        idx = False
        formatter = ''
        sorted_data = sorted(data.keys())

    for key in sorted_data:
        # get the line
        line = data[key]

        # check for formatter
        if idx in range(len(line)):
            if line[idx] != formatter:
                out += '#\n' if keywords['prettify'] else ''
                formatter = line[idx]

        # add the key
        out += text_type(key)

        # add the rest of the values
        for value in line:
            if type(value) == list:
                try:
                    out += '\t' + ' '.join(value)
                except TypeError:
                    out += '\t' + ' '.join([text_type(v) for v in value])
            elif type(value) == int:
                out += '\t' + text_type(value)
            elif type(value) == float:
                out += '\t{0:.4f}'.format(value)
            else:
                out += '\t' + value
        out += '\n'

    util.write_text_file(filename + '.' + keywords['fileformat'],
                         out + keywords.get('stamp', ''),
                         normalize="NFC")
    return
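
A minimal usage sketch for wl2qlc with invented header/data values; the import path and the uppercase QLC column names are assumptions about the surrounding lingpy code, not taken from this snippet.

from lingpy.basic.ops import wl2qlc  # assumed import path

# Hypothetical two-row wordlist; QLC headers are conventionally uppercase.
header = ['DOCULECT', 'CONCEPT', 'IPA']
data = {
    1: ['German', 'hand', 'hant'],
    2: ['English', 'hand', 'hand'],
}

# Writes "toy_wordlist.qlc" with an "ID\tDOCULECT\tCONCEPT\tIPA" header line
# and one row per key; formatter='concept' groups rows by the CONCEPT column
# and separates the groups with '#' lines because prettify is True.
wl2qlc(header, data, filename='toy_wordlist', formatter='concept', prettify=True)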
Example #47
0
def get_course_about_section(request, course, section_key):
    """
    This returns the snippet of html to be rendered on the course about page,
    given the key for the section.

    Valid keys:
    - overview
    - about_sidebar_html
    - short_description
    - description
    - key_dates (includes start, end, exams, etc)
    - video
    - course_staff_short
    - course_staff_extended
    - requirements
    - syllabus
    - textbook
    - faq
    - effort
    - more_info
    - ocw_links
    """

    # Many of these are stored as html files instead of some semantic
    # markup. This can change without affecting this interface when we find a
    # good format for defining so many snippets of text/html.

    html_sections = {
        'short_description', 'description', 'key_dates', 'video',
        'course_staff_short', 'course_staff_extended', 'requirements',
        'syllabus', 'textbook', 'faq', 'more_info', 'overview', 'effort',
        'end_date', 'prerequisites', 'about_sidebar_html', 'ocw_links'
    }

    if section_key in html_sections:
        try:
            loc = course.location.replace(category='about', name=section_key)

            # Use an empty cache
            field_data_cache = FieldDataCache([], course.id, request.user)
            about_module = get_module(
                request.user,
                request,
                loc,
                field_data_cache,
                log_if_not_found=False,
                wrap_xmodule_display=False,
                static_asset_path=course.static_asset_path,
                course=course)

            html = ''

            if about_module is not None:
                try:
                    html = about_module.render(STUDENT_VIEW).content
                except Exception:  # pylint: disable=broad-except
                    html = render_to_string('courseware/error-message.html',
                                            None)
                    log.exception(u"Error rendering course=%s, section_key=%s",
                                  course, section_key)
            return html

        except ItemNotFoundError:
            log.warning(u"Missing about section %s in course %s", section_key,
                        text_type(course.location))
            return None

    raise KeyError("Invalid about key " + str(section_key))
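
A short usage sketch, assuming the surrounding Open edX courseware context in which request and course come from the calling view:

# Render the "overview" snippet for the course about page; the helper returns
# None when the about block is missing, so fall back to an empty string.
overview_html = get_course_about_section(request, course, 'overview') or ''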
Example #48
0
    def test_serial_no_valid_host_then_pass_with_third_host(self):
        """Creates 2 servers in order (not a multi-create request) in an
        anti-affinity group so there will be 1 server on each host. Then
        attempts to live migrate the first server which will fail because the
        only other available host will be full. Then starts up a 3rd compute
        service and retries the live migration which should then pass.
        """
        # Create the anti-affinity group used for the servers.
        group = self.api.post_server_groups(
            {'name': 'test_serial_no_valid_host_then_pass_with_third_host',
             'policies': ['anti-affinity']})
        servers = []
        for _ in range(2):
            server = self._build_server(networks='none')
            # Add the group hint so the server is created in our group.
            server_req = {
                'server': server,
                'os:scheduler_hints': {'group': group['id']}
            }
            # Use microversion 2.37 for passing networks='none'.
            with utils.temporary_mutation(self.api, microversion='2.37'):
                server = self.api.post_server(server_req)
                servers.append(
                    self._wait_for_state_change(server, 'ACTIVE'))

        # Make sure each server is on a unique host.
        hosts = set([svr['OS-EXT-SRV-ATTR:host'] for svr in servers])
        self.assertEqual(2, len(hosts))

        # And make sure the group has 2 members.
        members = self.api.get_server_group(group['id'])['members']
        self.assertEqual(2, len(members))

        # Now attempt to live migrate one of the servers which should fail
        # because we don't have a free host. Since we're using microversion 2.1
        # the scheduling will be synchronous and we should get back a 400
        # response for the NoValidHost error.
        body = {
            'os-migrateLive': {
                'host': None,
                'block_migration': False,
                'disk_over_commit': False
            }
        }
        # Specifically use the first server since that was the first member
        # added to the group.
        server = servers[0]
        ex = self.assertRaises(client.OpenStackApiException,
                               self.admin_api.post_server_action,
                               server['id'], body)
        self.assertEqual(400, ex.response.status_code)
        self.assertIn('No valid host', six.text_type(ex))

        # Now start up a 3rd compute service and retry the live migration which
        # should work this time.
        self.start_service('compute', host='host3')
        self.admin_api.post_server_action(server['id'], body)
        server = self._wait_for_state_change(server, 'ACTIVE')
        # Now the server should be on host3 since that was the only available
        # host for the live migration.
        self.assertEqual('host3', server['OS-EXT-SRV-ATTR:host'])
Example #49
0
    def test_populate(self):
        _expected = self.make_expected()

        # Unhappy Plan - Not authenticated
        # Live test against API, will fail in an unauthorized error
        with self.assertRaises(WebFault) as ctx:
            provider = TransipProvider('test', 'unittest', self.bogus_key)
            zone = Zone('unit.tests.', [])
            provider.populate(zone, True)

        self.assertEquals(str('WebFault'),
                          str(ctx.exception.__class__.__name__))

        self.assertEquals(str('200'), ctx.exception.fault.faultcode)

        # Unhappy Plan - Zone does not exists
        # Will trigger an exception if provider is used as a target for a
        # non-existing zone
        with self.assertRaises(Exception) as ctx:
            provider = TransipProvider('test', 'unittest', self.bogus_key)
            provider._client = MockDomainService('unittest', self.bogus_key)
            zone = Zone('notfound.unit.tests.', [])
            provider.populate(zone, True)

        self.assertEquals(str('TransipNewZoneException'),
                          str(ctx.exception.__class__.__name__))

        self.assertEquals(
            'populate: (102) Transip used as target' +
            ' for non-existing zone: notfound.unit.tests.',
            text_type(ctx.exception))

        # Happy Plan - Zone does not exists
        # Won't trigger an exception if provider is NOT used as a target for a
        # non-existing zone.
        provider = TransipProvider('test', 'unittest', self.bogus_key)
        provider._client = MockDomainService('unittest', self.bogus_key)
        zone = Zone('notfound.unit.tests.', [])
        provider.populate(zone, False)

        # Happy Plan - Populate with mockup records
        provider = TransipProvider('test', 'unittest', self.bogus_key)
        provider._client = MockDomainService('unittest', self.bogus_key)
        provider._client.mockup(_expected.records)
        zone = Zone('unit.tests.', [])
        provider.populate(zone, False)

        # Transip allows relative values for types like cname, mx.
        # Test if these are correctly appended with the domain
        provider._currentZone = zone
        self.assertEquals("www.unit.tests.", provider._parse_to_fqdn("www"))
        self.assertEquals("www.unit.tests.",
                          provider._parse_to_fqdn("www.unit.tests."))
        self.assertEquals("www.sub.sub.sub.unit.tests.",
                          provider._parse_to_fqdn("www.sub.sub.sub"))
        self.assertEquals("unit.tests.", provider._parse_to_fqdn("@"))

        # Happy Plan - Even if the zone has no records the zone should exist
        provider = TransipProvider('test', 'unittest', self.bogus_key)
        provider._client = MockDomainService('unittest', self.bogus_key)
        zone = Zone('unit.tests.', [])
        exists = provider.populate(zone, True)
        self.assertTrue(exists, 'populate should return true')

        return
Example #50
0
    def fake_unicode(self):
        return six.text_type(self.message)
Example #51
0
    def comment(self, data):
        assert isinstance(data, string_types), type(data)

        return {"type": "Comment", "data": text_type(data)}
Example #52
0
def check_course_access(course,
                        user,
                        action,
                        check_if_enrolled=False,
                        check_survey_complete=True):
    """
    Check that the user has the access to perform the specified action
    on the course (CourseDescriptor|CourseOverview).

    check_if_enrolled: If true, additionally verifies that the user is enrolled.
    check_survey_complete: If true, additionally verifies that the user has completed the survey.
    """
    # Allow staff full access to the course even if not enrolled
    if has_access(user, 'staff', course.id):
        return

    request = get_current_request()
    check_content_start_date_for_masquerade_user(course.id, user, request,
                                                 course.start)

    access_response = has_access(user, action, course, course.id)
    if not access_response:
        # Redirect if StartDateError
        if isinstance(access_response, StartDateError):
            start_date = strftime_localized(course.start, 'SHORT_DATE')
            params = QueryDict(mutable=True)
            params['notlive'] = start_date
            raise CourseAccessRedirect(
                '{dashboard_url}?{params}'.format(
                    dashboard_url=reverse('dashboard'),
                    params=params.urlencode()), access_response)

        # Redirect if AuditExpiredError
        if isinstance(access_response, AuditExpiredError):
            params = QueryDict(mutable=True)
            params[
                'access_response_error'] = access_response.additional_context_user_message
            raise CourseAccessRedirect(
                '{dashboard_url}?{params}'.format(
                    dashboard_url=reverse('dashboard'),
                    params=params.urlencode()), access_response)

        # Redirect if the user must answer a survey before entering the course.
        if isinstance(access_response, MilestoneAccessError):
            raise CourseAccessRedirect(
                '{dashboard_url}'.format(dashboard_url=reverse('dashboard'), ),
                access_response)

        # Deliberately return a non-specific error message to avoid
        # leaking info about access control settings
        raise CoursewareAccessException(access_response)

    if check_if_enrolled:
        # If the user is not enrolled, redirect them to the about page
        if not CourseEnrollment.is_enrolled(user, course.id):
            raise CourseAccessRedirect(
                reverse('about_course', args=[six.text_type(course.id)]))

    # Redirect if the user must answer a survey before entering the course.
    if check_survey_complete and action == 'load':
        if is_survey_required_and_unanswered(user, course):
            raise CourseAccessRedirect(
                reverse('course_survey', args=[six.text_type(course.id)]))
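
A hedged usage sketch for guarding a courseware view; request and course are assumed to come from the surrounding Open edX code, and CourseAccessRedirect is handled further up the stack:

# Raises CourseAccessRedirect (or CoursewareAccessException) instead of
# returning when the learner may not load the course or is not enrolled.
check_course_access(course, request.user, 'load', check_if_enrolled=True)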
Example #53
0
import os
import uuid
import time
import six
import pytest
from lxml import etree
try:  # pragma: no cover
    from HTMLParser import HTMLParser
except ImportError:
    from html.parser import HTMLParser

from pyvas import Client, exceptions

HOST = os.environ.get("OPENVAS_HOST")
USERNAME = os.environ.get("OPENVAS_USER")
PASSWORD = os.environ.get("OPENVAS_PASSWORD")
NAME = six.text_type(uuid.uuid4())[:6]
LOCALHOST = "127.0.0.1"

print("\n\n== Environ ==")
print("HOST = {}".format(HOST))
print("=============\n")

slow = pytest.mark.skipif(not pytest.config.getoption("--slow"),
                          reason="need --slow option to run")


@pytest.fixture(scope="module")
def client(request):
    with Client(HOST, username=USERNAME, password=PASSWORD) as cli:
        yield cli
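
A small, hypothetical test showing how the module-scoped client fixture and the slow marker defined above would typically be combined; the test body is deliberately minimal.

@slow
def test_client_fixture_is_available(client):
    # client is the authenticated pyvas Client yielded by the fixture above.
    assert client is not None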
Example #54
0
    def entity(self, name):
        assert isinstance(name, string_types), type(name)

        return {"type": "Entity", "name": text_type(name)}
Example #55
0
# Copyright 2014 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.

import logging
import os
import sys
import unittest

import six

# Directory client/tests/
TESTS_DIR = os.path.dirname(os.path.abspath(six.text_type(__file__)))

# Directory client/
CLIENT_DIR = os.path.dirname(TESTS_DIR)
sys.path.insert(0, CLIENT_DIR)

# Fix import path.
sys.path.insert(0, os.path.join(
    CLIENT_DIR, 'third_party', 'httplib2', 'python%d' % sys.version_info.major))
sys.path.insert(0, os.path.join(CLIENT_DIR, 'third_party', 'pyasn1'))
sys.path.insert(0, os.path.join(CLIENT_DIR, 'third_party', 'rsa'))
sys.path.insert(0, os.path.join(CLIENT_DIR, 'third_party'))

# third_party/
from depot_tools import fix_encoding

from utils import fs

Example #56
0
    def test_get(self, client):
        with pytest.raises(exceptions.ElementNotFound):
            client._get('target',
                        uuid=six.text_type(uuid.uuid4()),
                        cb=lambda x: x)
Example #57
0
    def _backendException(self, e):
        return exception.VolumeBackendAPIException(data=six.text_type(e))
Example #58
0
def _recalculate_subsection_grade(self, **kwargs):
    """
    Updates a saved subsection grade.

    Keyword Arguments:
        user_id (int): id of applicable User object
        anonymous_user_id (int, OPTIONAL): Anonymous ID of the User
        course_id (string): identifying the course
        usage_id (string): identifying the course block
        only_if_higher (boolean): indicating whether grades should
            be updated only if the new raw_earned is higher than the
            previous value.
        expected_modified_time (serialized timestamp): indicates when the task
            was queued so that we can verify the underlying data update.
        score_deleted (boolean): indicating whether the grade change is
            a result of the problem's score being deleted.
        event_transaction_id (string): uuid identifying the current
            event transaction.
        event_transaction_type (string): human-readable type of the
            event at the root of the current event transaction.
        score_db_table (ScoreDatabaseTableEnum): database table that houses
            the changed score. Used in conjunction with expected_modified_time.
    """
    try:
        course_key = CourseLocator.from_string(kwargs['course_id'])
        if are_grades_frozen(course_key):
            log.info(u"Attempted _recalculate_subsection_grade for course '%s', but grades are frozen.", course_key)
            return

        scored_block_usage_key = UsageKey.from_string(kwargs['usage_id']).replace(course_key=course_key)

        set_custom_attributes_for_course_key(course_key)
        set_custom_attribute('usage_id', six.text_type(scored_block_usage_key))

        # The request cache is not maintained on celery workers,
        # where this code runs. So we take the values from the
        # main request cache and store them in the local request
        # cache. This correlates model-level grading events with
        # higher-level ones.
        set_event_transaction_id(kwargs.get('event_transaction_id'))
        set_event_transaction_type(kwargs.get('event_transaction_type'))

        # Verify the database has been updated with the scores when the task was
        # created. This race condition occurs if the transaction in the task
        # creator's process hasn't committed before the task initiates in the worker
        # process.
        has_database_updated = _has_db_updated_with_new_score(self, scored_block_usage_key, **kwargs)

        if not has_database_updated:
            raise DatabaseNotReadyError

        _update_subsection_grades(
            course_key,
            scored_block_usage_key,
            kwargs['only_if_higher'],
            kwargs['user_id'],
            kwargs['score_deleted'],
            kwargs.get('force_update_subsections', False),
        )
    except Exception as exc:
        if not isinstance(exc, KNOWN_RETRY_ERRORS):
            log.info(u"tnl-6244 grades unexpected failure: {}. task id: {}. kwargs={}".format(
                repr(exc),
                self.request.id,
                kwargs,
            ))
        raise self.retry(kwargs=kwargs, exc=exc)
Example #59
0
def devserver(reload, watchers, workers, browser_reload, styleguide, prefix,
              environment, bind):
    "Starts a lightweight web server for development."
    if ':' in bind:
        host, port = bind.split(':', 1)
        port = int(port)
    else:
        host = bind
        port = None

    import os

    os.environ['SENTRY_ENVIRONMENT'] = environment

    from django.conf import settings
    from sentry import options
    from sentry.services.http import SentryHTTPServer

    url_prefix = options.get('system.url-prefix', '')
    parsed_url = urlparse(url_prefix)
    # Make sure we're trying to use a port that we can actually bind to
    needs_https = (parsed_url.scheme == 'https'
                   and (parsed_url.port or 443) > 1024)
    has_https = False

    if needs_https:
        from subprocess import check_output
        try:
            check_output(['which', 'https'])
            has_https = True
        except Exception:
            has_https = False
            from sentry.runner.initializer import show_big_error
            show_big_error([
                'missing `https` on your `$PATH`, but https is needed',
                '`$ brew install mattrobenolt/stuff/https`',
            ])

    uwsgi_overrides = {
        # Make sure we don't try and use uwsgi protocol
        'protocol': 'http',
        # Make sure we reload really quickly for local dev in case it
        # doesn't want to shut down nicely on its own, NO MERCY
        'worker-reload-mercy': 2,
        # We need stdin to support pdb in devserver
        'honour-stdin': True,
        # accept ridiculously large files
        'limit-post': 1 << 30,
        # do something with chunked
        'http-chunked-input': True,
        'thunder-lock': False,
        'timeout': 600,
        'harakiri': 600,
    }

    if reload:
        uwsgi_overrides['py-autoreload'] = 1

    daemons = []

    if watchers:
        daemons += settings.SENTRY_WATCHERS

    # When using browser_reload we proxy all requests through webpack's
    # devserver on the configured port. The backend is served on port+1 and is
    # proxied via the webpack configuration.
    if watchers and browser_reload:
        proxy_port = port
        port = port + 1

        os.environ['SENTRY_WEBPACK_PROXY_PORT'] = '%s' % proxy_port
        os.environ['SENTRY_BACKEND_PORT'] = '%s' % port

        # Replace the webpack watcher with the drop-in webpack-dev-server
        webpack_config = next(w for w in daemons if w[0] == 'webpack')[1]
        webpack_config[0] = os.path.join(
            *os.path.split(webpack_config[0])[0:-1] + ('webpack-dev-server', ))

        daemons = [w for w in daemons if w[0] != 'webpack'] + [
            ('webpack', webpack_config),
        ]

    if workers:
        if settings.CELERY_ALWAYS_EAGER:
            raise click.ClickException(
                'Disable CELERY_ALWAYS_EAGER in your settings file to spawn workers.'
            )

        daemons += [
            ('worker', ['sentry', 'run', 'worker', '-c', '1', '--autoreload']),
            ('cron', ['sentry', 'run', 'cron', '--autoreload']),
        ]

        from sentry import eventstream
        if eventstream.requires_post_process_forwarder():
            daemons += [
                ('relay', [
                    'sentry', 'run', 'post-process-forwarder',
                    '--loglevel=debug', '--commit-batch-size=1'
                ]),
            ]

    if needs_https and has_https:
        https_port = six.text_type(parsed_url.port)
        https_host = parsed_url.hostname

        # Determine a random port for the backend http server
        import socket
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        s.bind((host, 0))
        port = s.getsockname()[1]
        s.close()
        bind = '%s:%d' % (host, port)

        daemons += [
            ('https', [
                'https', '-host', https_host, '-listen',
                host + ':' + https_port, bind
            ]),
        ]

    # A better log-format for local dev when running through honcho,
    # but if there aren't any other daemons, we don't want to override.
    if daemons:
        uwsgi_overrides[
            'log-format'] = '"%(method) %(uri) %(proto)" %(status) %(size)'
    else:
        uwsgi_overrides[
            'log-format'] = '[%(ltime)] "%(method) %(uri) %(proto)" %(status) %(size)'

    server = SentryHTTPServer(host=host,
                              port=port,
                              workers=1,
                              extra_options=uwsgi_overrides)

    # If we don't need any other daemons, just launch a normal uwsgi webserver
    # and avoid dealing with subprocesses
    if not daemons:
        return server.run()

    import sys
    from subprocess import list2cmdline
    from honcho.manager import Manager
    from honcho.printer import Printer

    os.environ['PYTHONUNBUFFERED'] = 'true'

    # Make sure that the environment is prepared before honcho takes over
    # This sets all the appropriate uwsgi env vars, etc
    server.prepare_environment()
    daemons += [
        ('server', ['sentry', 'run', 'web']),
    ]

    if styleguide:
        daemons += [('storybook', ['./bin/yarn', 'storybook'])]

    cwd = os.path.realpath(
        os.path.join(settings.PROJECT_ROOT, os.pardir, os.pardir))

    manager = Manager(Printer(prefix=prefix))
    for name, cmd in daemons:
        manager.add_process(
            name,
            list2cmdline(cmd),
            quiet=False,
            cwd=cwd,
        )

    manager.loop()
    sys.exit(manager.returncode)
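
The bind argument accepts either host or host:port; a self-contained sketch of the parsing done at the top of devserver() (the helper name is invented):

def parse_bind(bind):
    # A missing port stays None so the server can fall back to its default.
    if ':' in bind:
        host, port = bind.split(':', 1)
        return host, int(port)
    return bind, None

assert parse_bind('127.0.0.1:8000') == ('127.0.0.1', 8000)
assert parse_bind('0.0.0.0') == ('0.0.0.0', None)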
Example #60
0
def fire_ungenerated_certificate_task(user, course_key, expected_verification_status=None):
    """
    Helper function to fire certificate generation task.
    Auto-generation of certificates is available for the following course modes:
        1- VERIFIED
        2- CREDIT_MODE
        3- PROFESSIONAL
        4- NO_ID_PROFESSIONAL_MODE
        5- MASTERS
        6- EXECUTIVE_EDUCATION

    The certificate generation task is fired either to generate a certificate
    when there is no generated certificate for the user in a particular course,
    or to update a certificate if it has 'unverified' status.

    An update to a certificate with 'unverified' status is attempted because
    this method is called when a user is successfully verified, so any
    certificate associated with such a user can now be verified.

    NOTE: Purpose of restricting other course modes (HONOR and AUDIT) from auto-generation is to reduce
    traffic to workers.
    """

    message = u'Entered into Ungenerated Certificate task for {user} : {course}'
    log.info(message.format(user=user.id, course=course_key))

    if is_using_certificate_allowlist_and_is_on_allowlist(user, course_key):
        log.info('{course} is using allowlist certificates, and the user {user} is on its allowlist. Attempt will be '
                 'made to generate an allowlist certificate.'.format(course=course_key, user=user.id))
        generate_allowlist_certificate_task(user, course_key)
        return True

    log.info('{course} is not using allowlist certificates (or user {user} is not on its allowlist). The normal '
             'generation logic will be followed.'.format(course=course_key, user=user.id))

    allowed_enrollment_modes_list = [
        CourseMode.VERIFIED,
        CourseMode.CREDIT_MODE,
        CourseMode.PROFESSIONAL,
        CourseMode.NO_ID_PROFESSIONAL_MODE,
        CourseMode.MASTERS,
        CourseMode.EXECUTIVE_EDUCATION,
    ]
    enrollment_mode, __ = CourseEnrollment.enrollment_mode_for_user(user, course_key)
    cert = GeneratedCertificate.certificate_for_student(user, course_key)

    generate_learner_certificate = (
        enrollment_mode in allowed_enrollment_modes_list and (cert is None or cert.status == 'unverified')
    )

    if generate_learner_certificate:
        kwargs = {
            'student': six.text_type(user.id),
            'course_key': six.text_type(course_key)
        }
        if expected_verification_status:
            kwargs['expected_verification_status'] = six.text_type(expected_verification_status)
        generate_certificate.apply_async(countdown=CERTIFICATE_DELAY_SECONDS, kwargs=kwargs)
        return True

    message = u'Certificate Generation task failed for {user} : {course}'
    log.info(message.format(user=user.id, course=course_key))
    return False
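
A hedged usage sketch, assuming the surrounding Open edX certificates app; the 'approved' verification status string is an assumption rather than something taken from this snippet:

# Fire (or skip) the certificate generation task once a learner is verified;
# the helper returns True only when a task was actually queued.
fired = fire_ungenerated_certificate_task(
    user, course_key, expected_verification_status='approved')
if not fired:
    log.info(u'No certificate task queued for user %s in course %s', user.id, course_key)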