def test_switching_search_path(self):
        dummies_tenant1_count, dummies_tenant2_count = 0, 0

        tenant1 = Tenant(domain_url='test.com', schema_name='tenant1')
        tenant1.save()

        tenant2 = Tenant(domain_url='example.com', schema_name='tenant2')
        tenant2.save()

        # go to tenant1's path
        connection.set_tenant(tenant1)

        # add some data
        DummyModel(name="Schemas are").save()
        DummyModel(name="awesome!").save()
        dummies_tenant1_count = DummyModel.objects.count()

        # switch temporarily to tenant2's path
        with tenant_context(tenant2):
            # add some data
            DummyModel(name="Man,").save()
            DummyModel(name="testing").save()
            DummyModel(name="is great!").save()
            dummies_tenant2_count = DummyModel.objects.count()

        # we should be back to tenant1's path, test what we have
        self.assertEqual(DummyModel.objects.count(), dummies_tenant1_count)

        # switch back to tenant2's path
        with tenant_context(tenant2):
            self.assertEqual(DummyModel.objects.count(), dummies_tenant2_count)
    def test_switching_search_path(self):
        tenant1 = Tenant(domain_url='something.test.com',
                         schema_name='tenant1')
        tenant1.save()

        connection.set_schema_to_public()
        tenant2 = Tenant(domain_url='example.com', schema_name='tenant2')
        tenant2.save()

        # go to tenant1's path
        connection.set_tenant(tenant1)

        # add some data, 2 DummyModels for tenant1
        DummyModel(name="Schemas are").save()
        DummyModel(name="awesome!").save()

        # switch temporarily to tenant2's path
        with tenant_context(tenant2):
            # add some data, 3 DummyModels for tenant2
            DummyModel(name="Man,").save()
            DummyModel(name="testing").save()
            DummyModel(name="is great!").save()

        # we should be back to tenant1's path, test what we have
        self.assertEqual(2, DummyModel.objects.count())

        # switch back to tenant2's path
        with tenant_context(tenant2):
            self.assertEqual(3, DummyModel.objects.count())
    @classmethod
    def setUpClass(cls):
        super(SharedAuthTest, cls).setUpClass()
        settings.SHARED_APPS = ('tenant_schemas',
                                'django.contrib.auth',
                                'django.contrib.contenttypes', )
        settings.TENANT_APPS = ('dts_test_app', )
        settings.INSTALLED_APPS = settings.SHARED_APPS + settings.TENANT_APPS
        cls.sync_shared()
        Tenant(domain_url='test.com', schema_name=get_public_schema_name()).save()

        # Create a tenant
        cls.tenant = Tenant(domain_url='tenant.test.com', schema_name='tenant')
        cls.tenant.save()

        # Create some users
        with schema_context(get_public_schema_name()):  # this could actually also be executed inside a tenant
            cls.user1 = User(username='******', email="*****@*****.**")
            cls.user1.save()
            cls.user2 = User(username='******', email="*****@*****.**")
            cls.user2.save()

        # Create instances on the tenant that point to the users on public
        with tenant_context(cls.tenant):
            cls.d1 = ModelWithFkToPublicUser(user=cls.user1)
            cls.d1.save()
            cls.d2 = ModelWithFkToPublicUser(user=cls.user2)
            cls.d2.save()
Example #4
 def wrapper(*args, **kwargs):
     if settings.MULTI_TENANT:
         for tenant in get_tenant_model().objects.exclude(schema_name="public"):
             with tenant_context(tenant):
                 f(*args, **kwargs)
     else:
         f(*args, **kwargs)
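
The fragment above is only the inner wrapper of a decorator; the outer function that receives f is not shown in the example. A minimal sketch of how such a decorator could be assembled is given below; the name for_each_tenant, the imports, and the use of functools.wraps are assumptions for illustration, not part of the original code.

import functools

from django.conf import settings
from tenant_schemas.utils import get_tenant_model, tenant_context


def for_each_tenant(f):
    """Run the decorated callable once inside every non-public tenant schema."""
    # Hypothetical outer decorator: only the wrapper body comes from the example.
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        if settings.MULTI_TENANT:
            for tenant in get_tenant_model().objects.exclude(schema_name="public"):
                with tenant_context(tenant):
                    f(*args, **kwargs)
        else:
            f(*args, **kwargs)
    return wrapper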
Example #5
    def receive(self, content, **kwargs):
        tenant_pk = self.message.channel_session.get('tenant')
        logger.info('tenant pk: {}'.format(tenant_pk))

        if tenant_pk is None:
            logger.error('TransactionConsumer tenant not in session')
            return

        try:
            tenant = get_tenant_model().objects.get(
                pk=tenant_pk
            )
        except get_tenant_model().DoesNotExist:
            return

        with tenant_context(tenant):
            more_blocks = Block.objects.exclude(
                height__isnull=True
            ).filter(
                height__lt=content.get('height')
            ).order_by(
                '-height'
            )[:20]

            kwargs.get('multiplexer').send(
                {
                    "more_blocks": [
                        block.serialize() for block in more_blocks
                    ]
                }
            )
Example #6
    def receive(self, content, multiplexer, **kwargs):
        logger.info(multiplexer)
        tenant_pk = self.message.channel_session.get('tenant')
        logger.info('tenant pk: {}'.format(tenant_pk))

        if tenant_pk is None:
            logger.error('TransactionConsumer tenant not in session')
            logger.info(self.message.content)
            return

        try:
            tenant = get_tenant_model().objects.get(
                pk=tenant_pk
            )
        except get_tenant_model().DoesNotExist:
            return

        with tenant_context(tenant):
            try:
                block = Block.objects.get(height=int(content.get('height')))
            except Block.DoesNotExist:
                return

            multiplexer.send(
                {
                    'transactions': [
                        tx.serialize() for tx in block.transactions.all()
                    ]
                }
            )
Example #7
    def receive(self, content, multiplexer, **kwargs):
        logger.info(multiplexer)
        logger.info(self.message)
        tenant_pk = self.message.channel_session.get('tenant')
        logger.info('tenant pk: {}'.format(tenant_pk))

        if tenant_pk is None:
            logger.error('TransactionConsumer tenant not in session')
            logger.info(self.message.channel)
            return

        try:
            tenant = get_tenant_model().objects.get(
                pk=tenant_pk
            )
        except get_tenant_model().DoesNotExist:
            return

        with tenant_context(tenant):
            try:
                block = Block.objects.get(height=content.get('height'))
            except Block.DoesNotExist:
                return

            multiplexer.send(
                {
                    'block': block.serialize(),
                    'next_block': block.next_block.height if block.next_block else None,
                    'previous_block': block.previous_block.height if block.previous_block else None,
                }
            )
Example #8
def create_superuser(tenant, email):
    # raise Exception(email)
    with tenant_context(tenant):
        USER_MODEL = get_user_model()
        # Create a superuser for the given tenant
        # we set a dummy password for now ...
        # we should also send an activation mail, as we set no password
        USER_MODEL.objects.create_superuser(email=email, password="******")
 def test_direct_relation_to_public(self):
     """
     Tests that a forward relationship through a foreign key to public from a model inside TENANT_APPS works.
     """
     with tenant_context(self.tenant):
         self.assertEqual(User.objects.get(pk=self.user1.id),
                          ModelWithFkToPublicUser.objects.get(pk=self.d1.id).user)
         self.assertEqual(User.objects.get(pk=self.user2.id),
                          ModelWithFkToPublicUser.objects.get(pk=self.d2.id).user)
 def test_reverse_relation_to_public(self):
     """
     Tests that a reverse relationship through a foreign key to public from a model inside TENANT_APPS works.
     """
     with tenant_context(self.tenant):
         users = User.objects.all().select_related().order_by('id')
         self.assertEqual(ModelWithFkToPublicUser.objects.get(pk=self.d1.id),
                          users[0].modelwithfktopublicuser_set.all()[:1].get())
         self.assertEqual(ModelWithFkToPublicUser.objects.get(pk=self.d2.id),
                          users[1].modelwithfktopublicuser_set.all()[:1].get())
    def test_switching_tenant_without_previous_tenant(self):
        tenant = Tenant(domain_url='something.test.com', schema_name='test')
        tenant.save()

        connection.tenant = None
        with tenant_context(tenant):
            DummyModel(name="No exception please").save()

        connection.tenant = None
        with schema_context(tenant.schema_name):
            DummyModel(name="Survived it!").save()
Example #12
def event_generator(tenant_id, user_list, object_id, content_id):
    tenant = get_object_or_None(Customer, id=tenant_id)
    if not tenant:
        return
    with tenant_context(tenant):
        try:
            content_type_object = ContentType.objects.get(id=content_id)
            content_object = content_type_object\
                .get_object_for_this_type(id=object_id)

        except ObjectDoesNotExist:
            return

        recipients = get_user_model().objects.filter(id__in=user_list)

        for recipient in recipients:
            content_object.create_event_object(user=recipient)
Example #13
def _make_role(tenant, data, update=False):
    """Create the role object in the database."""
    with tenant_context(tenant):
        name = data.pop('name')
        description = data.pop('description', None)
        access_list = data.pop('access')
        version = data.pop('version', 1)
        version_diff = False
        is_platform_default = data.pop('platform_default', False)
        if update:
            role, created = Role.objects.filter(name=name).get_or_create(
                name=name)
            version_diff = version != role.version
            if created or (not created and version_diff):
                logger.info('Updating role %s for tenant %s.', name,
                            tenant.schema_name)
                role.description = description
                role.system = True
                role.version = version
                role.platform_default = is_platform_default
                role.save()
                role.access.all().delete()
        else:
            role = Role.objects.create(name=name,
                                       description=description,
                                       system=True,
                                       version=version,
                                       platform_default=is_platform_default)
            logger.info('Creating role %s for tenant %s.', name,
                        tenant.schema_name)
        if not update or (update and version_diff):
            for access_item in access_list:
                resource_def_list = access_item.pop('resourceDefinitions', [])
                access_obj = Access.objects.create(**access_item, role=role)
                access_obj.save()
                for resource_def_item in resource_def_list:
                    res_def = ResourceDefinition.objects.create(
                        **resource_def_item, access=access_obj)
                    res_def.save()
    return role
Example #14
    def test_get_cluster_capacity_daily_resolution_group_by_clusters(self):
        """Test that cluster capacity returns daily capacity by cluster."""
        url = '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=daily&group_by[cluster]=*'
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = handler.execute_query()

        daily_capacity_by_cluster = defaultdict(dict)
        total_capacity = Decimal(0)
        query_filter = handler.query_filter
        query_group_by = ['usage_start', 'cluster_id']
        annotations = {'capacity': Max('cluster_capacity_cpu_core_hours')}
        cap_key = list(annotations.keys())[0]

        q_table = handler._mapper.query_table
        query = q_table.objects.filter(query_filter)

        with tenant_context(self.tenant):
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                date = handler.date_to_string(entry.get('usage_start'))
                cluster_id = entry.get('cluster_id', '')
                if cluster_id in daily_capacity_by_cluster[date]:
                    daily_capacity_by_cluster[date][cluster_id] += entry.get(cap_key, 0)
                else:
                    daily_capacity_by_cluster[date][cluster_id] = entry.get(cap_key, 0)
                total_capacity += entry.get(cap_key, 0)

        for entry in query_data.get('data', []):
            date = entry.get('date')
            for cluster in entry.get('clusters', []):
                cluster_name = cluster.get('cluster', '')
                capacity = cluster.get('values')[0].get('capacity', {}).get('value')
                self.assertEqual(
                    capacity, daily_capacity_by_cluster[date][cluster_name]
                )

        self.assertEqual(
            query_data.get('total', {}).get('capacity', {}).get('value'), total_capacity
        )
Example #15
    def setUp(self):
        """Set up the role viewset tests."""
        super().setUp()
        request = self.request_context['request']
        user = User()
        user.username = self.user_data['username']
        user.account = self.customer_data['account_id']
        request.user = user

        sys_role_config = {'name': 'system_role', 'system': True}

        def_role_config = {'name': 'default_role', 'platform_default': True}

        self.display_fields = {
            'applications', 'description', 'uuid', 'name', 'system', 'created',
            'policyCount', 'accessCount', 'modified', 'platform_default'
        }

        with tenant_context(self.tenant):
            self.principal = Principal(username=self.user_data['username'])
            self.principal.save()
            self.policy = Policy.objects.create(name='policyA')
            self.group = Group(name='groupA', description='groupA description')
            self.group.save()
            self.group.principals.add(self.principal)
            self.group.policies.add(self.policy)
            self.group.save()

            self.sysRole = Role(**sys_role_config)
            self.sysRole.save()

            self.defRole = Role(**def_role_config)
            self.defRole.save()
            self.defRole.save()

            self.policy.roles.add(self.defRole, self.sysRole)
            self.policy.save()

            self.access = Access.objects.create(permission='app:*:*',
                                                role=self.defRole)
Example #16
    def test_execute_query_with_and_filter(self):
        """Test the filter[and:] param in the view."""
        url = reverse("openshift-tags")
        client = APIClient()

        with tenant_context(self.tenant):
            projects = (OCPUsageLineItemDailySummary.objects.filter(
                usage_start__gte=self.ten_days_ago).values(
                    "namespace").distinct())
            projects = [project.get("namespace") for project in projects]
        params = {
            "filter[resolution]": "daily",
            "filter[time_scope_value]": "-10",
            "filter[time_scope_units]": "day",
            "filter[and:project]": projects,
        }
        url = url + "?" + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        response_data = response.json()
        self.assertEqual(response_data.get("data", []), [])
Example #17
def tenant_view(request, tenant_schema_name):
    """View method for internal tenant requests.

    DELETE /_private/api/tenant/<schema_name>/
    """
    logger.info(f"Tenant view: {request.method} {request.user.username}")
    if request.method == "DELETE":
        if not destructive_ok():
            return HttpResponse("Destructive operations disallowed.",
                                status=400)

        tenant_obj = get_object_or_404(Tenant, schema_name=tenant_schema_name)
        with tenant_context(tenant_obj):
            if tenant_is_unmodified():
                logger.warning(
                    f"Deleting tenant {tenant_schema_name}. Requested by {request.user.username}"
                )
                tenant_obj.delete()
                return HttpResponse(status=204)
            else:
                return HttpResponse("Tenant cannot be deleted.", status=400)
    return HttpResponse(f'Method "{request.method}" not allowed.', status=405)
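
For reference, an internal endpoint like this is normally wired up in a urls.py entry that captures the schema name from the path. The sketch below is an assumption about that wiring (the import path and URL name are hypothetical); it only mirrors the route documented in the view's docstring.

from django.urls import path

from internal.views import tenant_view  # hypothetical import path

urlpatterns = [
    # DELETE /_private/api/tenant/<schema_name>/ as described in the docstring.
    path("_private/api/tenant/<str:tenant_schema_name>/", tenant_view, name="internal-tenant"),
]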
Example #18
    def destroy(self, request, *args, **kwargs):
        """Delete a group.

        @api {delete} /api/v1/groups/:uuid   Delete a group
        @apiName deleteGroup
        @apiGroup Group
        @apiVersion 1.0.0
        @apiDescription Delete a group

        @apiHeader {String} token User authorization token

        @apiParam (Path) {String} uuid Group unique identifier

        @apiSuccessExample {json} Success-Response:
            HTTP/1.1 204 NO CONTENT
        """
        validate_uuid(kwargs.get("uuid"), "group uuid validation")
        self.protect_default_groups("delete")
        group_name = Group.objects.get(uuid=kwargs.get("uuid")).name
        with tenant_context(Tenant.objects.get(schema_name="public")):
            Group.objects.filter(name=group_name, tenant=request.tenant).delete()
        return super().destroy(request=request, args=args, kwargs=kwargs)
    def test_remove_aws_auth_billing_remain(self):
        """Remove aws provider."""
        # Create Provider
        provider_authentication = ProviderAuthentication.objects.create(provider_resource_name='arn:aws:iam::2:role/mg')
        provider_authentication2 = ProviderAuthentication.objects.create(
            provider_resource_name='arn:aws:iam::3:role/mg'
        )
        provider_billing = ProviderBillingSource.objects.create(bucket='my_s3_bucket')
        provider = Provider.objects.create(name='awsprovidername',
                                           created_by=self.user,
                                           customer=self.customer,
                                           authentication=provider_authentication,
                                           billing_source=provider_billing)
        provider2 = Provider.objects.create(name='awsprovidername2',
                                            created_by=self.user,
                                            customer=self.customer,
                                            authentication=provider_authentication2,
                                            billing_source=provider_billing)
        provider_uuid = provider2.uuid

        self.assertNotEqual(provider.uuid, provider2.uuid)
        new_user_dict = self._create_user_data()
        request_context = self._create_request_context(self.customer_data,
                                                       new_user_dict, False)
        user_serializer = UserSerializer(data=new_user_dict, context=request_context)
        other_user = None
        if user_serializer.is_valid(raise_exception=True):
            other_user = user_serializer.save()

        with tenant_context(self.tenant):
            manager = ProviderManager(provider_uuid)
            manager.remove(self._create_delete_request(other_user))
        auth_count = ProviderAuthentication.objects.count()
        billing_count = ProviderBillingSource.objects.count()
        provider_query = Provider.objects.all().filter(uuid=provider_uuid)

        self.assertFalse(provider_query)
        self.assertEqual(auth_count, 1)
        self.assertEqual(billing_count, 1)
Example #20
    def test_execute_query_ocp_aws_costs_group_by_project(self):
        """Test that grouping by project filters data."""
        with tenant_context(self.tenant):
            # Force Django to do GROUP BY to get nodes
            projects = (OCPAWSCostLineItemDailySummary.objects.filter(
                usage_start__gte=self.ten_days_ago).values(
                    *["namespace"]).annotate(
                        project_count=Count("namespace")).all())
            project_of_interest = projects[0].get("namespace")

        url = reverse("reports-openshift-aws-costs")
        client = APIClient()
        params = {"group_by[project]": project_of_interest}

        url = url + "?" + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        data = response.json()
        for entry in data.get("data", []):
            for project in entry.get("projects", []):
                self.assertEqual(project.get("project"), project_of_interest)
Example #21
    def test_tiered_rate_with_duplicate(self):
        """Test creating a tiered rate with duplicate tiers."""
        self.ocp_data['rates'][0]['tiered_rates'] = [
            {
                'unit': 'USD',
                'value': 0.22,
                'usage': {
                    'usage_start': None,
                    'usage_end': 10.0
                }
            }, {
                'unit': 'USD',
                'value': 0.26,
                'usage': {
                    'usage_start': 10.0,
                    'usage_end': 20.0
                }
            }, {
                'unit': 'USD',
                'value': 0.26,
                'usage': {
                    'usage_start': 10.0,
                    'usage_end': 20.0
                }
            }, {
                'unit': 'USD',
                'value': 0.26,
                'usage': {
                    'usage_start': 20.0,
                    'usage_end': None
                }
            }
        ]

        with tenant_context(self.tenant):
            serializer = CostModelSerializer(data=self.ocp_data)
            with self.assertRaises(serializers.ValidationError):
                if serializer.is_valid(raise_exception=True):
                    serializer.save()
Example #22
    def test_product_update(self, member1, client1, planninguser1):
        with tenant_context(member1.tenant):
            client1.force_login(planninguser1)
            product = factories.ProductFactory(
                name='test',
                identifier='bla',
            )

        response = client1.put(
            reverse('purchase-product-detail', kwargs={'pk': product.id}), {
                'name': 'test name',
                'identifier': 'bla',
                'price_purchase': '0.00',
                'price_selling': '0.00',
                'price_selling_alt': '0.00',
                'price_purchase_ex': '0.00',
                'price_selling_ex': '0.00',
                'price_selling_alt_ex': '0.00',
            })

        assert response.status_code == status.HTTP_200_OK
        assert response.data['name'] == 'test name'
Example #23
    def test_create_storage_no_tiers_rate(self):
        """Test creating a non tiered storage rate."""
        storage_rates = (
            metric_constants.OCP_METRIC_STORAGE_GB_REQUEST_MONTH,
            metric_constants.OCP_METRIC_STORAGE_GB_USAGE_MONTH,
        )
        for storage_rate in storage_rates:
            ocp_data = {
                "name": "Test Cost Model",
                "description": "Test",
                "source_type": Provider.PROVIDER_OCP,
                "providers": [{"uuid": self.provider.uuid, "name": self.provider.name}],
                "rates": [{"metric": {"name": storage_rate}, "tiered_rates": [{"unit": "USD", "value": 0.22}]}],
            }

            with tenant_context(self.tenant):
                instance = None
                serializer = CostModelSerializer(data=ocp_data)
                if serializer.is_valid(raise_exception=True):
                    instance = serializer.save()
                self.assertIsNotNone(instance)
                self.assertIsNotNone(instance.uuid)
Example #24
    def test_get_cluster_capacity_daily_resolution(self):
        """Test that total capacity is returned daily resolution."""
        url = '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=daily'
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = handler.execute_query()

        daily_capacity = defaultdict(Decimal)
        total_capacity = Decimal(0)
        query_filter = handler.query_filter
        query_group_by = ['usage_start']
        annotations = {'capacity': Max('total_capacity_cpu_core_hours')}
        cap_key = list(annotations.keys())[0]

        q_table = handler._mapper.provider_map.get('tables').get('query')
        query = q_table.objects.filter(query_filter)

        with tenant_context(self.tenant):
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                date = handler.date_to_string(entry.get('usage_start'))
                daily_capacity[date] += entry.get(cap_key, 0)
            # This is a hack because the total capacity in the test data
            # is artificial but the total should still be a sum of
            # cluster capacities
            annotations = {'capacity': Max('cluster_capacity_cpu_core_hours')}
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                total_capacity += entry.get(cap_key, 0)

        self.assertEqual(
            query_data.get('total', {}).get('capacity', {}).get('value'), total_capacity
        )
        for entry in query_data.get('data', []):
            date = entry.get('date')
            values = entry.get('values')
            if values:
                capacity = values[0].get('capacity', {}).get('value')
                self.assertEqual(capacity, daily_capacity[date])
Example #25
 def test_delete_single_provider_with_cost_model(self, mock_delete_archived_data, mock_commit):
     """Assert the cost models are deleted upon provider instance delete."""
     mock_commit.side_effect = mock_delete_archived_data.delay(
         self.schema, Provider.PROVIDER_AWS, self.aws_provider_uuid
     )
     provider_uuid = self.aws_provider.uuid
     data = {
         "name": "Test Cost Model",
         "description": "Test",
         "rates": [],
         "markup": {"value": FAKE.pyint() % 100, "unit": "percent"},
         "provider_uuids": [provider_uuid],
     }
     with tenant_context(self.tenant):
         manager = CostModelManager()
         with patch("cost_models.cost_model_manager.chain"):
             manager.create(**data)
         cost_model_map = CostModelMap.objects.filter(provider_uuid=provider_uuid)
         self.assertIsNotNone(cost_model_map)
         self.aws_provider.delete()
         self.assertEquals(0, CostModelMap.objects.filter(provider_uuid=provider_uuid).count())
     mock_delete_archived_data.delay.assert_called_with(self.schema, Provider.PROVIDER_AWS, self.aws_provider_uuid)
Example #26
    def test_process_multiple_tag_query_params(self):
        """Test that grouping by multiple tag keys returns a valid response."""
        with tenant_context(self.tenant):
            labels = (OCPAWSCostLineItemDailySummary.objects.filter(
                usage_start__gte=self.ten_days_ago).values(*["tags"]).first())
            self.assertIsNotNone(labels)
            tags = labels.get("tags")

        qstr = "filter[limit]=2"

        # pick a random subset of tags
        kval = len(tags.keys())
        if kval > 2:
            kval = random.randint(2, len(tags.keys()))
        selected_tags = random.choices(list(tags.keys()), k=kval)
        for tag in selected_tags:
            qstr += f"&group_by[tag:{tag}]=*"

        url = reverse("reports-openshift-aws-costs") + "?" + qstr
        client = APIClient()
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
Example #27
    def test_execute_query_ocp_aws_storage_with_group_by_tag_and_limit(self):
        """Test that data is grouped by tag key and limited."""
        with tenant_context(self.tenant):
            labels = (OCPAWSCostLineItemDailySummary.objects.filter(
                usage_start__gte=self.dh.last_month_start).filter(
                    usage_start__lte=self.dh.last_month_end).filter(
                        product_family__contains="Storage").values(
                            *["tags"]).first())
            self.assertIsNotNone(labels)
            self.assertNotEqual(len(labels), 0)
            tags = labels.get("tags")
            group_by_key = list(tags.keys())[0]
            plural_key = group_by_key + "s"

        url = reverse("reports-openshift-aws-storage")
        client = APIClient()
        params = {
            "filter[resolution]": "monthly",
            "filter[time_scope_value]": "-2",
            "filter[time_scope_units]": "month",
            f"group_by[tag:{group_by_key}]": "*",
            "filter[limit]": 2,
        }
        url = url + "?" + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        data = response.json()
        data = data.get("data", [])
        # default ordered by usage
        previous_tag_usage = data[0].get(plural_key, [])[0].get(
            "values", [{}])[0].get("usage", {}).get("value")
        for entry in data[0].get(plural_key, []):
            current_tag_usage = entry.get("values",
                                          [{}])[0].get("usage",
                                                       {}).get("value")
            if "Other" not in entry.get(group_by_key):
                self.assertTrue(current_tag_usage <= previous_tag_usage)
                previous_tag_usage = current_tag_usage
Example #28
    def test_execute_query_ocp_aws_storage_with_tag_filter(self):
        """Test that data is filtered by tag key."""
        with tenant_context(self.tenant):
            labels = (OCPAWSCostLineItemDailySummary.objects.filter(
                usage_start__gte=self.ten_days_ago).filter(
                    product_family__contains="Storage").values(
                        *["tags"]).first())
            self.assertIsNotNone(labels)
            tags = labels.get("tags")
            filter_key = list(tags.keys())[0]
            filter_value = tags.get(filter_key)

            totals = (OCPAWSCostLineItemDailySummary.objects.filter(
                usage_start__gte=self.ten_days_ago).filter(**{
                    f"tags__{filter_key}":
                    filter_value
                }).filter(product_family__contains="Storage").aggregate(
                    **{
                        "usage": Sum("usage_amount"),
                        "cost": Sum(F("unblended_cost") + F("markup_cost"))
                    }))

        url = reverse("reports-openshift-aws-storage")
        client = APIClient()
        params = {f"filter[tag:{filter_key}]": filter_value}

        url = url + "?" + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        data = response.json()
        data_totals = data.get("meta", {}).get("total", {})
        for key in totals:
            expected = float(totals[key])
            if key == "cost":
                result = data_totals.get(key, {}).get("total").get("value")
            else:
                result = data_totals.get(key, {}).get("value")
            self.assertEqual(result, expected)
Example #29
    def setUp(self):
        """Set up the rate view tests."""
        super().setUp()
        request = self.request_context['request']
        serializer = UserSerializer(data=self.user_data,
                                    context=self.request_context)
        if serializer.is_valid(raise_exception=True):
            user = serializer.save()
            request.user = user

        provider_data = {
            'name': 'test_provider',
            'type': Provider.PROVIDER_OCP,
            'authentication': {
                'provider_resource_name': self.fake.word()
            }
        }
        serializer = ProviderSerializer(data=provider_data,
                                        context=self.request_context)
        if serializer.is_valid(raise_exception=True):
            self.provider = serializer.save()

        self.fake_data = {
            'provider_uuid': self.provider.uuid,
            'metric': Rate.METRIC_MEM_GB_USAGE_HOUR,
            'tiered_rate': [{
                'value': round(Decimal(random.random()), 6),
                'unit': 'USD',
                'usage_start': None,
                'usage_end': None
            }]
        }
        with tenant_context(self.tenant):
            serializer = RateSerializer(data=self.fake_data,
                                        context=self.request_context)
            if serializer.is_valid(raise_exception=True):
                serializer.save()
Example #30
    def _get_tenant_provider_stats(self, provider, tenant, period_start):
        """Return provider statistics for schema."""
        stats = {}
        query = None
        with tenant_context(tenant):
            if provider.type == 'OCP':
                query = OCPUsageReportPeriod.objects.filter(provider_id=provider.id,
                                                            report_period_start=period_start).first()
            elif provider.type == 'AWS' or provider.type == 'AWS-local':
                query = AWSCostEntryBill.objects.filter(provider_id=provider.id,
                                                        billing_period_start=period_start).first()
            elif provider.type == 'AZURE' or provider.type == 'AZURE-local':
                query = AzureCostEntryBill.objects.filter(provider_id=provider.id,
                                                          billing_period_start=period_start).first()
        if query and query.summary_data_creation_datetime:
            stats['summary_data_creation_datetime'] = query.summary_data_creation_datetime.strftime(DATE_TIME_FORMAT)
        if query and query.summary_data_updated_datetime:
            stats['summary_data_updated_datetime'] = query.summary_data_updated_datetime.strftime(DATE_TIME_FORMAT)
        if query and query.derived_cost_datetime:
            stats['derived_cost_datetime'] = query.derived_cost_datetime.strftime(DATE_TIME_FORMAT)

        return stats
Example #31
    def test_execute_query_with_and_filter_project(self):
        """Test the filter[and:] param in the view for project."""
        url = reverse("gcp-tags")
        client = APIClient()

        with tenant_context(self.tenant):
            subs = (GCPCostEntryLineItemDailySummary.objects.filter(
                usage_start__gte=self.ten_days_ago).values(
                    "project_id").distinct())
            project_id = [sub.get("project_id") for sub in subs]
        params = {
            "filter[resolution]": "daily",
            "filter[time_scope_value]": "-10",
            "filter[time_scope_units]": "day",
            "filter[and:project]": project_id,
        }
        url = url + "?" + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        data = response.json().get("data")
        self.assertEqual(data, [])
Example #32
def clean_tenant_principals(tenant):
    """Check if all the principals in the tenant exist, remove non-existent principals."""
    with tenant_context(tenant):
        removed_principals = []
        principals = list(Principal.objects.all())
        logger.info("Running clean up on %d principals for tenant %s.",
                    len(principals), tenant.schema_name)
        for principal in principals:
            if principal.cross_account:
                continue
            logger.debug("Checking for username %s for tenant %s.",
                         principal.username, tenant.schema_name)
            resp = proxy.request_filtered_principals([principal.username])
            status_code = resp.get("status_code")
            data = resp.get("data")
            if status_code == status.HTTP_200_OK and data:
                logger.debug(
                    "Username %s found for tenant %s, no change needed.",
                    principal.username, tenant.schema_name)
            elif status_code == status.HTTP_200_OK and not data:
                removed_principals.append(principal.username)
                principal.delete()
                logger.info(
                    "Username %s not found for tenant %s, principal removed.",
                    principal.username, tenant.schema_name)
            else:
                logger.warn(
                    "Unknown status %d when checking username %s"
                    " for tenant %s, no change needed.",
                    status_code,
                    principal.username,
                    tenant.schema_name,
                )
        logger.info(
            "Completed clean up of %d principals for tenant %s, %d removed.",
            len(principals),
            tenant.schema_name,
            len(removed_principals),
        )
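
A cleanup function like this is typically driven from a scheduled task or management command that walks every tenant. The loop below is a minimal sketch of such a driver, assuming the helpers exported by tenant_schemas.utils; it is not part of the original example.

from tenant_schemas.utils import get_public_schema_name, get_tenant_model


def clean_all_tenant_principals():
    """Hypothetical driver: run the cleanup for every non-public tenant."""
    tenants = get_tenant_model().objects.exclude(schema_name=get_public_schema_name())
    for tenant in tenants:
        # clean_tenant_principals() enters the tenant schema itself via
        # tenant_context(), so no schema switching is needed here.
        clean_tenant_principals(tenant)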
Example #33
    def test_execute_query_with_and_filter(self):
        """Test the filter[and:] param in the view."""
        url = reverse('openshift-tags')
        client = APIClient()

        with tenant_context(self.tenant):
            projects = OCPUsageLineItemDailySummary.objects\
                .filter(usage_start__gte=self.ten_days_ago)\
                .values('namespace').distinct()
            projects = [project.get('namespace') for project in projects]
        params = {
            'filter[resolution]': 'daily',
            'filter[time_scope_value]': '-10',
            'filter[time_scope_units]': 'day',
            'filter[and:project]': projects
        }
        url = url + '?' + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        response_data = response.json()
        self.assertEqual(response_data.get('data', []), [])
Example #34
    def test_execute_query_ocp_aws_storage_group_by_node(self):
        """Test that grouping by node filters data."""
        with tenant_context(self.tenant):
            # Force Django to do GROUP BY to get nodes
            nodes = (OCPAWSCostLineItemDailySummary.objects.values(
                *["node"]).filter(usage_start__gte=self.ten_days_ago).filter(
                    product_family__contains="Storage").values(
                        *["node"]).annotate(node_count=Count("node")).all())
            node_of_interest = nodes[0].get("node")

        url = reverse("reports-openshift-aws-storage")
        client = APIClient()
        params = {"group_by[node]": node_of_interest}

        url = url + "?" + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        data = response.json()
        for entry in data.get("data", []):
            for node in entry.get("nodes", []):
                self.assertEqual(node.get("node"), node_of_interest)
Example #35
    def predict(self):
        """Define ORM query to run forecast and return prediction."""
        cost_predictions = {}
        with tenant_context(self.params.tenant):
            data = (self.cost_summary_table.objects.filter(
                self.filters.compose()).order_by("usage_start").values(
                    "usage_start").annotate(
                        total_cost=self.total_cost_term,
                        supplementary_cost=self.supplementary_cost_term,
                        infrastructure_cost=self.infrastructure_cost_term,
                    ))

            for fieldname in [
                    "total_cost", "infrastructure_cost", "supplementary_cost"
            ]:
                uniq_data = self._uniquify_qset(data.values(
                    "usage_start", fieldname),
                                                field=fieldname)
                cost_predictions[fieldname] = self._predict(uniq_data)

            cost_predictions = self._key_results_by_date(cost_predictions)
            return self.format_result(cost_predictions)
Example #36
    def get_tag_keys(self, filters=True):
        """Get a list of tag keys to validate filters."""
        type_filter = self.parameter_filter.get('type')
        tag_keys = []
        with tenant_context(self.tenant):
            for source in self.data_sources:
                tag_keys_query = source.get('db_table').objects
                if filters is True:
                    tag_keys_query = tag_keys_query.filter(self.query_filter)

                if type_filter and type_filter != source.get('type'):
                    continue

                tag_keys_query = tag_keys_query.annotate(tag_keys=JSONBObjectKeys(source.get('db_column')))\
                    .values('tag_keys')\
                    .distinct()\
                    .all()
                tag_keys_query = [tag.get('tag_keys') for tag in tag_keys_query]
                for tag_key in tag_keys_query:
                    tag_keys.append(tag_key)

        return list(set(tag_keys))
Example #37
    def test_create_storage_no_tiers_rate(self):
        """Test creating a non tiered storage rate."""
        storage_rates = (Rate.METRIC_STORAGE_GB_REQUEST_MONTH,
                         Rate.METRIC_STORAGE_GB_USAGE_MONTH)
        for storage_rate in storage_rates:
            rate = {
                'provider_uuid': self.provider.uuid,
                'metric': storage_rate,
                'tiered_rate': [{
                    'unit': 'USD',
                    'value': 0.22
                }]
            }

            with tenant_context(self.tenant):
                instance = None
                serializer = RateSerializer(data=rate)
                if serializer.is_valid(raise_exception=True):
                    instance = serializer.save()

                self.assertIsNotNone(instance)
                self.assertIsNotNone(instance.uuid)
Example #38
def create_or_update_superuser(tenant, old_email, email):

    with tenant_context(tenant):
        USER_MODEL = get_user_model()
        # Create a superuser for the given tenant
        # we set a dummy password for now ...
        # we should also send an activation mail, as we set no password

        try:
            u = USER_MODEL.objects.get(email=old_email)
            u.is_superuser = True
            u.email = email
            u.save()

        except USER_MODEL.DoesNotExist:
            try:
                u = USER_MODEL.objects.get(email=email)
                u.is_superuser = True
                u.email = email
                u.save()
            except USER_MODEL.DoesNotExist:
                USER_MODEL.objects.create_superuser(email=email, password="******")
 def copy_custom_groups_to_public(self, tenant):
     """Copy custom groups from provided tenant to the public schema."""
     groups = Group.objects.filter(system=False)
     public_schema = Tenant.objects.get(schema_name="public")
     for group in groups:
         self.stdout.write(f"Copying group {group.name} to public schema for tenant {tenant}.")
         if not group.tenant:
             group.tenant = tenant
             group.save()
         principals = list(group.principals.all())
         new_principals = []
         with tenant_context(public_schema):
             self.clear_pk(group)
             try:
                 group.save()
             except IntegrityError as err:
                 self.stderr.write(f"Couldn't copy group {group.name}. Skipping due to:\n{err}")
                 continue
             for principal in principals:
                 new_principals.append(Principal.objects.get(username=principal.username, tenant=tenant))
             group.principals.set(new_principals)
             group.save()
Example #40
    def get_tag_keys(self, filters=True):
        """Get a list of tag keys to validate filters."""
        type_filter = self.parameters.get_filter("type")
        tag_keys = set()
        with tenant_context(self.tenant):
            for source in self.data_sources:
                tag_keys_query = source.get("db_table").objects
                annotations = source.get("annotations")
                if annotations:
                    tag_keys_query = tag_keys_query.annotate(**annotations)
                if filters is True:
                    tag_keys_query = tag_keys_query.filter(self.query_filter)

                if type_filter and type_filter != source.get("type"):
                    continue
                exclusion = self._get_exclusions("key")
                tag_keys_query = tag_keys_query.exclude(exclusion).values(
                    "key").distinct().all()

                tag_keys.update({tag.get("key") for tag in tag_keys_query})

        return list(tag_keys)
Example #41
    def test_execute_query_ocp_aws_storage_with_group_by_tag_and_limit(self):
        """Test that data is grouped by tag key and limited."""
        with tenant_context(self.tenant):
            labels = OCPAWSCostLineItemDailySummary.objects\
                .filter(usage_start__gte=self.dh.last_month_start)\
                .filter(usage_start__lte=self.dh.last_month_end)\
                .filter(product_family__contains='Storage')\
                .values(*['tags'])\
                .first()

            tags = labels.get('tags')
            group_by_key = list(tags.keys())[0]
            plural_key = group_by_key + 's'

        url = reverse('reports-openshift-aws-storage')
        client = APIClient()
        params = {
            'filter[resolution]': 'monthly',
            'filter[time_scope_value]': '-2',
            'filter[time_scope_units]': 'month',
            f'group_by[tag:{group_by_key}]': '*',
            'filter[limit]': 2
        }
        url = url + '?' + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        data = response.json()
        data = data.get('data', [])
        # default ordered by usage
        previous_tag_usage = data[0].get(plural_key, [])[0].get(
            'values', [{}])[0].get('usage', {}).get('value')
        for entry in data[0].get(plural_key, []):
            current_tag_usage = entry.get('values',
                                          [{}])[0].get('usage',
                                                       {}).get('value')
            if 'Other' not in entry.get(group_by_key):
                self.assertTrue(current_tag_usage <= previous_tag_usage)
                previous_tag_usage = current_tag_usage
Example #42
    def test_remove_ocp_added_via_sources(self):
        """Remove ocp provider added via sources."""
        # Create Provider
        credentials = {"cluster_id": "cluster_id_1001"}
        provider_authentication = ProviderAuthentication.objects.create(credentials=credentials)
        with patch("masu.celery.tasks.check_report_updates"):
            provider = Provider.objects.create(
                name="ocpprovidername",
                created_by=self.user,
                customer=self.customer,
                authentication=provider_authentication,
            )
        provider_uuid = provider.uuid

        sources = Sources.objects.create(source_id=1, auth_header="testheader", offset=1, koku_uuid=provider_uuid)
        sources.save()
        delete_request = self._create_delete_request(self.user, {"Sources-Client": "True"})
        with tenant_context(self.tenant):
            manager = ProviderManager(provider_uuid)
            manager.remove(delete_request, from_sources=True)
        provider_query = Provider.objects.all().filter(uuid=provider_uuid)
        self.assertFalse(provider_query)
Example #43
 def test_error_neg_tier_usage_end(self):
     """Test error when trying to create a negative tiered usage_end."""
     rate = {
         'provider_uuid': self.provider.uuid,
         'metric': Rate.METRIC_CPU_CORE_USAGE_HOUR,
         'tiered_rate': [{
             'unit': 'USD',
             'value': 1.0,
             'usage_start': 10.0,
             'usage_end': (round(Decimal(random.random()), 6) * -1)
         }]
     }
     with tenant_context(self.tenant):
         serializer = RateSerializer(data=rate)
         with self.assertRaises(serializers.ValidationError):
             if serializer.is_valid(raise_exception=True):
                 serializer.save()
Example #44
    def connect(self, message, **kwargs):
        try:
            tenant = get_tenant_model().objects.get(
                domain_url=get_host(message.content)
            )
        except get_tenant_model().DoesNotExist:
            logger.error(
                'no tenant found for {}'.format(
                    get_host(message.content)
                )
            )
            message.reply_channel.send({"close": True})
            return

        message.channel_session['schema'] = tenant.schema_name

        super().connect(message, **kwargs)

        Group(
            '{}_latest_blocks'.format(tenant.schema_name)
        ).add(
            message.reply_channel
        )

        with tenant_context(tenant):
            latest_blocks = Block.objects.exclude(
                height__isnull=True
            ).order_by(
                '-height'
            )[:20]

            kwargs.get('multiplexer').send(
                {
                    "latest_blocks": [
                        block.serialize() for block in latest_blocks
                    ]
                }
            )
Example #45
def ws_receive(message):
    message_dict = json.loads(message['text'])
    domain_url = message_dict['payload']['host']

    if domain_url == 'explorer.nubits.com':
        domain_url = 'nu.crypto-daio.co.uk'

    try:
        tenant = get_tenant_model().objects.get(domain_url=domain_url)
    except get_tenant_model().DoesNotExist:
        tenant = get_tenant_model().objects.get(domain_url='nu.crypto-test.co.uk')

    with tenant_context(tenant):
        if message['path'] == '/get_block_details/':
            get_block_details(message_dict, message)

            return

        if message['path'] == '/get_address_details/':
            try:
                address_object = Address.objects.get(address=message_dict.get('stream'))
                get_address_balance(address_object, message)
                get_address_details(address_object, message)
            except Address.DoesNotExist:
                pass

            return

        # if message['path'] == '/tx_browser/':
        #     if message_dict.get('stream') == 'add_nodes':
        #         add_onward_nodes(message_dict, message)
        #
        #     if message_dict.get('stream') == 'stop_nodes':
        #         try:
        #             user_socket = UserSocket.objects.get(
        #                 reply_channel=message.reply_channel
        #             )
        #             user_socket.tx_browser_running = False
        #             user_socket.save()
        #         except UserSocket.DoesNotExist:
        #             pass
        #
        #     return

        if message['path'] == '/get_current_grants/':
            get_current_grants(message)
            return

        if message['path'] == '/get_current_motions/':
            get_current_motions(message)
            return

        if message['path'] == '/all_blocks_list/':
            try:
                last_height = int(message_dict['payload']['last_height'])
            except ValueError:
                return

            get_next_blocks(message, last_height)
            return

        if message['path'] == '/latest_blocks/':
            get_latest_blocks(message)
Example #46
def post_schema_save(sender, **kwargs):
    tenant = kwargs['tenant']
    with tenant_context(tenant):
        create_superuser(tenant, tenant.email)
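
A handler like this only runs once it is connected to the schema-creation signal. The wiring below is a minimal sketch, assuming django-tenant-schemas' post_schema_sync signal; treat the exact sender and connection point as an assumption rather than part of the example.

from tenant_schemas.models import TenantMixin
from tenant_schemas.signals import post_schema_sync

# Assumed wiring: post_schema_sync fires after a tenant schema is created and
# passes the tenant instance, which post_schema_save reads from kwargs.
post_schema_sync.connect(post_schema_save, sender=TenantMixin)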