Example #1
0
    def test_diagnostic_settings_table_storage_type(self):
        """Filter storage accounts by table diagnostic settings.

        Builds a policy that selects storage accounts named 'cctstorage*'
        and keeps only those whose table logging has delete disabled, then
        verifies exactly one resource matched and that the table settings
        were cached on the resource under the annotation prefix.
        """
        p = self.load_policy(
            {
                'name': 'test-azure-storage',
                'resource': 'azure.storage',
                'filters': [
                    {
                        'type': 'value',
                        'key': 'name',
                        'op': 'glob',
                        'value_type': 'normalize',
                        'value': 'cctstorage*',
                    },
                    {
                        'type': 'storage-diagnostic-settings',
                        'storage-type': 'table',
                        'key': 'logging.delete',
                        'value': False,
                    },
                ],
            },
            validate=True)

        resources = p.run()
        self.assertEqual(1, len(resources))
        # assertIn reports both operands on failure, unlike assertTrue(x in y)
        self.assertIn(get_annotation_prefix('table'), resources[0])
Example #2
0
    def get_backup_retention_policy(database, get_operation, cache_key):
        """Fetch a backup retention policy, caching it on the resource dict.

        :param database: the database resource dict; the policy is cached on
            it under the annotation-prefixed ``cache_key``.
        :param get_operation: SDK callable taking
            (resource_group_name, server_name, database_name).
        :param cache_key: suffix for the annotation key used for caching.
        :returns: the retention policy as a dict, or None when the service
            reports 404 (no policy configured).
        :raises CloudError: re-raised for any non-404 service error.
        """
        policy_key = get_annotation_prefix(cache_key)
        cached_policy = database.get(policy_key)
        if cached_policy:
            return cached_policy

        resource_group_name, server_name, database_name = \
            BackupRetentionPolicyHelper.get_backup_retention_policy_context(database)

        try:
            response = get_operation(resource_group_name, server_name,
                                     database_name)
        except CloudError as e:
            if e.status_code == 404:
                # A missing policy is an expected condition, not an error.
                return None
            log.error(
                "Unable to get backup retention policy. "
                "(resourceGroup: {}, sqlserver: {}, sqldatabase: {})".
                format(resource_group_name, server_name, database_name))
            # Bare raise preserves the original traceback (unlike `raise e`).
            raise

        retention_policy = response.as_dict()
        database[policy_key] = retention_policy
        return retention_policy
    def __call__(self, i):
        """Annotate resource ``i`` with its total cost and apply the filter.

        Lazily queries costs once, sums the pre-tax cost of every cost entry
        belonging to this resource (or its children), stores the total under
        the 'cost' annotation, and delegates the match decision to the parent
        filter. Returns False when no cost data exists for the resource.
        """
        if not self.cached_costs:
            self.cached_costs = self._query_costs()

        # Trailing slash prevents prefix collisions, e.g. '/vm1' vs '/vm10'.
        # Note: renamed from `id` to avoid shadowing the builtin.
        resource_id = i['id'].lower() + "/"

        costs = [
            c.copy() for c in self.cached_costs
            if (c['ResourceId'] + '/').startswith(resource_id)
        ]

        if not costs:
            return False

        currency = costs[0]['Currency']
        if any(c['Currency'] != currency for c in costs):
            self.log.warning(
                'Detected different currencies for the resource {0}. Costs array: {1}'
                .format(i['id'], costs))

        total_cost = {
            'PreTaxCost': sum(c['PreTaxCost'] for c in costs),
            'Currency': currency,
        }
        i[get_annotation_prefix('cost')] = total_cost
        result = super(CostFilter, self).__call__(total_cost)
        return result
 def _write_metric_to_resource(self, resource, metrics_data, m):
     """Cache a metrics query result on the resource.

     The result is stored under the 'metrics' annotation, keyed by this
     filter's metrics cache key, so later passes can reuse it.
     """
     annotation_key = get_annotation_prefix('metrics')
     cache = resource.setdefault(annotation_key, {})
     entry = {
         'metrics_data': metrics_data.as_dict(),
         'measurement': m,
     }
     cache[self._get_metrics_cache_key()] = entry
Example #5
0
    def _get_settings(self, storage_account, session=None, token=None):
        """Return the storage service settings, caching them on the resource.

        On first access the settings are fetched for this filter's storage
        type and cached on the storage account dict under the annotation
        prefix; subsequent calls return the cached copy.
        """
        storage_prefix_property = get_annotation_prefix(self.storage_type)

        # `x not in y` is the idiomatic form of `not (x in y)`.
        if storage_prefix_property not in storage_account:
            settings = StorageSettingsUtilities.get_settings(
                self.storage_type, storage_account, session, token)
            # Round-trip through jsonpickle to cache a plain-dict form.
            storage_account[storage_prefix_property] = json.loads(
                jsonpickle.encode(settings))

        return storage_account[storage_prefix_property]
Example #6
0
    def _get_settings(self, storage_account, session=None):
        """Return the storage service settings, caching them on the resource.

        On first access the settings are fetched for this filter's storage
        type, serialized, and cached on the storage account dict under the
        annotation prefix; subsequent calls return the cached copy.
        """
        storage_prefix_property = get_annotation_prefix(self.storage_type)

        # `x not in y` is the idiomatic form of `not (x in y)`.
        if storage_prefix_property not in storage_account:
            settings = StorageSettingsUtilities.get_settings(
                self.storage_type, storage_account, session)
            storage_account[storage_prefix_property] = serialize(settings)

        return storage_account[storage_prefix_property]
    def _check_resources(self, resources, event):
        """Return exports whose last execution is stale or missing.

        An export is included when it has no execution history at all, or
        when its most recent execution was submitted on or before
        ``self.min_date``. Matching resources are annotated with their
        last execution (or 'None') under the 'last-execution' annotation;
        resources already annotated are skipped.
        """
        # Hoist the annotation key — it is invariant across the loop.
        annotation_key = get_annotation_prefix('last-execution')
        result = []

        for r in resources:
            # Already annotated on a previous pass; skip.
            if annotation_key in r:
                continue
            history = self.client.exports.get_execution_history(self.scope, r['name'])

            # Include exports that have no execution history
            if not history.value:
                r[annotation_key] = 'None'
                result.append(r)
                continue

            last_execution = max(history.value, key=lambda execution: execution.submitted_time)
            if last_execution.submitted_time.date() <= self.min_date.date():
                r[annotation_key] = last_execution.serialize(True)
                result.append(r)

        return result
    def _process_resource(self, database):
        """Write a new backup retention policy and refresh the cached copy.

        Resolves the create_or_update operation for this filter's
        operations property, applies the new policy parameters for the
        database, waits for the long-running operation to finish, and
        stores the resulting policy back on the resource dict.
        """
        operations = getattr(self.client, self.operations_property)
        update_operation = operations.create_or_update

        context = BackupRetentionPolicyHelper.get_backup_retention_policy_context(database)
        resource_group_name, server_name, database_name = context

        parameters = self._get_parameters_for_new_retention_policy(database)

        poller = update_operation(
            resource_group_name, server_name, database_name, parameters)
        new_retention_policy = poller.result()

        # Keep the cached version in sync with what was just written.
        database[get_annotation_prefix(self.operations_property)] = \
            new_retention_policy.as_dict()
 def _get_cached_metric_data(self, resource):
     """Return the cached metrics entry for this filter's cache key, or None.

     Looks up the 'metrics' annotation on the resource; when absent (or
     empty) there is nothing cached and None is returned.
     """
     metrics = resource.get(get_annotation_prefix('metrics'))
     return metrics.get(self._get_metrics_cache_key()) if metrics else None