def load_azure_so_codes(self):
    """Create or refresh the Azure JBA-code products used for SO generation.

    For every (jba_code, product_name) pair from
    AppDefaults.azure_jba_codes() this upserts a Products row keyed on
    product_name, attached to the Microsoft vendor and owned by the first
    superuser.
    """
    vendor = VendorDetails.objects.filter(
        vendor_name=AppDefaults.cloud_vendor_codes(return_as='name',
                                                   query_str='MS')).first()
    user = RedUser.objects.filter(is_superuser=True).first()
    for code in AppDefaults.azure_jba_codes():
        # One timestamp per product so created/modified dates agree.
        now = timezone.now()
        data = {
            "product_name": code[1],
            "product_description": "Uses for SO generation",
            "vendor_details": vendor,
            "unit_cost": 0,
            "unit_price": 0,
            "standard_discount": 0,
            "product_status": 1,
            "product_featured": False,
            "product_billing_type": "Consumption",
            "product_jbacode": code[0],
            "product_created_by": user,
            "product_modified_by": user,
            "product_created_date": now,
            "product_modified_date": now,
            "product_currency": 1
        }
        # Single query instead of exists() + first().  setattr() goes
        # through the model field descriptors, so FK values
        # (vendor_details, *_by) are stored correctly; the previous
        # product.__dict__.update(**data) bypassed the descriptors.
        product = Products.objects.filter(product_name=code[1]).first()
        if product is not None:
            for field, value in data.items():
                setattr(product, field, value)
            product.save()
            print("Updated %s - %s" % (code[0], code[1]))
        else:
            Products.objects.create(**data)
            print("Loaded %s - %s" % (code[0], code[1]))
def update_hidden_fields(self, validated_data, instance=None):
    """Stamp audit fields and derive hidden detail values before save.

    Sets modified_by (and created_by on creation), encrypts the IAM
    password in place when one was supplied, and mirrors the 'active'
    flag into details['allow_order'] for cloud-vendor account types.
    Returns the mutated validated_data.
    """
    request_user = self.context['request'].user
    validated_data['modified_by'] = request_user
    if instance is None:
        validated_data['created_by'] = request_user

    # Password formatting: encrypt only when a value was actually sent.
    details = validated_data['details']
    if 'iam_password' in details and details['iam_password'] is not None:
        details['iam_password'] = AESCipher().encrypt(
            details['iam_password']).decode()

    # Defining allow_order: the account type either comes with the payload
    # or, on update, is derived from the existing instance's vendor.
    vendor_codes = AppDefaults.cloud_vendor_codes(return_as="codes")
    account_type = validated_data.get('type', None)
    if not account_type and instance is not None:
        account_type = AppDefaults.cloud_vendor_codes(
            return_as="code", query_str=instance.vendor.vendor_name)
    if account_type in vendor_codes:
        details['allow_order'] = "Yes" if validated_data['active'] else "No"
    return validated_data
def create_predefined_roles(sender, **kwargs):
    """Signal hook: create the predefined roles (e.g. partner_role).

    For every alias/name pair from AppDefaults.get_predefined_roles()
    this creates an auth Group carrying the union of permissions granted
    by the role's access specifiers, plus a users.Roles row describing
    it.  Runs only when both the auth.group and users.roles content
    types already exist (i.e. after their migrations ran).
    """
    from django.contrib.auth.models import Permission
    from django.contrib.contenttypes.models import ContentType
    from cloudapp.defaults import AppDefaults
    from users.models import Roles
    if ContentType.objects.filter(
            app_label='auth',
            model='group').exists() and ContentType.objects.filter(
                app_label='users', model='roles').exists():
        predefined_roles = AppDefaults.get_predefined_roles()
        for role_alias, role_name in predefined_roles.items():
            # Resolve the concrete Group model through its content type.
            group_model = ContentType.objects.filter(
                app_label='auth', model='group')[0].model_class()
            # Creates new role if not created before
            if not group_model.objects.filter(name=role_name).exists():
                access_specifiers = AppDefaults.get_predefined_role_access_specifiers(
                    role_alias=role_alias)
                # Each specifier resolves to a list of permission ids
                # (element [0] of the helper's result); flatten and
                # de-duplicate across all specifiers.
                allowed_permissions_sets = [
                    AppDefaults.get_access_specifier_permissions(specifier)[0]
                    for specifier in access_specifiers
                ]
                allowed_permissions = list(
                    set([
                        item for sublist in allowed_permissions_sets
                        for item in sublist
                    ]))
                # Creating Group
                group_instance = group_model.objects.create(name=role_name)
                # Legacy direct M2M assignment (pre-Django-2 style).
                group_instance.permissions = Permission.objects.filter(
                    id__in=allowed_permissions)
                # Model.save() always returns None, so this saves and logs.
                if group_instance.save() is None:
                    print(
                        '\033[0;37;42m Generated new role "%s", Applying details... \033[0m'
                        % role_alias)
                # Creating Role details
                role_instance = Roles.objects.create(
                    group=group_instance,
                    alias=role_alias,
                    accesses=','.join(access_specifiers),
                    description='Predefined role for %s' % role_alias)
                if role_instance.save() is None:
                    print(
                        '\033[0;37;42m Details applied for role: %s \033[0m'
                        % role_alias)
    else:
        print('---- Error while generating predefined roles ---')
        print(' -Either auth.group or users.roles model does not exists !!!')
def update(self, instance, validated_data):
    """Update a Group (role) and its nested details record.

    Pops the nested 'details' payload, recomputes the group name from
    the alias (canonical name for superusers editing a predefined role,
    otherwise "<username>/<alias>"), refreshes the details row and the
    group's permissions, then delegates the remaining fields to the base
    serializer update.
    """
    details_data = validated_data.pop('details', None)
    if details_data is None:
        # No nested payload: just update the plain Group fields.  The
        # previous code assumed 'details' was always present and crashed
        # with AttributeError on None otherwise.
        return super(GroupSerializer, self).update(instance, validated_data)

    # Defining Group name
    if 'alias' in details_data:
        request_user = self.context['request'].user
        predefined_roles = AppDefaults.get_predefined_roles()
        if request_user.is_superuser and details_data['alias'] in predefined_roles:
            # Superusers editing a predefined role keep its canonical name.
            validated_data['name'] = predefined_roles[details_data['alias']]
        else:
            # Regular roles are namespaced by their owner.
            validated_data['name'] = request_user.username + '/' + details_data['alias']

    details_data['modified_at'] = datetime.datetime.now()
    self.create_or_update_details(instance, details_data)
    instance = self.add_or_update_permissions(instance,
                                              details_data['accesses'])
    return super(GroupSerializer, self).update(instance, validated_data)
class NotificationActions(models.Model):
    # Maps a notification signal to the groups that should be notified.
    # Signal name; choices come from the signals registered in AppDefaults.
    action = models.CharField(max_length=50,
                              choices=AppDefaults.get_notification_signals())
    # Notification groups alerted when this action fires.
    groups = models.ManyToManyField(NotificationGroups,
                                    related_name='actions')
    # Audit columns; related_name="+" disables the reverse accessor on RedUser.
    created_by = models.ForeignKey(RedUser, related_name="+")
    created_at = models.DateTimeField(auto_now_add=True)
    modified_by = models.ForeignKey(RedUser, related_name="+")
    modified_at = models.DateTimeField(auto_now=True)
    # Row-level change history (django-simple-history).
    history = HistoricalRecords()
def create(self, validated_data):
    """Create a Group (role) plus its nested details record.

    The group name is derived from details['alias']: a superuser creating
    a predefined role gets the canonical name, everyone else gets a name
    namespaced as "<username>/<alias>".  Returns the group with its
    permissions applied.
    """
    details_data = validated_data.pop('details', None)
    request_user = self.context['request'].user

    # Defining Group name
    predefined_roles = AppDefaults.get_predefined_roles()
    if request_user.is_superuser and details_data['alias'] in predefined_roles:
        validated_data['name'] = predefined_roles[details_data['alias']]
    else:
        validated_data['name'] = request_user.username + '/' + details_data['alias']

    # super().create() already persists the group; the extra save() the
    # old code performed immediately afterwards was redundant.
    group = super(GroupSerializer, self).create(validated_data)

    # One timestamp for both audit columns so created_at == modified_at
    # on a freshly created role (previously they differed by microseconds).
    now = datetime.datetime.now()
    details_data['created_by'] = request_user
    details_data['created_at'] = now
    details_data['modified_at'] = now
    self.create_or_update_details(group, details_data)
    return self.add_or_update_permissions(group, details_data['accesses'])
def fetch_action_details(self, request, *args, **kwargs):
    """Return the mapping details for one notification action.

    The response carries the serialized action (or None when nothing is
    mapped yet), the full list of known signals, and the groups not yet
    mapped to this action.
    """
    action = kwargs['action']
    if action == 'initial':
        # 'initial' is a placeholder: fall back to the first known signal.
        action = AppDefaults.get_notification_signals()[0][0]

    matches = self.get_queryset().filter(action=action)
    payload = {'action_details': None}
    if len(matches) > 0:
        serializer = self.serializer_class(matches[0],
                                           context={'request': request})
        payload['action_details'] = serializer.data

    payload['actions'] = AppDefaults.get_notification_signals()
    # Groups that could still be assigned to this action.
    unmapped = NotificationGroups.objects.filter(deleted=False).exclude(
        actions__action=action).values('id', 'name')
    payload['unmapped_groups'] = list(unmapped)
    return Response(payload)
def add_or_update_permissions(self, group, accesses):
    """Replace the group's permissions from a comma-separated access list.

    Each access specifier resolves either to whole content types or to
    individual permission ids; the union of both selections becomes the
    group's permission set.  A None accesses value clears the set.
    Returns the (unsaved) group.
    """
    permissions = []
    if accesses is not None:
        content_types = []
        permission_ids = []
        for specifier in accesses.split(','):
            values, level = AppDefaults.get_access_specifier_permissions(
                specifier)
            if level == 'content_types':
                content_types += values
            elif level == 'permissions':
                permission_ids += values
        permissions = Permission.objects.filter(
            Q(id__in=permission_ids) | Q(content_type__in=content_types))
    group.permissions = permissions
    return group
def jwt_response_payload_handler(token, user=None, request=None):
    """Build the JWT login response: token plus serialized user details.

    The user payload is augmented with a sorted, de-duplicated list of
    access specifiers -- the full Admin set for superusers, otherwise the
    union of the accesses carried by the user's groups.
    """
    user_details = UsersSerializer(user, context={'request': request}).data

    accesses = []
    if user.is_superuser:
        accesses = AppDefaults.get_predefined_role_access_specifiers('Admin')
    else:
        joined = user.groups.all().values_list('details__accesses', flat=True)
        for chunk in joined:
            if chunk is not None:
                accesses += chunk.split(',')

    user_details['accesses'] = sorted(set(accesses))
    return {'token': token, 'user': user_details}
class CloudAccounts(models.Model):
    """A customer's account with one cloud vendor (AWS / SoftLayer / MS).

    `details` holds vendor-specific account data (tenant ids, IAM info,
    allow_order flag, ...) as JSON.
    """
    # (code, label) pairs for the supported vendors.
    ACCOUNT_TYPES = AppDefaults.cloud_vendor_codes()
    customer = models.ForeignKey(Customers, related_name='cloud_accounts')
    type = models.CharField(max_length=50, choices=ACCOUNT_TYPES)
    details = JSONField(default=None)
    licenses_and_credentials = JSONField(default=None, null=True, blank=True)
    active = models.BooleanField(default=False)
    # Audit columns; related_name="+" disables the reverse accessor.
    created_by = models.ForeignKey(RedUser, related_name="+")
    created_at = models.DateTimeField(auto_now_add=True)
    modified_by = models.ForeignKey(RedUser, related_name="+")
    modified_at = models.DateTimeField(auto_now=True)
    # Row-level change history (django-simple-history).
    history = HistoricalRecords()

    def get_account_type_value(self, label=None):
        """Return the ACCOUNT_TYPES key for *label*, or None.

        The previous implementation used list.index(), which raised
        ValueError for labels missing from ACCOUNT_TYPES; an unknown
        label now returns None, the same as a missing label.
        """
        if label is None:
            return None
        return next((key for key, value in self.ACCOUNT_TYPES
                     if value == label), None)
def storeRateCard(self):
    """Download the Azure INR rate card and upsert CloudRates rows.

    Saves the raw rate card JSON to self.ratecard_file, streams it
    through jq to flatten each meter to
    [id, category, subcategory, region, name, rate], then updates the
    rate of existing CloudRates rows (matched by uuid) and inserts new
    ones linked to the Microsoft product.
    """
    access_headers = self.getAccessHeaders()
    # NOTE(review): the original URL contained the mojibake '®ion' where
    # '&region' was clearly intended; restored the proper query string.
    rate_card_url = ('https://api.partnercenter.microsoft.com/v1/'
                     'ratecards/azure?currency=INR&region=IN')
    ratecard_out = requests.get(rate_card_url, headers=access_headers)
    # utf-8-sig strips the BOM the Partner Center API prepends.
    ratecard_out.encoding = 'utf-8-sig'
    with open(self.ratecard_file, 'w') as out_file:
        out_file.write(ratecard_out.text)
    if os.path.exists(self.ratecard_file):
        # jq emits one compact JSON array per meter:
        # [id, category, subcategory, region, name, rate].
        with open(self.ratecard_file) as ratecard_in:
            proc = subprocess.Popen([
                "jq", "-c",
                '.meters[] | [.id, .category,.subcategory, .region, .name, .rates."0"]'
            ],
                stdout=subprocess.PIPE,
                stdin=ratecard_in)
        azure_product = Products.objects.filter(
            vendor_details__vendor_name=AppDefaults.cloud_vendor_codes(
                return_as='name', query_str='MS')).first()
        for raw_line in proc.stdout.readlines():
            line = json.loads(raw_line.decode('utf-8'))
            try:
                obj = CloudRates.objects.get(uuid=line[0])
                obj.rate = line[5]
                obj.save()
                # line[4] is the meter name, line[3] its region (the old
                # code printed region/subcategory in the wrong slots).
                print("Updated for %s in region %s" % (line[4], line[3]))
            except ObjectDoesNotExist:
                CloudRates.objects.create(product=azure_product,
                                          rateName=line[4],
                                          region=line[3],
                                          category=line[1],
                                          subcategory=line[2],
                                          rate=line[5],
                                          uuid=line[0])
                print("Inserted for %s in region %s" % (line[4], line[3]))
def handle(self, *args, **options):
    """Dispatch async cloud tasks for every partner with active accounts.

    'type' selects the task (instance_details -> update_aws_details,
    instance_metrics -> get_metrics); 'vendor' is 'ALL' or one vendor
    code and narrows which partners are processed.
    """
    operation = options['type']
    if not operation:
        print("Need type of operation")
        sys.exit(-1)

    vendor = options['vendor']
    if vendor != 'ALL' and not AppDefaults.cloud_vendor_codes(return_as='code', query_str=vendor):
        print("Vendor should either be ALL to process all types or individual ones to access just that cloud vendor")
        sys.exit(-2)

    # Hard-coded vendor-name -> vendor-id mapping.
    vendor_ids = {
        'AWS': 1,
        'SoftLayer': 2,
        'MS': 3
    }

    # Partners owning at least one active cloud account (optionally
    # restricted to the requested vendor).
    if vendor == 'ALL':
        accounts = CloudAccounts.objects.filter(active=1)
    else:
        accounts = CloudAccounts.objects.filter(vendor_id=vendor_ids[vendor], active=1)
    partners = Partner.objects.filter(
        id__in=accounts.values('customer__partner').distinct())

    if operation == 'instance_details':
        print("About to call update_aws_details for partners, Total count: %d " % (partners.count()))
        for partner in partners:
            print("Calling update_aws_details for Partner: %s " % (partner.pk,))
            update_aws_details.apply_async(args=[partner.pk])
    elif operation == 'instance_metrics':
        print("About to call get_metrics for partners, Total count: %d " % (partners.count()))
        for partner in partners:
            print("Calling get_metrics for Partner: %s " % (partner.pk,))
            get_metrics.apply_async(args=[partner.pk])
def generate_report(self):
    """Build the AWS orders CSV report for [start_date, end_date] and mail it.

    Collects order items for AWS orders in the window, flattens related
    order/partner/customer columns into plain rows, enriches each row
    with the customer's AWS cloud-account details, exports everything to
    CSV via ReportList and dispatches it through the mail-notification
    signal.  Returns True on completion.
    """
    order_items = OrderItems.objects.filter(
        order__vendor__vendor_name=AppDefaults.cloud_vendor_codes(return_as='name', query_str='AWS'),
        order__created_at__gte=self.start_date,
        order__created_at__lte=self.end_date
    )
    # Flatten related columns so each row can go straight into the CSV.
    orders_data_list = order_items \
        .annotate(order_number=F('order__order_number'),
                  order_status=F('order__status'),
                  partner=F('order__partner__company_name'),
                  partner_jbacode=F('order__partner__jba_code'),
                  customer=F('order__customer__company_name'),
                  customer_id=F('order__customer__id'),
                  total_cost=F('order__total_cost'),
                  machine_image_name=F('cloudtemplate__image__name'),
                  machine_storage_name=F('cloudtemplate__storage__name')
                  ) \
        .values('order_number', 'order_status', 'partner', 'partner_jbacode',
                'customer', 'customer_id', 'machine_image_name',
                'machine_storage_name', 'quantity', 'discount', 'cost',
                'total_cost')
    for order in orders_data_list:
        # Replace the stored status code with its display label.
        order['order_status'] = list(filter(lambda s: s[0] == order['order_status'], order_statuses))[0][1]
        # A storage item takes precedence over an image item for the
        # reported product name.
        order['product_name'] = order.pop('machine_storage_name') \
            if order['machine_storage_name'] is not None \
            else order.pop('machine_image_name')
        """ Fetching customer's cloud account details """
        # Defaults keep the CSV columns present even without an account.
        cloud_acc_details = {'iam_username': '', 'payer_account_id': '',
                             'iam_url': '', 'friendly_name': '',
                             'account_id': '', 'delivery_sequence': '',
                             'mrr': '', 'workload': '',
                             'reference_number': '', 'estimate_url': '',
                             'root_email': ''}
        ca = CloudAccounts.objects.filter(type='AWS', customer__pk=order['customer_id'])
        if ca.exists():
            ca = ca.first()
            cloud_acc_details.update({
                'payer_account_id': ca.details.get('payer_account_id', ''),
                'account_id': ca.details.get('account_id', ''),
                'iam_url': ca.details.get('iam_url', ''),
                'root_email': ca.details.get('root_email', ''),
                'reference_number': ca.details.get('reference_number', ''),
                'delivery_sequence': ca.details.get('delivery_sequence', ''),
                'mrr': ca.details.get('mrr', ''),
                'workload': ca.details.get('workload', ''),
                'estimate_url': ca.details.get('estimate_url', '')
            })
        order.update(cloud_acc_details)
    file_generator = ReportList()
    file_response = file_generator.export(report_name='aws_orders_report',
                                          screen_name='aws_orders_report_{}_to_{}'.format(
                                              self.start_date.strftime('%Y_%m_%d'),
                                              self.end_date.strftime('%Y_%m_%d')
                                          ),
                                          export_type='csv',
                                          data_list=orders_data_list)
    # Lift the generated file out of the HTTP response so it can be
    # attached to the notification mail (uses the private _headers attr).
    headers = file_response._headers
    attachments = [
        {
            'filename': headers['content-disposition'][1].split(';')[1].split('=')[1],
            'content': file_response.content,
            'mimetype': headers['content-type'][1]
        }
    ]
    """ Sending Emails """
    send_mail_notifications.send(sender=Orders, trigger='AwsOrdersReport',
                                 details={
                                     'subject': 'AWS ORDERS {}LY REPORT'.format(self.range.upper()),
                                     'message': 'Please find the AWS orders report file for the duration : %s to %s' % (
                                         self.start_date.date(), self.end_date.date()),
                                     'attachments': attachments
                                 })
    return True
def getUtilization(self):
    """Fetch daily Azure utilization for this subscription and persist it.

    Downloads Partner Center utilization records for
    [startDate, endDate], delegates per-line processing to
    process_records(), then post-processes the "consolidated" rate names
    collected there: those are grouped per region and booked onto a
    single monthly recording date (the 22nd) in
    CloudServiceConsumptions, accumulating quantities across days.
    """
    access_headers = self.getAccessHeaders()
    url = 'https://api.partnercenter.microsoft.com/v1/customers/' \
          '{}/subscriptions/{}/utilizations/azure?' \
          'start_time={}&end_time={}&granularity=Daily&show_details=True'. \
        format(self.tenantId, self.subscriptionId, self.startDate,
               self.endDate)
    utilization_records_out = requests.get(url, headers=access_headers)
    # utf-8-sig strips the BOM the Partner Center API prepends.
    utilization_records_out.encoding = 'utf-8-sig'
    utilization_records = utilization_records_out.text
    self.process_records(utilization_records, self.grouped_records,
                         self.grouped_calculations, self.consolidated_rates)
    if len(self.consolidated_rates) > 0:
        """ Querying vendor & customer """
        vendor = VendorDetails.objects.filter(
            vendor_name=AppDefaults.cloud_vendor_codes(
                return_as='name', query_str='MS')).first()
        account_type = AppDefaults.cloud_vendor_codes(
            return_as='code', query_str=vendor.vendor_name)
        # Tenant ids are stored in mixed case; try upper, then lower.
        cloud_accounts = CloudAccounts.objects.filter(
            details__tenant_id=self.tenantId.upper(), type=account_type)
        """ Try for lowercase """
        if not cloud_accounts.exists():
            cloud_accounts = CloudAccounts.objects.filter(
                details__tenant_id=self.tenantId.lower(), type=account_type)
        customer = None
        if cloud_accounts.exists():
            cloud_account = cloud_accounts.first()
            customer = cloud_account.customer
            customer_cloud_acc_details = cloud_account.details
            # Discount defaults to 10 unless the account overrides it.
            standard_discount = 10
            if 'standard_discount' in customer_cloud_acc_details \
                    and customer_cloud_acc_details['standard_discount'] is not None \
                    and customer_cloud_acc_details['standard_discount'] != '':
                standard_discount = float(customer_cloud_acc_details['standard_discount'])
        # NOTE(review): when no cloud account matches, customer stays None
        # and standard_discount is unbound -- the pricing below would then
        # raise NameError.  Presumably an account always exists here;
        # confirm against process_records(), which sys.exit()s in that case.
        for name, entries in self.consolidated_rates.items():
            by_region = {}
            totals = 0
            # Group this meter's daily entries by region (entry[6]) and
            # total the consumed quantity (entry[7]).
            for entry in entries:
                name_with_location = str.format('{}|{}', name, entry[6])
                region_entry = by_region.setdefault(name_with_location, [])
                region_entry.append(entry)
                totals = totals + entry[7]
            for item in by_region:
                split_values = item.split('|')
                if split_values:
                    product_name = split_values[0]
                    location = split_values[1]
                    daily_records = by_region[item]
                    for rec in daily_records:
                        start_date = self.str_to_datetime(rec[0])
                        # Usage on/after the 22nd belongs to this month's
                        # recording date; earlier usage to the previous
                        # month's (with year rollover in January).
                        date_of_recording = None
                        if start_date.month == 1:
                            if start_date.day >= 22:
                                date_of_recording = datetime.datetime(
                                    start_date.year, start_date.month, 22,
                                    0, 0, 0, tzinfo=pytz.UTC)
                            else:
                                date_of_recording = datetime.datetime(
                                    start_date.year - 1, 12, 22,
                                    0, 0, 0, tzinfo=pytz.UTC)
                        else:
                            if start_date.day >= 22:
                                date_of_recording = datetime.datetime(
                                    start_date.year, start_date.month, 22,
                                    0, 0, 0, tzinfo=pytz.UTC)
                            else:
                                date_of_recording = datetime.datetime(
                                    start_date.year, start_date.month - 1, 22,
                                    0, 0, 0, tzinfo=pytz.UTC)
                        # Check if there is a record on the 22nd (as we
                        # store all storage only on the 22nd).
                        consumption = CloudServiceConsumptions.objects.filter(
                            linked_account_id=self.tenantId,
                            subscription_id=self.subscriptionId,
                            item_description=product_name,
                            region=location,
                            usage_start_date=date_of_recording
                        )
                        cloud_rate = CloudRates.objects.get(uuid=rec[2])
                        if consumption.exists():
                            # Accumulate today's quantity onto the monthly row.
                            consumption = consumption[0]
                            consumption.usage_quantity = consumption.usage_quantity + Decimal(rec[7])
                            # Cost applies only to quantity beyond 5 units.
                            if consumption.usage_quantity > 5:
                                cost = calculate_azure_partner_cost(
                                    (float(consumption.usage_quantity) - 5) * float(cloud_rate.rate),
                                    standard_discount)
                                consumption.unblended_cost = Decimal(cost)
                            consumption.save()
                        else:
                            # First sighting this month: create the row.
                            # rec[3..7] follow the jq layout used by
                            # process_records: name, category, subcategory,
                            # region, quantity.
                            consumption = CloudServiceConsumptions()
                            consumption.customer = customer
                            consumption.vendor = vendor
                            consumption.record_id = cloud_rate.uuid
                            consumption.usage_start_date = date_of_recording
                            end_date = date_of_recording + timedelta(days=1)
                            consumption.usage_end_date = end_date
                            consumption.payer_account_id = self.csp_domain
                            consumption.linked_account_id = self.tenantId
                            consumption.pricing_plan_id = ''
                            consumption.product_name = rec[4]
                            consumption.usage_type = rec[5]
                            consumption.item_description = rec[3]
                            consumption.usage_quantity = rec[7]
                            consumption.region = location if location else 'N/A'
                            consumption.rate_id = cloud_rate.id
                            consumption.subscription_id = self.subscriptionId
                            consumption.unblended_cost = 0  # Always 0 when we start
                            consumption.save()
    # Debug output of the per-meter aggregates built by process_records.
    # pprint.pprint(by_region)
    pprint.pprint(self.grouped_records)
    pprint.pprint(self.grouped_calculations)
    total = functools.reduce(lambda x, y: x + y,
                             self.grouped_calculations.values())
    pprint.pprint(total)
def process_records(self, utilization_records, grouped_records,
                    grouped_calculations, consolidated_rates):
    """Parse one page of Azure utilization JSON and store consumptions.

    Writes the raw payload to /tmp/<subscription>.json, streams it
    through jq to flatten each item into
    [start, end, resource id, name, category, subcategory, region,
    quantity], prices each line against CloudRates and saves one
    CloudServiceConsumptions row per line.  Rate names in
    self.ignored_rate_names are skipped; names in
    self.consolidated_rate_names are collected into consolidated_rates
    for later roll-up instead of being stored here.  Follows the API's
    'next' continuation link recursively.
    """
    out_file = open('/tmp/{}.json'.format(self.subscriptionId), 'w')
    out_file.write(utilization_records)
    out_file.close()
    if os.path.exists('/tmp/{}.json'.format(self.subscriptionId)):
        # jq strips the timezone offset from the timestamps and reduces
        # each item to a compact JSON array, one per output line.
        proc = subprocess.Popen(
            ["jq", "-c",
             '.items[] | [(.usageStartTime | sub("(?<before>.*)[-+]\\\\d{2}:\\\\d{2}"; .before ) | '
             'strptime("%Y-%m-%dT%H:%M:%S") | strftime("%Y-%m-%d")), '
             '(.usageEndTime | sub("(?<before>.*)[-+]\\\\d{2}:\\\\d{2}"; .before ) | '
             'strptime("%Y-%m-%dT%H:%M:%S") | strftime("%Y-%m-%d")), '
             '.resource.id, .resource.name, .resource.category, .resource.subcategory, .resource.region, .quantity]'
             ],
            stdout=subprocess.PIPE,
            stdin=open('/tmp/{}.json'.format(self.subscriptionId)))
        """ Querying vendor & customer """
        vendor = VendorDetails.objects.filter(
            vendor_name=AppDefaults.cloud_vendor_codes(
                return_as='name', query_str='MS')).first()
        account_type = AppDefaults.cloud_vendor_codes(
            return_as='code', query_str=vendor.vendor_name)
        # Tenant ids are stored in mixed case; try upper, then lower.
        cloud_accounts = CloudAccounts.objects.filter(
            details__tenant_id=self.tenantId.upper(), type=account_type)
        """ Try for lowercase """
        if not cloud_accounts.exists():
            cloud_accounts = CloudAccounts.objects.filter(
                details__tenant_id=self.tenantId.lower(), type=account_type)
        customer = None
        if cloud_accounts.exists():
            cloud_account = cloud_accounts.first()
            customer = cloud_account.customer
            customer_cloud_acc_details = cloud_account.details
            # Discount defaults to 10 unless the account overrides it.
            standard_discount = 10
            if 'standard_discount' in customer_cloud_acc_details \
                    and customer_cloud_acc_details['standard_discount'] is not None \
                    and customer_cloud_acc_details['standard_discount'] != '':
                standard_discount = float(customer_cloud_acc_details['standard_discount'])
        else:
            # Hard stop: no account matches this tenant id at all.
            sys.exit(
                '\033[0;37;41mSeems there is no customer for tenant id: %s. Terminating ...\033[0m'
                % self.tenantId)
        for line in proc.stdout.readlines():
            line = json.loads(line.decode())
            utilization_start_date = self.str_to_datetime(line[0])
            utilization_end_date = self.str_to_datetime(line[1])
            resource_uuid = line[2]
            name = line[3]
            category = line[4]
            subcategory = line[5]
            location = line[6]
            quantity = line[7]
            if name in self.ignored_rate_names:
                continue
            if name in self.consolidated_rate_names:
                # Deferred: rolled up monthly elsewhere, not stored daily.
                consolidated_rate_name_value = consolidated_rates.setdefault(name, [])
                consolidated_rate_name_value.append(line)
                continue
            try:
                cloud_rate = CloudRates.objects.get(uuid=resource_uuid)
                # Running quantity/price totals keyed by
                # category|subcategory|name|region.
                full_name = str.format('{}|{}|{}|{}', category, subcategory,
                                       name, location)
                current_util = grouped_records.setdefault(full_name, 0)
                grouped_records[full_name] = current_util + quantity
                current_prices = grouped_calculations.setdefault(full_name, 0)
                grouped_calculations[full_name] = current_prices + (quantity * float(cloud_rate.rate))
                # Store in the DB
                consumption = CloudServiceConsumptions()
                consumption.customer = customer
                consumption.vendor = vendor
                consumption.record_id = cloud_rate.uuid
                consumption.usage_start_date = utilization_start_date
                consumption.usage_end_date = utilization_end_date
                consumption.payer_account_id = self.csp_domain
                consumption.linked_account_id = self.tenantId
                consumption.pricing_plan_id = ''
                consumption.product_name = category
                consumption.usage_type = subcategory
                consumption.item_description = name
                consumption.usage_quantity = quantity
                consumption.region = location if location else 'N/A'
                consumption.rate_id = cloud_rate.id
                consumption.subscription_id = self.subscriptionId
                consumption.unblended_cost = calculate_azure_partner_cost(
                    quantity * float(cloud_rate.rate), standard_discount)
                consumption.save()
            except ObjectDoesNotExist:
                # No rate card entry for this resource uuid; log and skip.
                print(
                    "could not find for %s %s %s %s %s" % (
                        category, subcategory, name, location,
                        utilization_start_date))
        # Delete the file
        os.remove('/tmp/{}.json'.format(self.subscriptionId))
        # Check if there are further entries
        json_output = json.loads(utilization_records)
        if 'next' in json_output['links']:
            # Follow the continuation link, carrying over its headers,
            # and recurse into the next page.
            url = 'https://api.partnercenter.microsoft.com/v1/' + json_output['links']['next']['uri']
            continuation_header = {json_output['links']['next']['headers'][0]['key']:
                                   json_output['links']['next']['headers'][0]['value']}
            access_headers = self.getAccessHeaders()
            access_headers.update(continuation_header)
            utilization_records_out = requests.get(url, headers=access_headers)
            utilization_records_out.encoding = 'utf-8-sig'
            utilization_records = utilization_records_out.text
            self.process_records(utilization_records, grouped_records,
                                 grouped_calculations, consolidated_rates)
def _parse_csv_date(self, raw):
    """Parse a CSV timestamp's date part, trying 2- then 4-digit years."""
    date_part = raw.split(' ')[0]
    try:
        return datetime.datetime.strptime(date_part, '%m/%d/%y')
    except ValueError:
        return datetime.datetime.strptime(date_part, '%m/%d/%Y')

def handle(self, *args, **options):
    """One-off data migration: load partners, customers and users from CSVs.

    Reads partners.csv, customers.csv and users.csv from
    <BASE_DIR>/migrations and creates the corresponding
    Partner/ContactDetails, Customers/CustomerContacts and
    RedUser/PartnerUserDetails/UserProfile rows, preserving the original
    creation dates carried in the CSV data.
    """
    migration_csvs = os.path.join(settings.BASE_DIR, 'migrations')
    partner_map = {}

    print("Creating Partners...")
    with open(os.path.join(migration_csvs, 'partners.csv'), 'r',
              encoding='latin1') as csvfile:
        for row in csv.reader(csvfile):
            if not row[0]:
                print("Cannot find partner identifier for ", row[0])
                continue
            partner = Partner()
            partner.company_name = row[3]
            partner.status = row[1] == 'Y'
            partner.existing_status = row[28] == 'Y'
            partner.jba_code = row[23].strip()
            partner.credits = row[24]
            partner.address_1 = row[4]
            partner.address_2 = row[5]
            partner.address_3 = row[6]
            partner.city = row[7]
            partner.state = row[8]
            partner.pin_code = row[9]
            partner.partner_type = 'R'
            # NOTE(review): the original assigned business_type 'S' and
            # then immediately '' -- the effective (final) value is kept.
            partner.business_type = ''
            partner.focused_customer = ''
            partner.interested_workload = ''
            created = self._parse_csv_date(row[21])
            partner.created_at = created
            partner.activated_at = created
            # Disable auto_now_add so the historical dates survive save().
            for field in partner._meta.fields:
                if field.name in ('created_at', 'activated_at'):
                    field.auto_now_add = False
            partner.created_by = 1
            partner.save()
            partner_map[row[0].strip()] = partner.id
            # All four contact types share the same person from the CSV.
            for contact_type in ('P', 'D/O', 'A&O', 'S'):
                contact = ContactDetails()
                contact.partner = partner
                contact.type = contact_type
                contact.name = str.format('{} {}', row[10], row[11])
                contact.email = row[13]
                contact.mobile = row[12]
                contact.save()

    customer_map = {}
    print("Creating Customers...")
    with open(os.path.join(migration_csvs, 'customers.csv'), 'r',
              encoding='latin1') as csvfile:
        for row in csv.reader(csvfile):
            if row[1].strip() not in partner_map:
                # The original dropped into pdb here; report and skip.
                print("Cannot find partner for customer: ", row[1])
                continue
            customer = Customers()
            customer.partner_id = partner_map[row[1].strip()]
            customer.company_name = row[2]
            customer.address = row[3]
            customer.city = row[6]
            customer.state = row[7]
            customer.country = row[8]
            customer.Pincode = row[9]
            customer.pan_number = row[22]
            customer.deleted = False
            customer.created_at = self._parse_csv_date(row[24])
            for field in customer._meta.fields:
                if field.name == 'created_at':
                    field.auto_now_add = False
            customer.save()
            customer_map[row[0]] = customer.id
            # Two contact persons per customer row.
            for first, second, third, mobile, position, email in (
                    (10, 11, 12, 14, 13, 15), (16, 17, 18, 20, 19, 21)):
                contact = CustomerContacts()
                contact.customer = customer
                contact.name = str.format('{} {} {}', row[first],
                                          row[second], row[third])
                contact.position = row[position]
                contact.email = row[email]
                contact.mobile = row[mobile]
                contact.save()

    # Keep the legacy-id -> new-id mapping for follow-up scripts.
    import json
    with open('/tmp/customer_map.json', 'w') as map_file:
        map_file.write(json.dumps(customer_map))

    print("Creating Users...")
    with open(os.path.join(migration_csvs, 'users.csv'), 'r',
              encoding='latin1') as csvfile:
        for row in csv.reader(csvfile):
            if row[3].strip() not in partner_map:
                # NOTE(review): this branch was corrupted/redacted in the
                # source ("..."******"..."); reconstructed as
                # report-and-skip.  Confirm the Redington-user handling
                # against version control before relying on it.
                print("Cannot find partner for user: ", row[3])
                print("Found Redington user: {}, Please create manually", row[3])
                continue
            user = RedUser()
            user.username = row[3]
            user.first_name = row[4]
            user.last_name = row[5]
            user.email = row[6]
            user.is_active = True
            user.is_staff = False
            user.save()
            permission_group_name = AppDefaults.get_predefined_roles()
            if Group.objects.filter(
                    name=permission_group_name['Partner']).exists():
                user.groups = Group.objects.filter(
                    name=permission_group_name['Partner'])
                user.save()
            PartnerUserDetails.objects.create(
                user=user,
                partner=Partner.objects.get(pk=partner_map[row[3].strip()]))
            # NOTE(review): the created_by username below was redacted in
            # the source ('******'); restore the real account name before
            # running this migration.
            UserProfile.objects.create(
                user=user,
                user_type='P',
                description=str.format('Partner user for {}', row[3]),
                created_by=RedUser.objects.get(username='******'),
                role_id=2)
def get_metrics(self):
    """Collect instance metrics for all of this partner's cloud instances.

    For each non-deleted CloudInstances row owned by one of the
    partner's customers: AWS instances are polled through CloudWatch
    (via an assumed ec2_crossrole), Azure instances through the
    management.azure.com insights API.  Datapoints are stored as
    InstanceMetrics rows; InstanceMetricsRuns tracks the last collection
    time per instance/metric so subsequent runs only fetch the delta.
    """
    instances = CloudInstances.objects.filter(
        deleted=0, customer__in=self.partner.customers_set.all())
    for instance in instances:
        if instance.product.vendor_details.vendor_name == AppDefaults.cloud_vendor_codes(
                return_as='name', query_str='AWS'):
            cloudaccount = instance.customer.cloud_accounts.filter(
                type='AWS').first()
            # AWS Instance: assume the customer's cross-account role to
            # read CloudWatch in their account.
            sts = boto3.client(
                'sts',
                aws_access_key_id=AWS_AUTH['AWS_ACCESS_KEY'],
                aws_secret_access_key=AWS_AUTH['AWS_SECRET_KEY'],
                region_name=instance.region)
            role_details = sts.assume_role(RoleArn=str.format(
                'arn:aws:iam::{}:role/ec2_crossrole',
                cloudaccount.details['account_id']),
                RoleSessionName='sts_access')
            credentials = role_details['Credentials']
            cloudwatch = boto3.client(
                'cloudwatch',
                aws_access_key_id=credentials['AccessKeyId'],
                aws_secret_access_key=credentials['SecretAccessKey'],
                aws_session_token=credentials['SessionToken'],
                region_name=instance.region)
            # Check the last run for the various metrics
            metrics = [
                'CPUUtilization', 'DiskReadBytes', 'DiskWriteBytes',
                'NetworkIn', 'NetworkOut'
            ]
            for metric in metrics:
                instance_metric_run = InstanceMetricsRuns.objects.filter(
                    instance=instance, metrics_type=metric)
                start_Time = None
                if instance_metric_run.count() == 0:
                    # First run: look back one day.
                    start_Time = datetime.datetime.now() - timedelta(
                        days=1)
                else:
                    # Resume from the previous collection time.
                    start_Time = instance_metric_run[0].last_run
                endTime = datetime.datetime.now()
                response = cloudwatch.get_metric_statistics(
                    Namespace='AWS/EC2',
                    MetricName=metric,
                    Dimensions=[{
                        'Name': 'InstanceId',
                        'Value': instance.instance_id
                    }],
                    StartTime=start_Time,
                    EndTime=endTime,
                    Period=300,
                    Statistics=['Average'])
                # Persist each 5-minute average datapoint.
                for datapoint in response['Datapoints']:
                    instancemetrics = InstanceMetrics()
                    instancemetrics.instance = instance
                    instancemetrics.vendor_type = instance.product.vendor_details.vendor_id
                    instancemetrics.metrics_type = metric
                    instancemetrics.value = datapoint['Average']
                    instancemetrics.time_stamp = datapoint['Timestamp']
                    instancemetrics.save()
                # Record (or advance) the last-run marker for this metric.
                if instance_metric_run.count() == 0:
                    InstanceMetricsRuns.objects.create(
                        instance=instance,
                        vendor_type=instance.product.vendor_details.
                        vendor_id,
                        metrics_type=metric,
                        last_run=endTime)
                else:
                    instance_metric = instance_metric_run[0]
                    instance_metric.last_run = endTime
                    instance_metric.save()
        elif instance.product.vendor_details.vendor_name == AppDefaults.cloud_vendor_codes(
                return_as='name', query_str='MS'):
            cloudaccount = instance.customer.cloud_accounts.filter(
                type='MS').first()
            # Azure: needs the tenant id and an active Azure subscription.
            tenant_id = cloudaccount.details['tenant_id']
            subscription = Subscriptions.objects.filter(
                customer=instance.customer,
                name='Microsoft Azure',
                status='active')
            # NOTE(review): these returns abort the whole partner run, not
            # just this instance -- a `continue` may have been intended.
            if subscription.count() == 0:
                return  # TODO: Inform that there is no subscription
            elif not tenant_id:
                return  # TODO: Inform that there is no subscription
            subscription_id = subscription[0].subscription
            metrics = [
                'Percentage CPU', 'Network In', 'Network Out',
                'Disk Read Bytes', 'Disk Write Bytes'
            ]
            instance_id = instance.instance_details['id']
            # Resource-owner password grant against the tenant's AAD.
            login_url = str.format(
                'https://login.windows.net/{}/oauth2/token', tenant_id)
            auth_params = {
                'grant_type': 'password',
                'client_id': MSFT_PASSWORD_CLIENT_ID,
                'scope': 'openid',
                'resource': 'https://management.azure.com/',
                'username': AZURE_AUTH['AZURE_AD_USER'],
                'password': AZURE_AUTH['AZURE_PASSWORD']
            }
            auth = requests.post(login_url, auth_params)
            if auth.status_code == 200:
                auth_json = auth.json()
                auth_token = auth_json['access_token']
                for metric in metrics:
                    instance_metric_run = InstanceMetricsRuns.objects.filter(
                        instance=instance, metrics_type=metric)
                    startTime = datetime.datetime.utcnow()
                    startTime_string = None
                    if instance_metric_run.count() == 0:
                        # First run: look back one day from now (UTC).
                        startTime = startTime + timedelta(days=-1)
                        startTime_string = startTime.strftime(
                            '%Y-%m-%dT%H:%M:%SZ')
                    else:
                        # Resume from the previous collection time.
                        startTime = instance_metric_run[0].last_run
                        startTime_string = startTime.strftime(
                            '%Y-%m-%dT%H:%M:%SZ')
                    endTime = datetime.datetime.utcnow()
                    endTime_string = endTime.strftime('%Y-%m-%dT%H:%M:%SZ')
                    metric_url = str.format(
                        'https://management.azure.com/{}/providers/microsoft.insights/metrics',
                        instance_id)
                    # 5-minute averages over [startTime, endTime].
                    metric_url_query_params = {
                        'api-version': '2016-09-01',
                        '$filter': str.format(
                            "name.value eq '{}' and aggregationType eq 'Average' and startTime eq {} and endTime eq {} and timeGrain eq duration'PT5M'",
                            metric, startTime_string, endTime_string)
                    }
                    metrics_output = requests.get(
                        metric_url,
                        params=metric_url_query_params,
                        headers={
                            'Accept': 'application/json',
                            'Authorization': 'Bearer %s' % (auth_token, )
                        })
                    if metrics_output.status_code == 200:
                        metrics_out = metrics_output.json()
                        datapoints = metrics_out['value'][0]['data']
                        # Persist each returned datapoint ('average' may
                        # be absent for empty intervals -> 0).
                        for datapoint in datapoints:
                            instancemetrics = InstanceMetrics()
                            instancemetrics.instance = instance
                            instancemetrics.vendor_type = instance.product.vendor_details.vendor_id
                            instancemetrics.metrics_type = metric
                            instancemetrics.value = datapoint.get(
                                'average', 0)
                            instancemetrics.time_stamp = datapoint[
                                'timeStamp']
                            instancemetrics.save()
                        # Record (or advance) the last-run marker.
                        if instance_metric_run.count() == 0:
                            InstanceMetricsRuns.objects.create(
                                instance=instance,
                                vendor_type=instance.product.
                                vendor_details.vendor_id,
                                metrics_type=metric,
                                last_run=endTime)
                        else:
                            instance_metric = instance_metric_run[0]
                            instance_metric.last_run = endTime
                            instance_metric.save()
                    else:
                        print("Error Fetching metrics, Investigate")
                        continue
            else:
                print("Error authenticating to Domain: %s" % (tenant_id, ))
                return  #TODO: Inform about the failure