def handle(self, *args, **options):
    """Management command handler: load one or more source files into the
    database as SourceFile objects.

    Usage: ./manage.py load_source_file <file_type> <path> [<path> ...]
    Raises CommandError on bad arguments or missing paths; zero-byte files
    are logged and skipped. Existing SourceFile data is replaced.
    """
    logger = logger_setup('mgmt_load_source_file')
    if len(args) < 2:
        raise CommandError(
            'See "./manage.py help load_source_file" for usage')
    if args[0] not in SourceFile.FILE_TYPES:
        raise CommandError(
            '"{}" is not a valid file type. Options are: {}'.format(
                args[0], ', '.join(SourceFile.FILE_TYPES.keys())))
    file_type = SourceFile.FILE_TYPES[args[0]]
    files = args[1:]
    # Validate all paths up front, so we abort before touching the database.
    for f in files:
        if not os.path.isfile(f):
            raise CommandError(
                '"{}" is not a file path, aborting'.format(f))
    for f in files:
        if os.path.getsize(f) > 0:
            source_file, created = SourceFile.objects.get_or_create(
                file_type=file_type, file_name=os.path.basename(f))
            if not created:
                # Remove previously-stored data before replacing it.
                source_file.data.delete()
            # Context manager ensures the file handle is closed (the
            # previous implementation leaked it).
            with open(f, 'rb') as fh:
                source_file.data.save(
                    os.path.basename(f), File(fh), save=True)
            logger.info('SourceFile id {} ({}, {}) updated'.format(
                source_file.id, args[0], os.path.basename(f)))
        else:
            # Log the file name; there is no database id for skipped files.
            logger.info('SourceFile {} IGNORED: File size zero'.format(
                os.path.basename(f)))
def handle(self, *args, **options):
    """Load uploaded source files into the database as SourceFile objects.

    args[0] is the file type key (must exist in SourceFile.FILE_TYPES);
    args[1:] are filesystem paths to upload. Raises CommandError on bad
    arguments; zero-byte files are logged and skipped.
    """
    logger = logger_setup('mgmt_load_source_file')
    if (len(args) < 2):
        raise CommandError('See "./manage.py help load_source_file" for usage')
    if args[0] not in SourceFile.FILE_TYPES:
        raise CommandError('"{}" is not a valid file type. Options are: {}'.format(args[0], ', '.join(SourceFile.FILE_TYPES.keys())))
    file_type = SourceFile.FILE_TYPES[args[0]]
    files = args[1:]
    # Validate every path before any database changes are made.
    for f in files:
        if not os.path.isfile(f):
            raise CommandError('"{}" is not a file path, aborting'.format(f))
    for f in files:
        if os.path.getsize(f) > 0:
            # Reuse an existing SourceFile record for this type/name, if any.
            source_file, created = SourceFile.objects.get_or_create(file_type=file_type, file_name=os.path.basename(f))
            if not created:
                # Remove the previously-stored file data before replacing it.
                source_file.data.delete()
            # NOTE(review): the handle opened here is never explicitly
            # closed — consider a `with` block.
            source_file.data.save(os.path.basename(f), File(open(f, 'rb')), save=True)
            logger.info('SourceFile id {} ({}, {}) updated'.format(source_file.id, args[0], os.path.basename(f)))
        else:
            # NOTE(review): this message formats the file NAME into the
            # 'id {}' placeholder — the wording is misleading.
            logger.info('SourceFile id {} IGNORED: File size zero'.format(os.path.basename(f)))
def freshdesk_cache_agents():
    """Cache a list of Freshdesk agents as contacts, as the API treats
    Agents differently to Contacts.
    """
    logger = logger_setup('freshdesk_cache_agents')
    for agent in get_freshdesk_objects(obj_type='agents', progress=False):
        # The contact-shaped payload lives under the 'contact' key; carry
        # the agent's own id across as contact_id.
        details = agent['contact']
        details['contact_id'] = agent['id']
        # Convert the ISO8601 timestamp strings into datetime objects.
        for key in ('created_at', 'updated_at'):
            details[key] = parse(details[key])
        details.pop('last_login_at', None)
        contact, was_created = FreshdeskContact.objects.update_or_create(
            contact_id=details['contact_id'], defaults=details)
        logger.info(
            '{} created'.format(contact) if was_created
            else '{} updated'.format(contact))
        # Attempt to match with a DepartmentUser.
        contact.match_dept_user()
def handle(self, *args, **options):
    """Management command handler: cache Freshdesk agents, then page
    through the Freshdesk 'list tickets' API caching each page of tickets.

    Honours the optional options['limit'] cap and backs off when the API
    rate limit (HTTP 429) is hit.
    """
    logger_headers = logger_setup('freshdesk_api_response_headers')
    # Begin by caching Agents as Contacts.
    utils_freshdesk.freshdesk_cache_agents()
    # Next, start caching tickets one page at a time.
    url = settings.FRESHDESK_ENDPOINT + '/tickets'
    # By default, the 'list tickets' API returns tickets created in the
    # past 30 days only. If older tickets need to be cached, modify the
    # params dict below to include a value for "updated_since".
    # Ref: https://developer.freshdesk.com/api/#list_all_tickets
    params = {'page': 1, 'per_page': 100}
    further_results = True
    cached_count = 0
    while further_results:
        # Shrink the final page size so we never fetch past the limit.
        if options['limit'] and (cached_count + params['per_page']) >= options['limit']:
            params['per_page'] = options['limit'] - cached_count
        r = requests.get(url, auth=settings.FRESHDESK_AUTH, params=params)
        logger_headers.info(json.dumps(dict(r.headers)))
        # If we've been rate-limited, response status will be 429.
        # Sleep for the number of seconds specified by the Retry-After header.
        if r.status_code == 429:
            if 'retry-after' in r.headers:
                # Header values are strings; sleep() requires a number
                # (the previous code passed the raw string -> TypeError).
                naptime = int(r.headers['retry-after'])
            else:
                naptime = 3600  # Sleep for an hour.
            sleep(naptime)
        elif r.status_code == 200:
            if 'link' not in r.headers:
                # No further paginated results.
                further_results = False
            else:
                params['page'] += 1
            tickets = r.json()
            cache = utils_freshdesk.freshdesk_cache_tickets(tickets)
            if not cache:  # Error!
                further_results = False
            cached_count += len(tickets)
            if options['limit'] and cached_count >= options['limit']:
                print('Caching limit reached; terminating.')
                further_results = False
        else:
            # Any other response status: stop paging.
            further_results = False
def freshdesk_cache_agents():
    """Cache a list of Freshdesk agents as contacts, as the API treats
    Agents differently to Contacts.
    """
    logger = logger_setup('freshdesk_cache_agents')
    agents = get_freshdesk_objects(obj_type='agents', progress=False)
    for i in agents:
        # The contact-shaped fields live under the 'contact' key; carry
        # the agent's own id across as contact_id.
        data = i['contact']
        data['contact_id'] = i['id']
        # Parse ISO8601 timestamp strings into datetime objects.
        data['created_at'] = parse(data['created_at'])
        data['updated_at'] = parse(data['updated_at'])
        # Drop a field that the local model does not store.
        data.pop('last_login_at', None)
        fc, create = FreshdeskContact.objects.update_or_create(
            contact_id=data['contact_id'], defaults=data)
        if create:
            logger.info('{} created'.format(fc))
        else:
            logger.info('{} updated'.format(fc))
        # Attempt to match with a DepartmentUser.
        fc.match_dept_user()
def save_model(self, request, obj, form, change):
    """Override save_model in order to log any changes to some fields:
    'given_name', 'surname', 'employee_id', 'cost_centre', 'name', 'org_unit'

    The model stashes each field's original value on a name-mangled
    '_DepartmentUser__original_<field>' attribute; any difference from the
    current value is logged before saving.
    """
    logger = logger_setup('departmentuser_updates')
    msg = 'DepartmentUser: {}, field: {}, original_value: {} new_value: {}, changed_by: {}, reference: {}'
    # Data-driven comparison replaces six copy-pasted if-blocks; the field
    # order matches the original implementation.
    for field in ('given_name', 'surname', 'employee_id', 'cost_centre',
                  'name', 'org_unit'):
        original = getattr(obj, '_DepartmentUser__original_{}'.format(field))
        current = getattr(obj, field)
        if original != current:
            logger.info(msg.format(
                obj.email, field, original, current,
                request.user.username, obj.name_update_reference))
    obj.save()
def validate_lpr(lpr_id):
    """Async task that accepts a LocalPropertyRegister object PK, and then
    iterates through the uploaded spreadsheet to validate data and record
    any failures in the Incredibus database.

    Returns True on completion, or False if the LocalPropertyRegister
    does not exist. (The original placed this text after statements, so it
    was never a real docstring; it now is.)
    """
    from tracking.utils import logger_setup  # Avoid circular import.
    logger = logger_setup('task_validate_lpr')
    try:
        lpr = LocalPropertyRegister.objects.get(pk=lpr_id)
    except ObjectDoesNotExist:
        logger.error('LocalPropertyRegister object not found ({})'.format(lpr_id))
        return False
    # Read source data into an iterable, prior to validation.
    # Data standard:
    # - Uploaded file must be XLS, XLSX or CSV (validated by upload form).
    # - [Excel] Data to be validated is on the first worksheet.
    # - The first row of data is always a header row (skipped).
    # - Column 1 contains device serial numbers, in text format.
    # - Column 2 contains the assigned cost centre number, in integer format.
    logger.info('Handling {}'.format(lpr.uploaded_file.name))
    # Use the csv standard lib for CSV files.
    if os.path.splitext(lpr.uploaded_file.name)[1] == '.csv':
        # (Removed an unused row count that also leaked an open file handle.)
        data = csv_data(lpr.uploaded_file.path)
        next(data)  # Skip header row; next() works on Python 2 and 3.
        data = [i for i in data]
    # Use the openpyxl lib for XLSX files.
    elif os.path.splitext(lpr.uploaded_file.name)[1] == '.xlsx':
        workbook = load_workbook(filename=lpr.uploaded_file.path, read_only=True)
        sheet = workbook.active  # Sheet index 0.
        data = [[cell.value for cell in row] for row in sheet.rows]
        data.pop(0)  # Pop out the header row.
    # Use the xlrd lib for XLS files.
    else:
        workbook = open_workbook(lpr.uploaded_file.path)
        sheet = workbook.sheet_by_index(0)
        data = []
        for i in range(1, sheet.nrows):  # Skip header row.
            data.append([sheet.cell(i, 0).value, sheet.cell(i, 1).value])
    # Row validation rules:
    # * Column 1 is a serial number.
    # * Column 1 cannot be empty/blank.
    # * Column 1 has a maximum length of 128 characters.
    # * Column 2 is a cost centre number.
    # * Column 2 cannot be empty/blank.
    # * Column 2 must be interpreted as an integer value.
    for row_num, row in enumerate(data, 2):
        # Note that we start row_num at 2 to make sense for validation messages.
        failures = []
        # Serial number column
        if not row[0]:
            failures.append('Serial number is blank for row {}'.format(row_num))
            logger.warn('Serial number is blank for row {}'.format(row_num))
        else:
            # Account for text-encodings (Latin1), trailing spaces, etc.
            serial = row[0].replace(u'\xa0', u' ')
            serial = unidecode(serial).strip().upper()
            if len(serial) > 128:
                failures.append('Serial number is greater than 128 characters')
                logger.warn('Serial number is >128 chars for row {}'.format(row_num))
        # Cost centre column
        if not row[1]:
            failures.append('Cost centre is blank for row {}'.format(row_num))
            logger.warn('Cost centre is blank for row {}'.format(row_num))
        else:
            # Account for text-encodings (Latin1), trailing spaces, etc.
            cc = unidecode(str(row[1])).strip()
            try:
                cc = int(cc)
            except ValueError:  # CC can't be cast as integer.
                failures.append('Cost centre {} is invalid for row {}'.format(cc, row_num))
                logger.warn('Cost centre {} is invalid for row {}'.format(cc, row_num))
        # Fatal validation errors: log any failures and skip to the next row.
        if failures:
            for f in failures:
                LprValidation.objects.create(lpr=lpr, row=row_num, validation=f)
            continue
        # No validation errors so far; try matching a Computer or Mobile.
        computer = None
        mobile = None
        # Computer
        qs = Computer.objects.filter(serial_number=serial)
        if qs.count() > 1:
            failures.append('Serial no {} for row {} matches >1 computer in the db'.format(serial, row_num))
            logger.warn('Serial no {} for row {} matches >1 computer in the db'.format(serial, row_num))
        elif qs.exists():
            computer = Computer.objects.get(serial_number=serial)
            cc_no = computer.cost_centre_no
            if cc_no and cc_no != cc:
                # Already assigned to a CC, <> the CC supplied.
                failures.append('Computer with serial {} for row {} is assigned to cost centre {}; cannot reassign to {}'.format(serial, row_num, cc_no, cc))
                logger.warn('Computer with serial {} for row {} is assigned to cost centre {}; cannot reassign to {}'.format(serial, row_num, cc_no, cc))
            elif not cc_no:  # CC not set; do so now.
                computer.cost_centre_no = cc
                computer.save()
                logger.info('Computer {} cost centre set to {}.'.format(computer, cc))
        # Mobile
        qs = Mobile.objects.filter(serial_number=serial)
        if qs.count() > 1:
            failures.append('Serial no {} for row {} matches >1 mobile in the db'.format(serial, row_num))
            logger.warn('Serial no {} for row {} matches >1 mobile in the db'.format(serial, row_num))
        elif qs.exists():
            mobile = Mobile.objects.get(serial_number=serial)
            cc_no = mobile.cost_centre_no
            if cc_no and cc_no != cc:
                failures.append('Mobile with serial {} for row {} is assigned to cost centre {}; cannot reassign to {}'.format(serial, row_num, cc_no, cc))
                logger.warn('Mobile with serial {} for row {} is assigned to cost centre {}; cannot reassign to {}'.format(serial, row_num, cc_no, cc))
            elif not cc_no:  # CC not set; do so now.
                mobile.cost_centre_no = cc
                mobile.save()
                logger.info('Mobile {} cost centre set to {}.'.format(mobile, cc))
        # Serial number couldn't be matched to any object.
        if not computer and not mobile:
            failures.append('Serial no {} for row {} does not match any single computer or mobile in the db'.format(serial, row_num))
            logger.warn('Serial no {} for row {} does not match any single computer or mobile in the db'.format(serial, row_num))
        for f in failures:
            LprValidation.objects.create(lpr=lpr, row=row_num, validation=f)
    # TODO: email the uploading user a list of any validation errors.
    return True
def freshdesk_sync_contacts(contacts=None, companies=None, agents=None):
    """Iterate through all DepartmentUser objects, and ensure that each
    user's information is synced correctly to a Freshdesk contact.
    May optionally be passed in dicts of contacts & companies.

    Returns True on completion, False on any query/API failure.
    """
    logger = logger_setup('freshdesk_sync_contacts')
    try:
        if not contacts:
            logger.info('Querying Freshdesk for current contacts')
            contacts = get_freshdesk_objects(obj_type='contacts', progress=False)
            contacts = {c['email'].lower(): c for c in contacts if c['email']}
        if not companies:
            logger.info('Querying Freshdesk for current companies')
            companies = get_freshdesk_objects(obj_type='companies', progress=False)
            companies = {c['name']: c for c in companies}
        if not agents:
            logger.info('Querying Freshdesk for current agents')
            agents = get_freshdesk_objects(obj_type='agents', progress=False)
            agents = {
                a['contact']['email'].lower(): a['contact']
                for a in agents if a['contact']['email']
            }
    except Exception as e:
        logger.exception(e)
        return False
    # Filter DepartmentUsers: valid email (contains @), not -admin,
    # DN contains 'OU=Users', active.
    d_users = DepartmentUser.objects.filter(
        email__contains='@', ad_dn__contains='OU=Users',
        active=True).exclude(email__contains='-admin')
    logger.info('Syncing details for {} DepartmentUsers to Freshdesk'.format(
        d_users.count()))
    for user in d_users:
        if user.email.lower() in contacts:
            # The DepartmentUser exists in Freshdesk; verify and update details.
            fd = contacts[user.email.lower()]
            data = {}
            user_sync = False
            # Use extra attributes from org_data, if available.
            cost_centre = user.org_data.get('cost_centre', {}).get(
                'code', '') if user.org_data else None
            try:
                # The cost_centre custom field in FD must be an integer.
                cost_centre = int(cost_centre)
            except (TypeError, ValueError):
                # Narrowed from a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit.
                cost_centre = None
            physical_location = user.org_data.get('location', {}).get(
                'name', '') if user.org_data else None
            department = user.org_data.get('units', []) if user.org_data else []
            department = department[0].get('name', '') if len(department) > 0 else None
            changes = []
            if user.name != fd['name']:
                user_sync = True
                data['name'] = user.name
                changes.append('name')
            if user.telephone != fd['phone']:
                user_sync = True
                data['phone'] = user.telephone
                changes.append('phone')
            if user.title != fd['job_title']:
                user_sync = True
                data['job_title'] = user.title
                changes.append('job_title')
            if department and department in companies and fd[
                    'company_id'] != companies[department]['id']:
                user_sync = True
                data['company_id'] = companies[department]['id']
                changes.append('company_id')
            # Custom fields in Freshdesk: Cost Centre no.
            if 'custom_fields' in fd:  # Field may not exist in the API obj.
                if cost_centre and fd['custom_fields'][
                        'cost_centre'] != cost_centre:
                    user_sync = True
                    data['custom_fields'] = {'cost_centre': cost_centre}
                    changes.append('cost_centre')
                # Custom fields in Freshdesk: Physical location
                if physical_location and fd['custom_fields'][
                        'location'] != physical_location:
                    user_sync = True
                    if 'custom_fields' in data:
                        data['custom_fields']['location'] = physical_location
                    else:
                        data['custom_fields'] = {'location': physical_location}
                    changes.append('physical_location')
            if user_sync:  # Sync user details to their Freshdesk contact.
                r = update_freshdesk_object('contacts', data, fd['id'])
                if r.status_code == 403:  # Forbidden
                    # A 403 response probably means that we hit the API
                    # throttle limit. Abort the synchronisation.
                    logger.error(
                        'HTTP403 received from Freshdesk API, aborting')
                    return False
                logger.info(
                    '{} was updated in Freshdesk (status {}), changed: {}'.format(
                        user.email.lower(), r.status_code, ', '.join(changes)))
            else:
                logger.info('{} already up to date in Freshdesk'.format(
                    user.email.lower()))
        elif user.email.lower() in agents:
            # The DepartmentUser is an agent; skip (can't update Agent
            # details via the API).
            logger.info('{} is an agent, skipping sync'.format(
                user.email.lower()))
            continue
        else:
            # The DepartmentUser does not exist in Freshdesk; create them
            # as a Contact.
            data = {
                'name': user.name,
                'email': user.email.lower(),
                'phone': user.telephone,
                'job_title': user.title
            }
            department = user.org_data.get('units', []) if user.org_data else []
            department = department[0].get('name', '') if len(department) > 0 else None
            if department and department in companies:
                data['company_id'] = companies[department]['id']
            r = update_freshdesk_object('contacts', data)
            if not r.status_code == 200:  # Error, unable to process request.
                logger.warn('{} not created in Freshdesk (status {})'.format(
                    user.email.lower(), r.status_code))
            else:
                logger.info('{} created in Freshdesk (status {})'.format(
                    user.email.lower(), r.status_code))
    return True
def _freshdesk_cache_contact(contact_id, logger):
    """Fetch a single Freshdesk contact via the API and cache it as a
    FreshdeskContact. Returns the new FreshdeskContact, or None if the
    GET failed (e.g. the contact is actually an agent).

    Extracted from three identical copy-pasted stanzas in
    freshdesk_cache_tickets.
    """
    try:
        c = get_freshdesk_object(obj_type='contacts', id=contact_id)
    except HTTPError:
        # The GET might fail if the contact is an agent.
        logger.error('HTTP 404 Freshdesk contact not found: {}'.format(contact_id))
        return None
    c['contact_id'] = c.pop('id')  # Rename key 'id'.
    # Parse ISO8601-formatted date strings into datetimes.
    c['created_at'] = parse(c['created_at'])
    c['updated_at'] = parse(c['updated_at'])
    # Pop unused fields from the dict.
    for field in ('avatar', 'company_id', 'twitter_id', 'deleted'):
        c.pop(field, None)
    con = FreshdeskContact.objects.create(**c)
    logger.info('Created {}'.format(con))
    return con


def freshdesk_cache_tickets(tickets=None):
    """Cache passed-in list of Freshdesk tickets in the database.
    If no tickets are passed in, query the API for the newest tickets.

    Returns True on success, False on any error.
    """
    logger = logger_setup('freshdesk_cache_tickets')
    if not tickets:
        try:
            logger.info('Querying Freshdesk for current tickets')
            tickets = get_freshdesk_objects(obj_type='tickets', progress=False, limit=30)
        except Exception as e:
            logger.exception(e)
            return False
    # Tweak the passed-in list of ticket values, prior to caching.
    for t in tickets:
        t['ticket_id'] = t.pop('id')  # Rename key 'id'.
        # Parse ISO8601-formatted date strings into datetime objs.
        t['created_at'] = parse(t['created_at'])
        t['due_by'] = parse(t['due_by'])
        t['fr_due_by'] = parse(t['fr_due_by'])
        t['updated_at'] = parse(t['updated_at'])
        # Pop unused fields from the dict.
        t.pop('company_id', None)
        t.pop('email_config_id', None)
        t.pop('product_id', None)
    created, updated = 0, 0
    # Iterate through tickets; determine if a cached FreshdeskTicket should
    # be created or updated.
    for t in tickets:
        try:
            ft, create = FreshdeskTicket.objects.update_or_create(
                ticket_id=t['ticket_id'], defaults=t)
            if create:
                logger.info('{} created'.format(ft))
                created += 1
            else:
                logger.info('{} updated'.format(ft))
                updated += 1
            # Sync contact objects (requester and responder).
            # Check local cache first, to reduce the no. of API calls.
            if ft.requester_id:
                if FreshdeskContact.objects.filter(
                        contact_id=ft.requester_id).exists():
                    ft.freshdesk_requester = FreshdeskContact.objects.get(
                        contact_id=ft.requester_id)
                else:
                    con = _freshdesk_cache_contact(ft.requester_id, logger)
                    if con:
                        ft.freshdesk_requester = con
            if ft.responder_id:
                if FreshdeskContact.objects.filter(
                        contact_id=ft.responder_id).exists():
                    ft.freshdesk_responder = FreshdeskContact.objects.get(
                        contact_id=ft.responder_id)
                else:
                    con = _freshdesk_cache_contact(ft.responder_id, logger)
                    if con:
                        ft.freshdesk_responder = con
            ft.save()
            # Try matching the ticket to an ITSystem object.
            ft.match_it_system()
            # Sync ticket conversation objects.
            obj = 'tickets/{}/conversations'.format(t['ticket_id'])
            convs = get_freshdesk_objects(obj_type=obj, progress=False)
            for c in convs:
                c['conversation_id'] = c.pop('id')  # Rename key 'id'.
                # Parse ISO8601-formatted date strings into datetimes.
                c['created_at'] = parse(c['created_at'])
                c['updated_at'] = parse(c['updated_at'])
                # Pop unused fields from the dict.
                c.pop('bcc_emails', None)
                c.pop('support_email', None)
                fc, create = FreshdeskConversation.objects.update_or_create(
                    conversation_id=c['conversation_id'], defaults=c)
                if create:
                    logger.info('{} created'.format(fc))
                else:
                    logger.info('{} updated'.format(fc))
                # Link parent ticket, DepartmentUser, etc.
                fc.freshdesk_ticket = ft
                if FreshdeskContact.objects.filter(
                        contact_id=fc.user_id).exists():
                    fc.freshdesk_contact = FreshdeskContact.objects.get(
                        contact_id=fc.user_id)
                else:
                    # BUG FIX: the previous code logged ft.requester_id on
                    # failure here instead of the conversation's fc.user_id;
                    # the helper now logs the id it was actually given.
                    contact = _freshdesk_cache_contact(fc.user_id, logger)
                    if contact:
                        fc.freshdesk_contact = contact
                        # Attempt to match contact with a DepartmentUser.
                        contact.match_dept_user()
                # Attempt to match conversation with a DepartmentUser.
                if fc.freshdesk_contact and DepartmentUser.objects.filter(
                        email__iexact=fc.freshdesk_contact.email).exists():
                    fc.du_user = DepartmentUser.objects.get(
                        email__iexact=fc.freshdesk_contact.email)
                fc.save()
        except Exception as e:
            logger.exception(e)
            return False
    logger.info('Ticket sync: {} created, {} updated'.format(created, updated))
    print('{} created, {} updated'.format(created, updated))
    return True
def pdq_load_hardware():
    """Update the database with Computer information from PDQ Inventory.
    Also create matching Hardware objects in the register application, if
    required.

    Reads pdq_computers.csv from the PDQ_INV_PATH directory and matches
    each row to a Computer by (in order): AD GUID, AD DN, PDQ id, then
    sAMAccountName; unmatched rows create a new Computer.
    """
    from registers.models import Hardware
    logger = logger_setup('pdq_load_computers')
    logger_ex = logger_setup('exceptions_pdq_load_computers')
    update_time = timezone.now()
    csv_path = os.path.join(os.environ.get('PDQ_INV_PATH'), 'pdq_computers.csv')
    data = csv_data(csv_path)
    num_created = 0
    num_updated = 0
    num_skipped = 0
    for row in data:
        computer = None
        try:
            # row[2] is expected to hold the AD GUID; normalise it to a URN.
            urn = UUID(row[2]).urn
        except Exception as e:
            logger_ex.error('Computer {} has invalid Active Directory GUID in PDQ Inventory {}, skipping.'.format(row[1], row[2]))
            logger_ex.info(row)
            logger_ex.exception(e)
            num_skipped += 1
            continue
        # First, try and match AD GUID.
        # NOTE(review): `urn and row[2]` is redundant — both are always
        # truthy here, since UUID(row[2]) succeeded above.
        if urn and row[2]:
            try:
                computer = Computer.objects.get(ad_guid=urn)
                num_updated += 1
            except Computer.DoesNotExist:
                pass
        # Second, try and match AD DN.
        if computer is None and row[3]:
            try:
                computer = Computer.objects.get(ad_dn=row[3])
                num_updated += 1
            except Computer.DoesNotExist:
                pass
        # Third, try to match via pdq_id value.
        if computer is None and row[0]:
            try:
                computer = Computer.objects.get(pdq_id=row[0])
                num_updated += 1
            except Computer.DoesNotExist:
                pass
        # Last, try to match via sAMAccountName. If no match, skip the record.
        if computer is None:
            # Computer accounts in AD carry a trailing '$' on the name.
            sam = '{}$'.format(row[1].upper())
            try:
                computer = Computer.objects.get(sam_account_name=sam)
                num_updated += 1
            except Computer.DoesNotExist:
                logger.info('No match for Computer object with SAM ID {} creating new object'.format(sam))
                computer = Computer(sam_account_name=sam)
                num_created += 1
                pass
        # Overwrite the Computer's fields from the CSV row (columns 0-17).
        computer.domain_bound = True
        computer.hostname = row[17]
        computer.pdq_id = int(row[0])
        computer.ad_guid = urn
        computer.ad_dn = row[3]
        computer.manufacturer = row[5]
        computer.model = row[6]
        computer.chassis = row[7]
        computer.serial_number = row[8]
        computer.os_name = row[9]
        computer.os_version = row[10]
        computer.os_service_pack = row[11]
        computer.os_arch = row[12]
        computer.cpu = row[13]
        computer.cpu_count = row[14]
        computer.cpu_cores = row[15]
        computer.memory = row[16]
        computer.date_pdq_updated = update_time
        computer.save()
        logger.info('Computer {} updated from PDQ Inventory scan data'.format(computer))
        # Ensure a register Hardware record exists and points at this Computer.
        try:
            hw = computer.hardware
        except Hardware.DoesNotExist:
            # Check if the host already exists.
            if Hardware.objects.filter(name__icontains=computer.hostname).exists():
                hw = Hardware.objects.get(name__icontains=computer.hostname)
                hw.computer = computer
                hw.save()
            else:
                # presumably device_type=3 means 'computer' — TODO confirm
                # against the Hardware model's choices.
                hw = Hardware.objects.create(device_type=3, computer=computer, name=computer.hostname)
    logger.info('Created {}, updated {}, skipped {}'.format(num_created, num_updated, num_skipped))
def freshdesk_sync_contacts(contacts=None, companies=None, agents=None):
    """Iterate through all DepartmentUser objects, and ensure that each
    user's information is synced correctly to a Freshdesk contact.
    May optionally be passed in dicts of contacts & companies.
    """
    logger = logger_setup('freshdesk_sync_contacts')
    try:
        if not contacts:
            logger.info('Querying Freshdesk for current contacts')
            contacts = get_freshdesk_objects(obj_type='contacts', progress=False)
            # Key contacts by lower-cased email; drop contacts with no email.
            contacts = {c['email'].lower(): c for c in contacts if c['email']}
        if not companies:
            logger.info('Querying Freshdesk for current companies')
            companies = get_freshdesk_objects(obj_type='companies', progress=False)
            companies = {c['name']: c for c in companies}
        if not agents:
            logger.info('Querying Freshdesk for current agents')
            agents = get_freshdesk_objects(obj_type='agents', progress=False)
            agents = {a['contact']['email'].lower(): a['contact'] for a in agents if a['contact']['email']}
    except Exception as e:
        logger.exception(e)
        return False
    # Filter DepartmentUsers: valid email (contains @), not -admin, DN contains 'OU=Users'
    d_users = DepartmentUser.objects.filter(email__contains='@', ad_dn__contains='OU=Users').exclude(email__contains='-admin')
    logger.info('Syncing details for {} DepartmentUsers to Freshdesk'.format(d_users.count()))
    for user in d_users:
        if user.email.lower() in contacts:
            # The DepartmentUser exists in Freshdesk; verify and update details.
            fd = contacts[user.email.lower()]
            data = {}
            user_sync = False
            # use extra attributes from org_data, if available
            cost_centre = user.org_data.get('cost_centre', {}).get('code', "") if user.org_data else ""
            physical_location = user.org_data.get('location', {}).get('name', "") if user.org_data else ""
            department = user.org_data.get('units', []) if user.org_data else []
            department = department[0].get('name', "") if len(department) > 0 else ""
            changes = []
            if user.name != fd['name']:
                user_sync = True
                data['name'] = user.name
                changes.append('name')
            if user.telephone != fd['phone']:
                user_sync = True
                data['phone'] = user.telephone
                changes.append('phone')
            if user.title != fd['job_title']:
                user_sync = True
                data['job_title'] = user.title
                changes.append('job_title')
            if department and department in companies and fd['company_id'] != companies[department]['id']:
                user_sync = True
                data['company_id'] = companies[department]['id']
                changes.append('company_id')
            # Custom fields in Freshdesk: Cost Centre no.
            if 'custom_field' in fd:  # Field may not exist in the API obj.
                if fd['custom_field']['cf_cost_centre'] != cost_centre:
                    user_sync = True
                    data['custom_field'] = {'cf_cost_centre': cost_centre}
                    changes.append('cost_centre')
                # Custom fields in Freshdesk: Physical location
                if fd['custom_field']['cf_location'] != physical_location:
                    user_sync = True
                    if 'custom_field' in data:
                        data['custom_field']['cf_location'] = physical_location
                    else:
                        data['custom_field'] = {'cf_location': physical_location}
                    changes.append('physical_location')
            if user_sync:  # Sync user details to their Freshdesk contact.
                r = update_freshdesk_object('contacts', data, fd['id'])
                if r.status_code == 403:  # Forbidden
                    # A 403 response probably means that we hit the API throttle limit.
                    # Abort the synchronisation.
                    logger.error('HTTP403 received from Freshdesk API, aborting')
                    return False
                logger.info('{} was updated in Freshdesk (status {}), changed: {}'.format(
                    user.email.lower(), r.status_code, ', '.join(changes)))
            else:
                logger.info('{} already up to date in Freshdesk'.format(user.email.lower()))
        elif user.email.lower() in agents:
            # The DepartmentUser is an agent; skip (can't update Agent details via the API).
            logger.info('{} is an agent, skipping sync'.format(user.email.lower()))
            continue
        else:
            # The DepartmentUser does not exist in Freshdesk; create them as a Contact.
            data = {'name': user.name, 'email': user.email.lower(),
                    'phone': user.telephone, 'job_title': user.title}
            # NOTE(review): `department` is only assigned in the first branch
            # above, so it can be unbound (NameError) here if the first user
            # processed is not already a Freshdesk contact — recompute it
            # from user.org_data as the sibling implementation does.
            if department and department in companies:
                data['company_id'] = companies[department]['id']
            r = update_freshdesk_object('contacts', data)
            if not r.status_code == 200:  # Error, unable to process request.
                logger.warn('{} not created in Freshdesk (status {})'.format(user.email.lower(), r.status_code))
            else:
                logger.info('{} created in Freshdesk (status {})'.format(user.email.lower(), r.status_code))
    return True
def freshdesk_cache_tickets(tickets=None):
    """Cache passed-in list of Freshdesk tickets in the database.
    If no tickets are passed in, query the API for the newest tickets.

    Returns True on success, or False if the initial API query or the sync
    of any single ticket raises an exception.
    """
    logger = logger_setup('freshdesk_cache_tickets')

    def get_or_cache_contact(contact_id, match_user=False):
        """Return the cached FreshdeskContact for ``contact_id``, querying the
        Freshdesk API and caching the result if it is not present locally.
        Returns None if the API lookup fails (e.g. the contact is an agent).
        If ``match_user`` is True, attempt to match a newly-cached contact
        with a DepartmentUser.
        """
        # Check local cache first, to reduce the no. of API calls.
        cached = FreshdeskContact.objects.filter(contact_id=contact_id).first()
        if cached:
            return cached
        try:
            c = get_freshdesk_object(obj_type='contacts', id=contact_id)
        except HTTPError:
            # The GET might fail if the contact is an agent.
            logger.error('HTTP 404 Freshdesk contact not found: {}'.format(contact_id))
            return None
        # Rename key 'id'; parse ISO8601 date strings into datetimes.
        c['contact_id'] = c.pop('id')
        c['created_at'] = parse(c['created_at'])
        c['updated_at'] = parse(c['updated_at'])
        # Pop unused fields from the dict.
        for field in ('avatar', 'company_id', 'twitter_id', 'deleted'):
            c.pop(field, None)
        contact = FreshdeskContact.objects.create(**c)
        logger.info('Created {}'.format(contact))
        if match_user:
            # Attempt to match contact with a DepartmentUser.
            contact.match_dept_user()
        return contact

    if not tickets:
        try:
            logger.info('Querying Freshdesk for current tickets')
            tickets = get_freshdesk_objects(obj_type='tickets', progress=False, limit=30)
        except Exception as e:
            logger.exception(e)
            return False
    # Tweak the passed-in list of ticket values, prior to caching.
    for t in tickets:
        # Rename key 'id'.
        t['ticket_id'] = t.pop('id')
        # Parse ISO8601-formatted date strings into datetime objs.
        for field in ('created_at', 'due_by', 'fr_due_by', 'updated_at'):
            t[field] = parse(t[field])
        # Pop unused fields from the dict.
        for field in ('company_id', 'email_config_id', 'product_id'):
            t.pop(field, None)
    created, updated = 0, 0
    # Iterate through tickets; determine if a cached FreshdeskTicket should be
    # created or updated.
    for t in tickets:
        try:
            ft, create = FreshdeskTicket.objects.update_or_create(
                ticket_id=t['ticket_id'], defaults=t)
            if create:
                logger.info('{} created'.format(ft))
                created += 1
            else:
                logger.info('{} updated'.format(ft))
                updated += 1
            # Sync contact objects (requester and responder).
            if ft.requester_id:
                requester = get_or_cache_contact(ft.requester_id)
                if requester:
                    ft.freshdesk_requester = requester
            if ft.responder_id:
                responder = get_or_cache_contact(ft.responder_id)
                if responder:
                    ft.freshdesk_responder = responder
            ft.save()
            # Try matching the ticket to an ITSystem object.
            ft.match_it_system()
            # Sync ticket conversation objects.
            obj = 'tickets/{}/conversations'.format(t['ticket_id'])
            convs = get_freshdesk_objects(obj_type=obj, progress=False)
            for c in convs:
                # Rename key 'id'.
                c['conversation_id'] = c.pop('id')
                # Parse ISO8601-formatted date strings into datetimes.
                c['created_at'] = parse(c['created_at'])
                c['updated_at'] = parse(c['updated_at'])
                # Pop unused fields from the dict.
                c.pop('bcc_emails', None)
                c.pop('support_email', None)
                fc, create = FreshdeskConversation.objects.update_or_create(
                    conversation_id=c['conversation_id'], defaults=c)
                if create:
                    logger.info('{} created'.format(fc))
                else:
                    logger.info('{} updated'.format(fc))
                # Link parent ticket, contact, DepartmentUser, etc.
                fc.freshdesk_ticket = ft
                # NOTE: was logging ft.requester_id on contact-fetch failure
                # here; the helper now logs the correct id (fc.user_id).
                contact = get_or_cache_contact(fc.user_id, match_user=True)
                if contact:
                    fc.freshdesk_contact = contact
                # Attempt to match conversation with a DepartmentUser.
                if fc.freshdesk_contact and DepartmentUser.objects.filter(
                        email__iexact=fc.freshdesk_contact.email).exists():
                    fc.du_user = DepartmentUser.objects.get(
                        email__iexact=fc.freshdesk_contact.email)
                fc.save()
        except Exception as e:
            logger.exception(e)
            return False
    logger.info('Ticket sync: {} created, {} updated'.format(created, updated))
    print('{} created, {} updated'.format(created, updated))
    return True
def freshdesk_sync_contacts(contacts=None, companies=None, agents=None):
    """Iterate through all DepartmentUser objects, and ensure that each
    user's information is synced correctly to a Freshdesk contact.
    May optionally be passed in dicts of contacts & companies.

    Returns True on success, or False if the Freshdesk API queries fail or
    the API throttle limit (HTTP 403) is reached.
    """
    logger = logger_setup('freshdesk_sync_contacts')
    try:
        if not contacts:
            logger.info('Querying Freshdesk for current contacts')
            contacts = get_freshdesk_users(progress=True)
        if not companies:
            logger.info('Querying Freshdesk for current companies')
            companies = get_freshdesk_companies(progress=False)
        if not agents:
            logger.info('Querying Freshdesk for current agents')
            agents = get_freshdesk_agents(progress=False)
    except Exception as e:
        # Notify the admins that the API query failed, then bail out.
        send_mail(
            'Freshdesk "all contacts" API query failed: {}'.format(
                socket.gethostname()),
            '''Freshdesk "all contacts" query failed for host {}, '''
            '''project directory {}\nException:\n{}'''.format(
                socket.gethostname(), os.path.dirname(__file__), e),
            '*****@*****.**',
            list(settings.ADMINS),
            fail_silently=False)
        logger.exception(e)
        return False
    # Filter DepartmentUsers: valid email (contains @), not -admin, DN contains 'OU=Users'
    for user in DepartmentUser.objects.filter(
            email__contains='@',
            ad_dn__contains='OU=Users').exclude(email__contains='-admin'):
        # Use extra attributes from org_data, if available. These are computed
        # up front because ``department`` is needed in both the update and the
        # create branches below (previously it was only assigned in the update
        # branch, so the create branch could see a stale or undefined value).
        cost_centre = user.org_data.get('cost_centre', {}).get(
            'code', "") if user.org_data else ""
        physical_location = user.org_data.get('location', {}).get(
            'name', "") if user.org_data else ""
        department = user.org_data.get('units', []) if user.org_data else []
        department = department[0].get('name', "") if len(department) > 0 else ""
        if user.email.lower() in contacts:
            # The DepartmentUser exists in Freshdesk; verify and update details.
            fd = contacts[user.email.lower()]
            data = {}
            user_sync = False
            changes = []
            if user.name != fd['name']:
                user_sync = True
                data['name'] = user.name
                changes.append('name')
            if user.telephone != fd['phone']:
                user_sync = True
                data['phone'] = user.telephone
                changes.append('phone')
            if user.title != fd['job_title']:
                user_sync = True
                data['job_title'] = user.title
                changes.append('job_title')
            if department and department in companies and fd[
                    'company_id'] != companies[department]['id']:
                user_sync = True
                data['company_id'] = companies[department]['id']
                changes.append('company_id')
            if 'custom_field' in fd:  # Field may not exist in the API obj.
                # Custom fields in Freshdesk: Cost Centre no.
                if fd['custom_field']['cf_cost_centre'] != cost_centre:
                    user_sync = True
                    data['custom_field'] = {'cf_cost_centre': cost_centre}
                    changes.append('cost_centre')
                # Custom fields in Freshdesk: Physical location
                if fd['custom_field']['cf_location'] != physical_location:
                    user_sync = True
                    if 'custom_field' in data:
                        data['custom_field']['cf_location'] = physical_location
                    else:
                        data['custom_field'] = {'cf_location': physical_location}
                    changes.append('physical_location')
            if user_sync:  # Sync user details to their Freshdesk contact.
                resp = freshdesk_contact_update(fd['id'], data)
                if resp.status_code == 403:  # Forbidden
                    # A 403 response probably means that we hit the API
                    # throttle limit. Abort the synchronisation.
                    logger.error(
                        'HTTP403 received from Freshdesk API, aborting')
                    return False
                logger.info(
                    '{} was updated in Freshdesk (status {}), changed: {}'.format(
                        user.email.lower(), resp.status_code, ', '.join(changes)))
            else:
                logger.info('{} already up to date in Freshdesk'.format(
                    user.email.lower()))
        elif user.email.lower() in agents:
            # The DepartmentUser is an agent; skip (can't update Agent details via the API).
            logger.info('{} is an agent, skipping sync'.format(
                user.email.lower()))
            continue
        else:
            # The DepartmentUser does not exist in Freshdesk; create them as a Contact.
            data = {
                'name': user.name,
                'email': user.email.lower(),
                'phone': user.telephone,
                'job_title': user.title
            }
            if department and department in companies:
                data['company_id'] = companies[department]['id']
            resp = freshdesk_contact_create(data)
            if resp.status_code != 200:
                # Error, unable to process request.
                logger.warning('{} not created in Freshdesk (status {})'.format(
                    user.email.lower(), resp.status_code))
            else:
                logger.info('{} created in Freshdesk (status {})'.format(
                    user.email.lower(), resp.status_code))
    return True
def validate_lpr(lpr_id):
    """Async task that accepts a LocalPropertyRegister object PK, and then
    iterates through the uploaded spreadsheet to validate data and record any
    failures in the Incredibus database.

    Returns False if the LocalPropertyRegister does not exist, True otherwise.
    """
    from tracking.utils import logger_setup  # Avoid circular import.
    logger = logger_setup('task_validate_lpr')
    try:
        lpr = LocalPropertyRegister.objects.get(pk=lpr_id)
    except ObjectDoesNotExist:
        logger.error(
            'LocalPropertyRegister object not found ({})'.format(lpr_id))
        return False
    # Read source data into an iterable, prior to validation.
    # Data standard:
    # - Uploaded file must be XLS, XLSX or CSV (validated by upload form).
    # - [Excel] Data to be validated is on the first worksheet.
    # - The first row of data is always a header row (skipped).
    # - Column 1 contains device serial numbers, in text format.
    # - Column 2 contains the assigned cost centre number, in integer format.
    logger.info('Handling {}'.format(lpr.uploaded_file.name))
    ext = os.path.splitext(lpr.uploaded_file.name)[1]
    if ext == '.csv':
        # Use the csv standard lib for CSV files.
        data = csv_data(lpr.uploaded_file.path)
        next(data)  # Skip header row.
        data = list(data)
    elif ext == '.xlsx':
        # Use the openpyxl lib for XLSX files.
        workbook = load_workbook(filename=lpr.uploaded_file.path, read_only=True)
        sheet = workbook.active  # Sheet index 0.
        data = [[cell.value for cell in row] for row in sheet.rows]
        data.pop(0)  # Pop out the header row.
    else:
        # Use the xlrd lib for XLS files.
        workbook = open_workbook(lpr.uploaded_file.path)
        sheet = workbook.sheet_by_index(0)
        data = []
        for i in range(1, sheet.nrows):  # Skip header row.
            data.append([sheet.cell(i, 0).value, sheet.cell(i, 1).value])
    # Row validation rules:
    # * Column 1 is a serial number.
    # * Column 1 cannot be empty/blank.
    # * Column 1 has a maximum length of 128 characters.
    # * Column 2 is a cost centre number.
    # * Column 2 cannot be empty/blank.
    # * Column 2 must be interpreted as an integer value.
    for row_num, row in enumerate(data, 2):
        # Note that we start row_num at 2 to make sense for validation messages.
        failures = []
        # Serial number column
        if not row[0]:
            failures.append(
                'Serial number is blank for row {}'.format(row_num))
            logger.warning('Serial number is blank for row {}'.format(row_num))
        else:
            # Account for text-encodings (Latin1), trailing spaces, etc.
            serial = row[0].replace(u'\xa0', u' ')
            serial = unidecode(serial).strip().upper()
            if len(serial) > 128:
                failures.append('Serial number is greater than 128 characters')
                logger.warning(
                    'Serial number is >128 chars for row {}'.format(row_num))
        # Cost centre column
        if not row[1]:
            failures.append('Cost centre is blank for row {}'.format(row_num))
            logger.warning('Cost centre is blank for row {}'.format(row_num))
        else:
            # Account for text-encodings (Latin1), trailing spaces, etc.
            cc = unidecode(str(row[1])).strip()
            try:
                cc = int(cc)
            except ValueError:
                # CC can't be cast as integer.
                failures.append('Cost centre {} is invalid for row {}'.format(
                    cc, row_num))
                logger.warning('Cost centre {} is invalid for row {}'.format(
                    cc, row_num))
        # Fatal validation errors: log any failures and skip to the next row.
        if failures:
            for f in failures:
                LprValidation.objects.create(lpr=lpr, row=row_num, validation=f)
            continue
        # No validation errors so far; try matching a Computer or Mobile.
        computer = None
        mobile = None
        # Computer
        qs = Computer.objects.filter(serial_number=serial)
        if qs.count() > 1:
            msg = 'Serial no {} for row {} matches >1 computer in the db'.format(
                serial, row_num)
            failures.append(msg)
            logger.warning(msg)
        elif qs.exists():
            computer = Computer.objects.get(serial_number=serial)
            cc_no = computer.cost_centre_no
            if cc_no and cc_no != cc:
                # Already assigned to a CC, <> the CC supplied.
                msg = ('Computer with serial {} for row {} is assigned to cost '
                       'centre {}; cannot reassign to {}'.format(
                           serial, row_num, cc_no, cc))
                failures.append(msg)
                logger.warning(msg)
            elif not cc_no:
                # If CC is not set, do so now.
                computer.cost_centre_no = cc
                computer.save()
                logger.info('Computer {} cost centre set to {}.'.format(
                    computer, cc))
        # Mobile
        qs = Mobile.objects.filter(serial_number=serial)
        if qs.count() > 1:
            msg = 'Serial no {} for row {} matches >1 mobile in the db'.format(
                serial, row_num)
            failures.append(msg)
            logger.warning(msg)
        elif qs.exists():
            mobile = Mobile.objects.get(serial_number=serial)
            cc_no = mobile.cost_centre_no
            if cc_no and cc_no != cc:
                # Already assigned to a CC, <> the CC supplied.
                msg = ('Mobile with serial {} for row {} is assigned to cost '
                       'centre {}; cannot reassign to {}'.format(
                           serial, row_num, cc_no, cc))
                failures.append(msg)
                logger.warning(msg)
            elif not cc_no:
                # If CC is not set, do so now.
                mobile.cost_centre_no = cc
                mobile.save()
                logger.info('Mobile {} cost centre set to {}.'.format(
                    mobile, cc))
        # Serial number couldn't be matched to any object.
        if not computer and not mobile:
            msg = ('Serial no {} for row {} does not match any single computer '
                   'or mobile in the db'.format(serial, row_num))
            failures.append(msg)
            logger.warning(msg)
        for f in failures:
            LprValidation.objects.create(lpr=lpr, row=row_num, validation=f)
    # TODO: email the uploading user a list of any validation errors.
    return True
def freshdesk_sync_contacts(contacts=None, companies=None, agents=None):
    """Iterate through all DepartmentUser objects, and ensure that each
    user's information is synced correctly to a Freshdesk contact.
    May optionally be passed in dicts of contacts & companies.

    Returns True on success, or False if the Freshdesk API queries fail or
    the API throttle limit (HTTP 403) is reached.
    """
    logger = logger_setup('freshdesk_sync_contacts')
    try:
        if not contacts:
            logger.info('Querying Freshdesk for current contacts')
            contacts = get_freshdesk_users(progress=True)
        if not companies:
            logger.info('Querying Freshdesk for current companies')
            companies = get_freshdesk_companies(progress=False)
        if not agents:
            logger.info('Querying Freshdesk for current agents')
            agents = get_freshdesk_agents(progress=False)
    except Exception as e:
        # Notify the admins that the API query failed, then bail out.
        send_mail(
            'Freshdesk "all contacts" API query failed: {}'.format(socket.gethostname()),
            '''Freshdesk "all contacts" query failed for host {}, '''
            '''project directory {}\nException:\n{}'''.format(
                socket.gethostname(), os.path.dirname(__file__), e),
            '*****@*****.**', list(settings.ADMINS), fail_silently=False)
        logger.exception(e)
        return False
    # Filter DepartmentUsers: valid email (contains @), not -admin, DN contains 'OU=Users'
    for user in DepartmentUser.objects.filter(
            email__contains='@',
            ad_dn__contains='OU=Users').exclude(email__contains='-admin'):
        # Use extra attributes from org_data, if available. Computed before
        # branching because ``department`` is read in both the update and the
        # create branches below (previously it was only assigned in the update
        # branch, leaving the create branch with a stale or undefined value).
        cost_centre = user.org_data.get('cost_centre', {}).get('code', "") if user.org_data else ""
        physical_location = user.org_data.get('location', {}).get('name', "") if user.org_data else ""
        department = user.org_data.get('units', []) if user.org_data else []
        department = department[0].get('name', "") if len(department) > 0 else ""
        if user.email.lower() in contacts:
            # The DepartmentUser exists in Freshdesk; verify and update details.
            fd = contacts[user.email.lower()]
            data = {}
            user_sync = False
            changes = []
            if user.name != fd['name']:
                user_sync = True
                data['name'] = user.name
                changes.append('name')
            if user.telephone != fd['phone']:
                user_sync = True
                data['phone'] = user.telephone
                changes.append('phone')
            if user.title != fd['job_title']:
                user_sync = True
                data['job_title'] = user.title
                changes.append('job_title')
            if department and department in companies and fd['company_id'] != companies[department]['id']:
                user_sync = True
                data['company_id'] = companies[department]['id']
                changes.append('company_id')
            if 'custom_field' in fd:  # Field may not exist in the API obj.
                # Custom fields in Freshdesk: Cost Centre no.
                if fd['custom_field']['cf_cost_centre'] != cost_centre:
                    user_sync = True
                    data['custom_field'] = {'cf_cost_centre': cost_centre}
                    changes.append('cost_centre')
                # Custom fields in Freshdesk: Physical location
                if fd['custom_field']['cf_location'] != physical_location:
                    user_sync = True
                    if 'custom_field' in data:
                        data['custom_field']['cf_location'] = physical_location
                    else:
                        data['custom_field'] = {'cf_location': physical_location}
                    changes.append('physical_location')
            if user_sync:  # Sync user details to their Freshdesk contact.
                resp = freshdesk_contact_update(fd['id'], data)
                if resp.status_code == 403:  # Forbidden
                    # A 403 response probably means that we hit the API throttle limit.
                    # Abort the synchronisation.
                    logger.error('HTTP403 received from Freshdesk API, aborting')
                    return False
                logger.info('{} was updated in Freshdesk (status {}), changed: {}'.format(
                    user.email.lower(), resp.status_code, ', '.join(changes)))
            else:
                logger.info('{} already up to date in Freshdesk'.format(user.email.lower()))
        elif user.email.lower() in agents:
            # The DepartmentUser is an agent; skip (can't update Agent details via the API).
            logger.info('{} is an agent, skipping sync'.format(user.email.lower()))
            continue
        else:
            # The DepartmentUser does not exist in Freshdesk; create them as a Contact.
            data = {'name': user.name, 'email': user.email.lower(),
                    'phone': user.telephone, 'job_title': user.title}
            if department and department in companies:
                data['company_id'] = companies[department]['id']
            resp = freshdesk_contact_create(data)
            if resp.status_code != 200:
                # Error, unable to process request.
                logger.warning('{} not created in Freshdesk (status {})'.format(
                    user.email.lower(), resp.status_code))
            else:
                logger.info('{} created in Freshdesk (status {})'.format(
                    user.email.lower(), resp.status_code))
    return True