try: id_disposition = dic_disposition.get( ast_disposition.encode("utf-8"), 0) transdisposition = DISPOSITION_TRANSLATION[id_disposition] except: transdisposition = 0 hangup_cause_id = get_hangupcause_id(transdisposition) accountcode = set_int_default(row[7], '') uniqueid = row[8] start_uepoch = datetime.fromtimestamp(int(row[1])) # Check Destination number destination_number = row[0] destination_data = chk_destination(destination_number) authorized = destination_data['authorized'] country_id = destination_data['country_id'] # Prepare global CDR cdr_record = { 'switch_id': switch.id, 'caller_id_number': callerid_number, 'caller_id_name': callerid_name, 'destination_number': destination_number, 'duration': duration, 'billsec': billsec, 'hangup_cause_id': hangup_cause_id, 'accountcode': accountcode, 'direction': "inbound",
def setUp(self):
    """Build the model fixtures shared by every test of this case.

    Creates one of each model (AlertRemovePrefix, Alarm, AlarmReport,
    Blacklist, Whitelist) plus a batch of extra Alarm rows covering the
    (period, alert_condition, alert_condition_add_on) combinations the
    tests exercise, and sanity-checks each __unicode__ as it goes.
    """
    # AlertRemovePrefix model
    self.alert_remove_prefix = AlertRemovePrefix(label='test', prefix=32)
    self.alert_remove_prefix.save()
    self.assertEquals(self.alert_remove_prefix.__unicode__(), 'test')

    # Alarm model - the reference alarm used by AlarmReport below
    self.alarm = Alarm(name='Alarm name', period=1, type=1,
                       alert_condition=1, alert_value=10,
                       alert_condition_add_on=1, status=1,
                       email_to_send_alarm='*****@*****.**')
    self.alarm.save()
    self.assertEquals(self.alarm.__unicode__(), 'Alarm name')

    # Extra alarms: (period, alert_condition, alert_condition_add_on)
    # saved in the same order as before; self.alarm_new ends up bound to
    # the last one, exactly as the original sequence of assignments did.
    alarm_variants = [
        (1, 2, 2),
        (1, 3, 1),
        (1, 4, 1),
        (1, 5, 1),
        (1, 6, 1),
        (2, 3, 2),
        (2, 4, 2),
        (3, 5, 2),
        (3, 6, 2),
    ]
    for period, condition, add_on in alarm_variants:
        self.alarm_new = Alarm(name='Alarm name new', period=period,
                               type=1, alert_condition=condition,
                               alert_value=10,
                               alert_condition_add_on=add_on, status=1,
                               email_to_send_alarm='*****@*****.**')
        self.alarm_new.save()

    # AlarmReport model
    self.alarm_report = AlarmReport(alarm=self.alarm, calculatedvalue=10,
                                    status=1)
    self.alarm_report.save()
    self.assertEquals(self.alarm_report.__unicode__(), 'Alarm name')

    self.country = Country.objects.get(pk=198)

    # Blacklist model
    self.blacklist = Blacklist(phonenumber_prefix=32, country=self.country)
    self.blacklist.save()
    self.assertTrue(self.blacklist.__unicode__())

    # Whitelist model
    self.whitelist = Whitelist(phonenumber_prefix=32, country=self.country)
    self.whitelist.save()
    self.assertTrue(self.whitelist.__unicode__())

    chk_destination('9999787424')
def import_cdr(self, request):
    """Add custom method in django admin view to import CSV file of cdr.

    **Attributes**:

        * ``form`` - CDR_FileImport
        * ``template`` - admin/cdr/switch/import_cdr.html

    **Logic Description**:

        Validate the column-mapping form, then read the uploaded CSV row
        by row; for each row build a CDR dict, insert it into the MongoDB
        ``CDR_COMMON`` collection (skipping duplicates by ``uuid``) and
        update the daily/monthly analytic collections.

    **Important variable**:

        * total_rows - Total no. of records in the CSV file
        * retail_record_count - No. of records which are imported from
          the CSV file
    """
    opts = Switch._meta
    app_label = opts.app_label
    rdr = ''  # will contain CSV data
    msg = ''
    success_import_list = []
    error_import_list = []
    type_error_import_list = []
    #TODO : Too many indentation in the code, refact, less if, for
    #TODO : respect DRY principale, some of the code is duplicate
    #from import tasks
    if request.method == 'POST':
        form = CDR_FileImport(request.user, request.POST, request.FILES)
        if form.is_valid():
            # field_list maps a CDR field name to the 1-based CSV column
            # picked in the form; a value of 0 means "not in the CSV".
            field_list = {}
            field_notin_list = []
            for i in CDR_FIELD_LIST:
                if int(request.POST[i]) != 0:
                    field_list[i] = int(request.POST[i])
                else:
                    field_notin_list.append((i))
            # perform sorting & get unique order list
            countMap = {}
            for v in field_list.itervalues():
                countMap[v] = countMap.get(v, 0) + 1
            uni = [(k, v) for k, v in field_list.iteritems() \
                if countMap[v] == 1]
            uni = sorted(uni, key=lambda uni: uni[1])
            # if order list matched with CDR_FIELD_LIST count
            if len(uni) == len(CDR_FIELD_LIST) - len(field_notin_list):
                # To count total rows of CSV file
                records = csv.reader(request.FILES['csv_file'],
                                     delimiter=',', quotechar='"')
                total_rows = len(list(records))
                rdr = csv.reader(request.FILES['csv_file'],
                                 delimiter=',', quotechar='"')
                cdr_record_count = 0
                # Read each Row
                for row in rdr:
                    if (row and str(row[0]) > 0):
                        row = striplist(row)
                        try:
                            accountcode = ''
                            # extra fields to import
                            caller_id_name = ''
                            direction = 'outbound'
                            remote_media_ip = ''
                            answer_uepoch = ''
                            end_uepoch = ''
                            mduration = ''
                            billmsec = ''
                            write_codec = ''
                            read_codec = ''
                            get_cdr_from_row = {}
                            row_counter = 0
                            # Pull every mapped column out of this row
                            for j in uni:
                                get_cdr_from_row[j[0]] = row[j[1] - 1]
                                #get_cdr_from_row[j[0]] = row[row_counter]
                                caller_id_name = \
                                    get_value_from_uni(j, row, 'caller_id_name')
                                caller_id_number = \
                                    get_value_from_uni(j, row, 'caller_id_number')
                                direction = \
                                    get_value_from_uni(j, row, 'direction')
                                remote_media_ip = \
                                    get_value_from_uni(j, row, 'remote_media_ip')
                                answer_uepoch = \
                                    get_value_from_uni(j, row, 'answer_uepoch')
                                end_uepoch = \
                                    get_value_from_uni(j, row, 'end_uepoch')
                                mduration = \
                                    get_value_from_uni(j, row, 'mduration')
                                billmsec = \
                                    get_value_from_uni(j, row, 'billmsec')
                                read_codec = \
                                    get_value_from_uni(j, row, 'read_codec')
                                write_codec = \
                                    get_value_from_uni(j, row, 'write_codec')
                                row_counter = row_counter + 1
                            # accountcode can come from a fixed form value
                            # instead of a CSV column
                            if len(field_notin_list) != 0:
                                for i in field_notin_list:
                                    if i == 'accountcode':
                                        accountcode = int(
                                            request.POST[i + "_csv"])
                            if not accountcode:
                                accountcode = int(
                                    get_cdr_from_row['accountcode'])
                            # Mandatory fields to import
                            switch_id = int(request.POST['switch'])
                            caller_id_number = get_cdr_from_row[
                                'caller_id_number']
                            duration = int(get_cdr_from_row['duration'])
                            billsec = int(get_cdr_from_row['billsec'])
                            hangup_cause_id = \
                                get_hangupcause_id(int(get_cdr_from_row['hangup_cause_id']))
                            start_uepoch = \
                                datetime.datetime.fromtimestamp(int(get_cdr_from_row['start_uepoch']))
                            destination_number = get_cdr_from_row[
                                'destination_number']
                            uuid = get_cdr_from_row['uuid']
                            destination_data = chk_destination(
                                destination_number)
                            authorized = destination_data['authorized']
                            country_id = destination_data['country_id']
                            # Extra fields to import
                            if answer_uepoch:
                                answer_uepoch = \
                                    datetime.datetime.fromtimestamp(int(answer_uepoch[:10]))
                            if end_uepoch:
                                end_uepoch = \
                                    datetime.datetime.fromtimestamp(int(end_uepoch[:10]))
                            # Prepare global CDR
                            cdr_record = {
                                'switch_id': int(request.POST['switch']),
                                'caller_id_number': caller_id_number,
                                'caller_id_name': caller_id_name,
                                'destination_number': destination_number,
                                'duration': duration,
                                'billsec': billsec,
                                'hangup_cause_id': hangup_cause_id,
                                'accountcode': accountcode,
                                'direction': direction,
                                'uuid': uuid,
                                'remote_media_ip': remote_media_ip,
                                'start_uepoch': start_uepoch,
                                'answer_uepoch': answer_uepoch,
                                'end_uepoch': end_uepoch,
                                'mduration': mduration,
                                'billmsec': billmsec,
                                'read_codec': read_codec,
                                'write_codec': write_codec,
                                'cdr_type': 'CSV_IMPORT',
                                'cdr_object_id': '',
                                'country_id': country_id,
                                'authorized': authorized,
                            }
                            try:
                                # check if cdr is already existing in cdr_common
                                cdr_data = settings.DBCON[
                                    settings.MG_CDR_COMMON]
                                query_var = {}
                                query_var['uuid'] = uuid
                                record_count = cdr_data.find(
                                    query_var).count()
                                if record_count >= 1:
                                    msg = _('CDR already exists !!')
                                    error_import_list.append(row)
                                else:
                                    # if not, insert record
                                    # record global CDR
                                    CDR_COMMON.insert(cdr_record)
                                    # start_uepoch = get_cdr_from_row['start_uepoch']
                                    daily_date = datetime.datetime.\
                                        fromtimestamp(int(get_cdr_from_row['start_uepoch'][:10]))
                                    # insert daily analytic record
                                    # NOTE(review): `switch` is never defined in
                                    # this function (only `switch_id` is); the
                                    # sibling implementation passes `switch_id`
                                    # here - confirm and fix.
                                    create_daily_analytic(
                                        daily_date, switch.id, country_id,
                                        accountcode, hangup_cause_id,
                                        duration)
                                    # MONTHLY_ANALYTIC
                                    # insert monthly analytic record
                                    create_monthly_analytic(
                                        daily_date, start_uepoch, switch.id,
                                        country_id, accountcode, duration)
                                    cdr_record_count = cdr_record_count + 1
                                    msg =\
                                        _('%(cdr_record_count)s Cdr(s) are uploaded, out of %(total_rows)s row(s) !!')\
                                        % {'cdr_record_count': cdr_record_count,
                                           'total_rows': total_rows}
                                    success_import_list.append(row)
                            except:
                                msg = _("Error : invalid value for import")
                                type_error_import_list.append(row)
                        except:
                            msg = _("Error : invalid value for import")
                            type_error_import_list.append(row)
                if cdr_record_count > 0:
                    apply_index()
                    # Apply index
                    DAILY_ANALYTIC.ensure_index([("metadata.date", -1)])
                    CDR_COMMON.ensure_index([("start_uepoch", -1)])
            else:
                msg = _("Error : importing several times the same column")
    else:
        form = CDR_FileImport(request.user)
    ctx = RequestContext(
        request, {
            'title': _('Import CDR'),
            'form': form,
            'opts': opts,
            'model_name': opts.object_name.lower(),
            'app_label': app_label,
            'rdr': rdr,
            'msg': msg,
            'success_import_list': success_import_list,
            'error_import_list': error_import_list,
            'type_error_import_list': type_error_import_list,
            'CDR_FIELD_LIST': list(CDR_FIELD_LIST),
            'CDR_FIELD_LIST_NUM': list(CDR_FIELD_LIST_NUM),
        })
    template = 'admin/cdr/switch/import_cdr.html'
    return render_to_response(template, context_instance=ctx)
def func_importcdr_aggregate(shell, importcdr_handler, switch, ipaddress):
    """Import raw FreeSWITCH CDRs from MongoDB and pre-aggregate them.

    For every document not yet flagged with ``import_cdr`` this will:

    - build a global CDR dict and bulk-insert it into ``CDR_COMMON``
    - insert the daily / monthly analytic records
    - flag the source document with ``import_cdr: 1``

    Fix over the previous revision: ``daily_date`` was computed twice per
    CDR with the exact same expression already used for ``start_uepoch``;
    the duplicate computation is removed and the value reused.

    :param shell: forwarded to ``print_shell`` to control progress output
    :param importcdr_handler: MongoDB collection of raw FreeSWITCH CDRs
    :param switch: Switch model instance the imported CDRs belong to
    :param ipaddress: switch IP address, used only in progress messages
    """
    # We limit the import tasks to a maximum - 1000.
    # This will reduce the speed but that's the only way to make sure
    # we don't have several times the same task running.
    PAGE_SIZE = 1000
    count_import = 0
    local_count_import = 0
    # Store CDRs in a list so they can be inserted in one bulk call
    cdr_bulk_record = []
    result = importcdr_handler.find(
        {
            '$or': [{'import_cdr': {'$exists': False}},
                    {'import_cdr': 0}]
        },
        {
            "callflow.caller_profile.caller_id_number": 1,
            "callflow.caller_profile.caller_id_name": 1,
            "callflow.caller_profile.destination_number": 1,
            "variables.duration": 1,
            "variables.billsec": 1,
            "variables.hangup_cause_q850": 1,
            "variables.accountcode": 1,
            "variables.direction": 1,
            "variables.uuid": 1,
            "variables.remote_media_ip": 1,
            "variables.start_uepoch": 1,
            "import_cdr_monthly": 1,
            "import_cdr_daily": 1,
            "import_cdr_hourly": 1,
        }).limit(PAGE_SIZE)
    # Retrieve FreeSWITCH CDRs
    for cdr in result:
        # start_uepoch is an epoch string; keep only the seconds part
        start_uepoch = datetime.datetime.fromtimestamp(
            int(cdr['variables']['start_uepoch'][:10]))
        # Check Destination number
        destination_number = cdr['callflow']['caller_profile']['destination_number']
        if len(destination_number) <= settings.INTERNAL_CALL:
            # Short numbers are treated as internal extensions:
            # always authorized, reserved country id 999
            authorized = 1
            country_id = 999
        else:
            destination_data = chk_destination(destination_number)
            authorized = destination_data['authorized']
            country_id = destination_data['country_id']
        hangup_cause_id = get_hangupcause_id(cdr['variables']['hangup_cause_q850'])
        # Retrieve elements from the CDR object
        data_element = get_element(cdr)
        accountcode = data_element['accountcode']
        remote_media_ip = data_element['remote_media_ip']
        caller_id_number = data_element['caller_id_number']
        caller_id_name = data_element['caller_id_name']
        duration = data_element['duration']
        billsec = data_element['billsec']
        direction = data_element['direction']
        uuid = data_element['uuid']
        # Prepare global CDR
        cdr_record = {
            'switch_id': switch.id,
            'caller_id_number': caller_id_number,
            'caller_id_name': caller_id_name,
            'destination_number': destination_number,
            'duration': duration,
            'billsec': billsec,
            'hangup_cause_id': hangup_cause_id,
            'accountcode': accountcode,
            'direction': direction,
            'uuid': uuid,
            'remote_media_ip': remote_media_ip,
            'start_uepoch': start_uepoch,
            'cdr_type': CDR_TYPE["freeswitch"],
            'cdr_object_id': cdr['_id'],
            'country_id': country_id,
            'authorized': authorized,
        }
        # Append cdr to bulk_cdr list
        cdr_bulk_record.append(cdr_record)
        # Count CDR import
        count_import = count_import + 1
        local_count_import = local_count_import + 1
        # DAILY_ANALYTIC: the daily bucket date is the call start time
        # (previously recomputed - twice - from the same timestamp)
        daily_date = start_uepoch
        # insert daily analytic record
        create_daily_analytic(daily_date, switch.id, country_id,
                              accountcode, hangup_cause_id, duration)
        # MONTHLY_ANALYTIC
        # insert monthly analytic record
        create_monthly_analytic(daily_date, start_uepoch, switch.id,
                                country_id, accountcode, duration)
        # Flag the CDR as imported
        importcdr_handler.update(
            {'_id': cdr['_id']},
            {
                '$set': {
                    'import_cdr': 1,
                }
            }
        )
    if local_count_import > 0:
        # Bulk cdr list insert into cdr_common
        CDR_COMMON.insert(cdr_bulk_record)
        # Reset counter to zero
        local_count_import = 0
        print_shell(shell, "Switch(%s) - currently imported CDRs:%d" %
                    (ipaddress, count_import))
    print_shell(shell, "Import on Switch(%s) - Total Record(s) imported:%d" %
                (ipaddress, count_import))
except: transdisposition = 0 hangup_cause_id = get_hangupcause_id(transdisposition) accountcode = row[7] uniqueid = row[8] start_uepoch = datetime.fromtimestamp(int(row[1])) # Check Destination number destination_number = row[0] if (len(destination_number) <= settings.INTERNAL_CALL or destination_number[:1].isalpha()): authorized = 1 country_id = 999 else: destination_data = chk_destination(destination_number) authorized = destination_data['authorized'] country_id = destination_data['country_id'] #Option to get the direction from user_field direction = "inbound" if len(callerid_number) == 4: direction = "outbound" # Prepare global CDR cdr_record = { 'switch_id': switch.id, 'caller_id_number': callerid_number.decode('utf-8', 'ignore'), 'caller_id_name': callerid_name.decode('utf-8', 'ignore'), 'destination_number': destination_number.decode('utf-8', 'ignore'), 'duration': duration,
def import_cdr(self, request):
    """Add custom method in django admin view to import CSV file of cdr.

    **Attributes**:

        * ``form`` - CDR_FileImport
        * ``template`` - admin/cdr/switch/import_cdr.html

    **Logic Description**:

        Validate the column-mapping form, read the uploaded CSV row by
        row, build one CDR record per row (via
        ``generate_global_cdr_record``), bulk-insert them into
        ``CDR_COMMON`` in batches of ``PAGE_SIZE`` and create the
        analytic records through ``common_function_to_create_analytic``.

    **Important variable**:

        * total_rows - Total no. of records in the CSV file
        * retail_record_count - No. of records which are imported from
          the CSV file
    """
    opts = Switch._meta
    app_label = opts.app_label
    rdr = ''  # will contain CSV data
    msg = ''
    success_import_list = []
    error_import_list = []
    type_error_import_list = []
    if request.method == 'POST':
        form = CDR_FileImport(request.user, request.POST, request.FILES)
        if form.is_valid():
            # field_list maps a CDR field name to the 1-based CSV column
            # picked in the form; a value of 0 means "not in the CSV".
            field_list = {}
            field_notin_list = []
            for i in CDR_FIELD_LIST:
                if int(request.POST[i]) != 0:
                    field_list[i] = int(request.POST[i])
                else:
                    field_notin_list.append((i))
            # perform sorting & get unique order list
            countMap = {}
            for v in field_list.itervalues():
                countMap[v] = countMap.get(v, 0) + 1
            uni = [(k, v) for k, v in field_list.iteritems()
                   if countMap[v] == 1]
            uni = sorted(uni, key=lambda uni: uni[1])
            # if order list matched with CDR_FIELD_LIST count
            if len(uni) == len(CDR_FIELD_LIST) - len(field_notin_list):
                # To count total rows of CSV file
                records = csv.reader(
                    request.FILES['csv_file'], delimiter=',', quotechar='"')
                total_rows = len(list(records))
                rdr = csv.reader(
                    request.FILES['csv_file'], delimiter=',', quotechar='"')
                cdr_record_count = 0
                #Store cdr in list to insert by bulk
                cdr_bulk_record = []
                local_count_import = 0
                PAGE_SIZE = 1000
                # Read each Row
                for row in rdr:
                    if (row and str(row[0]) > 0):
                        row = striplist(row)
                        try:
                            accountcode = ''
                            # extra fields to import
                            caller_id_name = ''
                            direction = 'outbound'
                            remote_media_ip = ''
                            answer_uepoch = ''
                            end_uepoch = ''
                            mduration = ''
                            billmsec = ''
                            write_codec = ''
                            read_codec = ''
                            get_cdr_from_row = {}
                            row_counter = 0
                            # Pull every mapped column out of this row
                            for j in uni:
                                get_cdr_from_row[j[0]] = row[j[1] - 1]
                                #get_cdr_from_row[j[0]] = row[row_counter]
                                caller_id_name = get_value_from_uni(j, row, 'caller_id_name')
                                caller_id_number = get_value_from_uni(j, row, 'caller_id_number')
                                direction = get_value_from_uni(j, row, 'direction')
                                remote_media_ip = get_value_from_uni(j, row, 'remote_media_ip')
                                answer_uepoch = get_value_from_uni(j, row, 'answer_uepoch')
                                end_uepoch = get_value_from_uni(j, row, 'end_uepoch')
                                mduration = get_value_from_uni(j, row, 'mduration')
                                billmsec = get_value_from_uni(j, row, 'billmsec')
                                read_codec = get_value_from_uni(j, row, 'read_codec')
                                write_codec = get_value_from_uni(j, row, 'write_codec')
                                row_counter = row_counter + 1
                            # accountcode can come from a fixed form value
                            # instead of a CSV column
                            if len(field_notin_list) != 0:
                                for i in field_notin_list:
                                    if i == 'accountcode' and request.POST.get("accountcode_csv"):
                                        accountcode = request.POST["accountcode_csv"]
                            if not accountcode and request.POST.get("accountcode") != '0':
                                accountcode = get_cdr_from_row['accountcode']
                            # Mandatory fields to import
                            switch_id = int(request.POST['switch'])
                            caller_id_number = get_cdr_from_row['caller_id_number']
                            duration = int(get_cdr_from_row['duration'])
                            billsec = int(get_cdr_from_row['billsec'])
                            # Asterisk CSVs store the hangup cause as a
                            # name; FreeSWITCH-style CSVs as a numeric id
                            if request.POST.get('import_asterisk') \
                                    and request.POST['import_asterisk'] == 'on':
                                hangup_cause_name = "_".join(get_cdr_from_row['hangup_cause_id'].upper().split(' '))
                                hangup_cause_id =\
                                    get_hangupcause_id_from_name(hangup_cause_name)
                            else:
                                hangup_cause_id =\
                                    get_hangupcause_id(int(get_cdr_from_row['hangup_cause_id']))
                            start_uepoch = \
                                datetime.datetime.fromtimestamp(int(float(get_cdr_from_row['start_uepoch'])))
                            destination_number = get_cdr_from_row['destination_number']
                            uuid = get_cdr_from_row['uuid']
                            destination_data = chk_destination(destination_number)
                            authorized = destination_data['authorized']
                            country_id = destination_data['country_id']
                            # Extra fields to import
                            if answer_uepoch:
                                answer_uepoch = \
                                    datetime.datetime.fromtimestamp(int(answer_uepoch[:10]))
                            if end_uepoch:
                                end_uepoch = \
                                    datetime.datetime.fromtimestamp(int(end_uepoch[:10]))
                            # Prepare global CDR
                            cdr_record = generate_global_cdr_record(
                                switch_id, caller_id_number, caller_id_name,
                                destination_number, duration, billsec,
                                hangup_cause_id, accountcode, direction,
                                uuid, remote_media_ip, start_uepoch,
                                answer_uepoch, end_uepoch, mduration,
                                billmsec, read_codec, write_codec,
                                'CSV_IMPORT', '', country_id, authorized)
                            # check if cdr is already existing in cdr_common
                            cdr_data = settings.DBCON[settings.MONGO_CDRSTATS['CDR_COMMON']]
                            query_var = {}
                            query_var['uuid'] = uuid
                            record_count = cdr_data.find(query_var).count()
                            if record_count >= 1:
                                msg = _('CDR already exists !!')
                                error_import_list.append(row)
                            else:
                                # if not, insert record
                                # record global CDR
                                # Append cdr to bulk_cdr list
                                cdr_bulk_record.append(cdr_record)
                                local_count_import = local_count_import + 1
                                # flush a full batch to MongoDB
                                if local_count_import == PAGE_SIZE:
                                    CDR_COMMON.insert(cdr_bulk_record)
                                    local_count_import = 0
                                    cdr_bulk_record = []
                                date_start_uepoch = get_cdr_from_row['start_uepoch']
                                common_function_to_create_analytic(
                                    date_start_uepoch, start_uepoch,
                                    switch_id, country_id, accountcode,
                                    hangup_cause_id, duration)
                                cdr_record_count = cdr_record_count + 1
                                msg =\
                                    _('%(cdr_record_count)s Cdr(s) are uploaded, out of %(total_rows)s row(s) !!')\
                                    % {'cdr_record_count': cdr_record_count,
                                       'total_rows': total_rows}
                                success_import_list.append(row)
                        except:
                            msg = _("Error : invalid value for import")
                            type_error_import_list.append(row)
                # remaining record
                if cdr_bulk_record:
                    CDR_COMMON.insert(cdr_bulk_record)
                    local_count_import = 0
                    cdr_bulk_record = []
                if cdr_record_count > 0:
                    # Apply index
                    apply_index(shell=True)
            else:
                msg = _("Error : importing several times the same column")
    else:
        form = CDR_FileImport(request.user)
    ctx = RequestContext(request, {
        'title': _('Import CDR'),
        'form': form,
        'opts': opts,
        'model_name': opts.object_name.lower(),
        'app_label': app_label,
        'rdr': rdr,
        'msg': msg,
        'success_import_list': success_import_list,
        'error_import_list': error_import_list,
        'type_error_import_list': type_error_import_list,
        'CDR_FIELD_LIST': list(CDR_FIELD_LIST),
        'CDR_FIELD_LIST_NUM': list(CDR_FIELD_LIST_NUM),
    })
    template = 'admin/cdr/switch/import_cdr.html'
    return render_to_response(template, context_instance=ctx)
def import_cdr(self, request):
    """Add custom method in django admin view to import CSV file of cdr.

    **Attributes**:

        * ``form`` - CDR_FileImport
        * ``template`` - admin/cdr/switch/import_cdr.html

    **Logic Description**:

        Validate the column-mapping form, then read the uploaded CSV row
        by row; for each row build a CDR dict, insert it into the MongoDB
        ``CDR_COMMON`` collection (skipping duplicates by ``uuid``) and
        update the daily/monthly analytic collections.

    **Important variable**:

        * total_rows - Total no. of records in the CSV file
        * retail_record_count - No. of records which are imported from
          the CSV file
    """
    opts = Switch._meta
    app_label = opts.app_label
    rdr = ""  # will contain CSV data
    msg = ""
    success_import_list = []
    error_import_list = []
    type_error_import_list = []
    # TODO : Too many indentation in the code, refact, less if, for
    # TODO : respect DRY principale, some of the code is duplicate
    if request.method == "POST":
        form = CDR_FileImport(request.user, request.POST, request.FILES)
        if form.is_valid():
            # field_list maps a CDR field name to the 1-based CSV column
            # picked in the form; a value of 0 means "not in the CSV".
            field_list = {}
            field_notin_list = []
            for i in CDR_FIELD_LIST:
                if int(request.POST[i]) != 0:
                    field_list[i] = int(request.POST[i])
                else:
                    field_notin_list.append((i))
            # perform sorting & get unique order list
            countMap = {}
            for v in field_list.itervalues():
                countMap[v] = countMap.get(v, 0) + 1
            uni = [(k, v) for k, v in field_list.iteritems()
                   if countMap[v] == 1]
            uni = sorted(uni, key=lambda uni: uni[1])
            # if order list matched with CDR_FIELD_LIST count
            if len(uni) == len(CDR_FIELD_LIST) - len(field_notin_list):
                # To count total rows of CSV file
                records = csv.reader(request.FILES["csv_file"],
                                     delimiter=",", quotechar='"')
                total_rows = len(list(records))
                rdr = csv.reader(request.FILES["csv_file"],
                                 delimiter=",", quotechar='"')
                cdr_record_count = 0
                # Read each Row
                for row in rdr:
                    if row and str(row[0]) > 0:
                        row = striplist(row)
                        try:
                            accountcode = ""
                            # extra fields to import
                            caller_id_name = ""
                            direction = "outbound"
                            remote_media_ip = ""
                            answer_uepoch = ""
                            end_uepoch = ""
                            mduration = ""
                            billmsec = ""
                            write_codec = ""
                            read_codec = ""
                            get_cdr_from_row = {}
                            row_counter = 0
                            # Pull every mapped column out of this row
                            for j in uni:
                                get_cdr_from_row[j[0]] = row[j[1] - 1]
                                # get_cdr_from_row[j[0]] = row[row_counter]
                                caller_id_name = get_value_from_uni(j, row, "caller_id_name")
                                caller_id_number = get_value_from_uni(j, row, "caller_id_number")
                                direction = get_value_from_uni(j, row, "direction")
                                remote_media_ip = get_value_from_uni(j, row, "remote_media_ip")
                                answer_uepoch = get_value_from_uni(j, row, "answer_uepoch")
                                end_uepoch = get_value_from_uni(j, row, "end_uepoch")
                                mduration = get_value_from_uni(j, row, "mduration")
                                billmsec = get_value_from_uni(j, row, "billmsec")
                                read_codec = get_value_from_uni(j, row, "read_codec")
                                write_codec = get_value_from_uni(j, row, "write_codec")
                                row_counter = row_counter + 1
                            # accountcode can come from a fixed form value
                            # instead of a CSV column
                            if len(field_notin_list) != 0:
                                for i in field_notin_list:
                                    if i == "accountcode":
                                        accountcode = request.POST[i + "_csv"]
                            if not accountcode:
                                accountcode = get_cdr_from_row["accountcode"]
                            # Mandatory fields to import
                            switch_id = int(request.POST["switch"])
                            caller_id_number = get_cdr_from_row["caller_id_number"]
                            duration = int(get_cdr_from_row["duration"])
                            billsec = int(get_cdr_from_row["billsec"])
                            hangup_cause_id = get_hangupcause_id(int(get_cdr_from_row["hangup_cause_id"]))
                            start_uepoch = datetime.datetime.fromtimestamp(int(get_cdr_from_row["start_uepoch"]))
                            destination_number = get_cdr_from_row["destination_number"]
                            uuid = get_cdr_from_row["uuid"]
                            destination_data = chk_destination(destination_number)
                            authorized = destination_data["authorized"]
                            country_id = destination_data["country_id"]
                            # Extra fields to import
                            if answer_uepoch:
                                answer_uepoch = datetime.datetime.fromtimestamp(int(answer_uepoch[:10]))
                            if end_uepoch:
                                end_uepoch = datetime.datetime.fromtimestamp(int(end_uepoch[:10]))
                            # Prepare global CDR
                            cdr_record = {
                                "switch_id": int(request.POST["switch"]),
                                "caller_id_number": caller_id_number,
                                "caller_id_name": caller_id_name,
                                "destination_number": destination_number,
                                "duration": duration,
                                "billsec": billsec,
                                "hangup_cause_id": hangup_cause_id,
                                "accountcode": accountcode,
                                "direction": direction,
                                "uuid": uuid,
                                "remote_media_ip": remote_media_ip,
                                "start_uepoch": start_uepoch,
                                "answer_uepoch": answer_uepoch,
                                "end_uepoch": end_uepoch,
                                "mduration": mduration,
                                "billmsec": billmsec,
                                "read_codec": read_codec,
                                "write_codec": write_codec,
                                "cdr_type": "CSV_IMPORT",
                                "cdr_object_id": "",
                                "country_id": country_id,
                                "authorized": authorized,
                            }
                            try:
                                # check if cdr is already existing in cdr_common
                                cdr_data = settings.DBCON[settings.MONGO_CDRSTATS["CDR_COMMON"]]
                                query_var = {}
                                query_var["uuid"] = uuid
                                record_count = cdr_data.find(query_var).count()
                                if record_count >= 1:
                                    msg = _("CDR already exists !!")
                                    error_import_list.append(row)
                                else:
                                    # if not, insert record
                                    # record global CDR
                                    CDR_COMMON.insert(cdr_record)
                                    # start_uepoch = get_cdr_from_row['start_uepoch']
                                    daily_date = datetime.datetime.fromtimestamp(
                                        int(get_cdr_from_row["start_uepoch"][:10])
                                    )
                                    # insert daily analytic record
                                    create_daily_analytic(
                                        daily_date,
                                        switch_id,
                                        country_id,
                                        accountcode,
                                        hangup_cause_id,
                                        duration
                                    )
                                    # MONTHLY_ANALYTIC
                                    # insert monthly analytic record
                                    create_monthly_analytic(
                                        daily_date,
                                        start_uepoch,
                                        switch_id,
                                        country_id,
                                        accountcode,
                                        duration
                                    )
                                    cdr_record_count = cdr_record_count + 1
                                    msg = _(
                                        "%(cdr_record_count)s Cdr(s) are uploaded, out of %(total_rows)s row(s) !!"
                                    ) % {"cdr_record_count": cdr_record_count,
                                         "total_rows": total_rows}
                                    success_import_list.append(row)
                            except:
                                msg = _("Error : invalid value for import")
                                type_error_import_list.append(row)
                        except:
                            msg = _("Error : invalid value for import")
                            type_error_import_list.append(row)
                if cdr_record_count > 0:
                    apply_index()
                    # Apply index
                    DAILY_ANALYTIC.ensure_index([("metadata.date", -1)])
                    CDR_COMMON.ensure_index([("start_uepoch", -1)])
            else:
                msg = _("Error : importing several times the same column")
    else:
        form = CDR_FileImport(request.user)
    ctx = RequestContext(
        request,
        {
            "title": _("Import CDR"),
            "form": form,
            "opts": opts,
            "model_name": opts.object_name.lower(),
            "app_label": app_label,
            "rdr": rdr,
            "msg": msg,
            "success_import_list": success_import_list,
            "error_import_list": error_import_list,
            "type_error_import_list": type_error_import_list,
            "CDR_FIELD_LIST": list(CDR_FIELD_LIST),
            "CDR_FIELD_LIST_NUM": list(CDR_FIELD_LIST_NUM),
        },
    )
    template = "admin/cdr/switch/import_cdr.html"
    return render_to_response(template, context_instance=ctx)
def setUp(self):
    """Build the model fixtures shared by every test of this case.

    Loads the fixture user, then creates one of each model
    (AlertRemovePrefix, Alarm, AlarmReport, Blacklist, Whitelist) plus a
    batch of extra Alarm rows covering the
    (period, alert_condition, alert_condition_add_on) combinations the
    tests exercise, sanity-checking each __unicode__ as it goes.
    """
    self.user = User.objects.get(username='******')

    # AlertRemovePrefix model
    self.alert_remove_prefix = AlertRemovePrefix(label='test', prefix=32)
    self.alert_remove_prefix.save()
    self.assertEquals(self.alert_remove_prefix.__unicode__(), 'test')

    # Alarm model - the reference alarm used by AlarmReport below
    self.alarm = Alarm(user=self.user, name='Alarm name', period=1,
                       type=1, alert_condition=1, alert_value=10,
                       alert_condition_add_on=1, status=1,
                       email_to_send_alarm='*****@*****.**')
    self.alarm.save()
    self.assertEquals(self.alarm.__unicode__(), 'Alarm name')

    # Extra alarms: (period, alert_condition, alert_condition_add_on)
    # saved in the same order as before; self.alarm_new ends up bound to
    # the last one, exactly as the original sequence of assignments did.
    alarm_variants = [
        (1, 2, 2),
        (1, 3, 1),
        (1, 4, 1),
        (1, 5, 1),
        (1, 6, 1),
        (2, 3, 2),
        (2, 4, 2),
        (3, 5, 2),
        (3, 6, 2),
    ]
    for period, condition, add_on in alarm_variants:
        self.alarm_new = Alarm(user=self.user, name='Alarm name new',
                               period=period, type=1,
                               alert_condition=condition, alert_value=10,
                               alert_condition_add_on=add_on, status=1,
                               email_to_send_alarm='*****@*****.**')
        self.alarm_new.save()

    # AlarmReport model
    self.alarm_report = AlarmReport(alarm=self.alarm, calculatedvalue=10,
                                    status=1)
    self.alarm_report.save()
    self.assertEquals(self.alarm_report.__unicode__(), 'Alarm name')

    self.country = Country.objects.get(pk=198)

    # Blacklist model
    self.blacklist = Blacklist(user=self.user, phonenumber_prefix=32,
                               country=self.country)
    self.blacklist.save()
    self.assertTrue(self.blacklist.__unicode__())

    # Whitelist model
    self.whitelist = Whitelist(user=self.user, phonenumber_prefix=32,
                               country=self.country)
    self.whitelist.save()
    self.assertTrue(self.whitelist.__unicode__())

    chk_destination('9999787424')
def func_importcdr_aggregate(shell, importcdr_handler, switch, ipaddress):
    """
    Go through the raw-CDR MongoDB collection and, for each not-yet-imported
    FreeSWITCH CDR:
        - create the CDR_COMMON record (inserted in one bulk operation)
        - build the pre-aggregate analytics

    Fix: removed a leftover ``print(cdr)`` debug statement (it dumped every
    raw CDR document to stdout on each import run) and the commented-out
    ``print_shell`` debug block.

    **Attributes**:

        * ``shell`` - flag forwarded to print_shell for console output
        * ``importcdr_handler`` - MongoDB collection holding the raw CDRs
        * ``switch`` - Switch object owning the CDRs
        * ``ipaddress`` - switch IP address, used only in log messages
    """
    #We limit the import tasks to a maximum - 1000
    #This will reduce the speed but that s the only way to make sure
    #we dont have several time the same tasks running
    PAGE_SIZE = 1000
    count_import = 0
    local_count_import = 0
    #Store cdr in list to insert by bulk
    cdr_bulk_record = []
    # Only fetch CDRs not yet flagged as imported (import_cdr absent or 0)
    result = importcdr_handler.find(
        {
            '$or': [{'import_cdr': {'$exists': False}}, {'import_cdr': 0}]
        },
        {
            "callflow.caller_profile.caller_id_number": 1,
            "callflow.caller_profile.caller_id_name": 1,
            "callflow.caller_profile.destination_number": 1,
            "variables.duration": 1,
            "variables.billsec": 1,
            "variables.hangup_cause_q850": 1,
            "variables.accountcode": 1,
            "variables.direction": 1,
            "variables.uuid": 1,
            "variables.remote_media_ip": 1,
            "variables.start_uepoch": 1,
            "variables.answer_uepoch": 1,
            "variables.end_uepoch": 1,
            "variables.mduration": 1,
            "variables.billmsec": 1,
            "variables.read_codec": 1,
            "variables.write_codec": 1,
            "import_cdr_monthly": 1,
            "import_cdr_daily": 1,
            "import_cdr_hourly": 1,
        }).limit(PAGE_SIZE)

    #Retrieve FreeSWITCH CDRs
    for cdr in result:
        #find result so let's look later for more records
        # epoch values are micro-second strings: keep the first 10 digits
        # (seconds) before converting to datetime
        start_uepoch = datetime.datetime.fromtimestamp(
            int(str(cdr['variables']['start_uepoch'])[:10]))
        answer_uepoch = ''
        if cdr['variables']['answer_uepoch']:
            answer_uepoch = datetime.datetime.fromtimestamp(
                int(str(cdr['variables']['answer_uepoch'])[:10]))
        end_uepoch = ''
        if cdr['variables']['end_uepoch']:
            end_uepoch = datetime.datetime.fromtimestamp(
                int(str(cdr['variables']['end_uepoch'])[:10]))

        # Check Destination number
        destination_number = cdr['callflow'][0]['caller_profile']['destination_number']
        if len(destination_number) <= settings.INTERNAL_CALL:
            # short numbers are internal calls: always authorized,
            # country 999 is the "internal" placeholder
            authorized = 1
            country_id = 999
        else:
            destination_data = chk_destination(destination_number)
            authorized = destination_data['authorized']
            country_id = destination_data['country_id']

        hangup_cause_id = get_hangupcause_id(cdr['variables']['hangup_cause_q850'])

        #Retrieve Element from CDR Object
        data_element = get_element(cdr)
        accountcode = data_element['accountcode']
        remote_media_ip = data_element['remote_media_ip']
        caller_id_number = data_element['caller_id_number']
        caller_id_name = data_element['caller_id_name']
        duration = data_element['duration']
        billsec = data_element['billsec']
        direction = data_element['direction']
        uuid = data_element['uuid']
        mduration = data_element['mduration']
        billmsec = data_element['billmsec']
        read_codec = data_element['read_codec']
        write_codec = data_element['write_codec']

        # Prepare global CDR
        cdr_record = generate_global_cdr_record(switch.id, caller_id_number,
            caller_id_name, destination_number, duration, billsec,
            hangup_cause_id, accountcode, direction, uuid,
            remote_media_ip, start_uepoch, answer_uepoch,
            end_uepoch, mduration, billmsec, read_codec,
            write_codec, CDR_TYPE["freeswitch"], cdr['_id'],
            country_id, authorized)

        # Append cdr to bulk_cdr list
        cdr_bulk_record.append(cdr_record)

        # Count CDR import
        count_import = count_import + 1
        local_count_import = local_count_import + 1

        date_start_uepoch = int(str(cdr['variables']['start_uepoch'])[:10])
        common_function_to_create_analytic(date_start_uepoch, start_uepoch,
            switch.id, country_id, accountcode, hangup_cause_id, duration)

        # Flag the CDR as imported so it is skipped on the next run
        importcdr_handler.update(
            {'_id': cdr['_id']},
            {
                '$set': {
                    'import_cdr': 1,
                }
            }
        )

    if local_count_import > 0:
        # Bulk cdr list insert into cdr_common
        CDR_COMMON.insert(cdr_bulk_record)
        # Reset counter to zero
        local_count_import = 0
        print_shell(shell, "Switch(%s) - currently imported CDRs:%d" % (
            ipaddress, count_import))
    print_shell(shell, "Import on Switch(%s) - Total Record(s) imported:%d" % (
        ipaddress, count_import))
def import_cdr(self, request):
    """Add custom method in django admin view to import CSV file of cdr

    **Attributes**:

        * ``form`` - CDR_FileImport
        * ``template`` - admin/cdr/switch/import_cdr.html

    **Logic Description**:

    **Important variable**:

        * total_rows - Total no. of records in the CSV file
        * retail_record_count - No. of records which are imported from
          The CSV file

    Fixes applied in review:

        * ``create_daily_analytic`` / ``create_monthly_analytic`` were
          called with ``switch.id`` but no local ``switch`` exists in this
          function (only ``switch_id``); the NameError was silently
          swallowed by the bare ``except`` so every row failed — now uses
          ``switch_id``.
        * ``str(row[0]) > 0`` always evaluated True on Python 2 (str/int
          type-name ordering) and raises TypeError on Python 3 — replaced
          by the equivalent ``if row:`` guard.
        * bare ``except:`` narrowed to ``except Exception:`` so that
          KeyboardInterrupt/SystemExit are no longer swallowed.
    """
    opts = Switch._meta
    app_label = opts.app_label
    rdr = ''  # will contain CSV data
    msg = ''
    success_import_list = []
    error_import_list = []
    type_error_import_list = []
    #TODO : Too many indentation in the code, refact, less if, for
    #TODO : respect DRY principale, some of the code is duplicate
    #from import tasks
    if request.method == 'POST':
        form = CDR_FileImport(request.user, request.POST, request.FILES)
        if form.is_valid():
            # Map each CDR field name to its 1-based CSV column number;
            # fields mapped to 0 are "not in the CSV".
            field_list = {}
            field_notin_list = []
            for i in CDR_FIELD_LIST:
                if int(request.POST[i]) != 0:
                    field_list[i] = int(request.POST[i])
                else:
                    field_notin_list.append((i))
            # perform sorting & get unique order list
            countMap = {}
            for v in field_list.itervalues():
                countMap[v] = countMap.get(v, 0) + 1
            uni = [(k, v) for k, v in field_list.iteritems()
                   if countMap[v] == 1]
            uni = sorted(uni, key=lambda uni: uni[1])
            # if order list matched with CDR_FIELD_LIST count
            if len(uni) == len(CDR_FIELD_LIST) - len(field_notin_list):
                # To count total rows of CSV file
                records = csv.reader(request.FILES['csv_file'],
                                     delimiter=',', quotechar='"')
                total_rows = len(list(records))
                rdr = csv.reader(request.FILES['csv_file'],
                                 delimiter=',', quotechar='"')
                cdr_record_count = 0
                # Read each Row
                for row in rdr:
                    # skip blank rows (was `row and str(row[0]) > 0`,
                    # which reduced to bool(row) on Python 2)
                    if row:
                        row = striplist(row)
                        try:
                            accountcode = ''
                            # extra fields to import
                            caller_id_name = ''
                            direction = 'outbound'
                            remote_media_ip = ''
                            answer_uepoch = ''
                            end_uepoch = ''
                            mduration = ''
                            billmsec = ''
                            write_codec = ''
                            read_codec = ''
                            get_cdr_from_row = {}
                            row_counter = 0
                            for j in uni:
                                get_cdr_from_row[j[0]] = row[j[1] - 1]
                                caller_id_name = \
                                    get_value_from_uni(j, row, 'caller_id_name')
                                caller_id_number = \
                                    get_value_from_uni(j, row, 'caller_id_number')
                                direction = \
                                    get_value_from_uni(j, row, 'direction')
                                remote_media_ip = \
                                    get_value_from_uni(j, row, 'remote_media_ip')
                                answer_uepoch = \
                                    get_value_from_uni(j, row, 'answer_uepoch')
                                end_uepoch = \
                                    get_value_from_uni(j, row, 'end_uepoch')
                                mduration = \
                                    get_value_from_uni(j, row, 'mduration')
                                billmsec = \
                                    get_value_from_uni(j, row, 'billmsec')
                                read_codec = \
                                    get_value_from_uni(j, row, 'read_codec')
                                write_codec = \
                                    get_value_from_uni(j, row, 'write_codec')
                                row_counter = row_counter + 1

                            # accountcode can come from a fixed POST value
                            # when it is not a CSV column, else from the row
                            if len(field_notin_list) != 0:
                                for i in field_notin_list:
                                    if i == 'accountcode':
                                        accountcode = int(request.POST[i + "_csv"])
                            if not accountcode:
                                accountcode = int(get_cdr_from_row['accountcode'])

                            # Mandatory fields to import
                            switch_id = int(request.POST['switch'])
                            caller_id_number = get_cdr_from_row['caller_id_number']
                            duration = int(get_cdr_from_row['duration'])
                            billsec = int(get_cdr_from_row['billsec'])
                            hangup_cause_id = \
                                get_hangupcause_id(int(get_cdr_from_row['hangup_cause_id']))
                            start_uepoch = \
                                datetime.datetime.fromtimestamp(int(get_cdr_from_row['start_uepoch']))
                            destination_number = get_cdr_from_row['destination_number']
                            uuid = get_cdr_from_row['uuid']

                            destination_data = chk_destination(destination_number)
                            authorized = destination_data['authorized']
                            country_id = destination_data['country_id']

                            # Extra fields to import; epoch strings keep
                            # only the first 10 digits (seconds)
                            if answer_uepoch:
                                answer_uepoch = \
                                    datetime.datetime.fromtimestamp(int(answer_uepoch[:10]))
                            if end_uepoch:
                                end_uepoch = \
                                    datetime.datetime.fromtimestamp(int(end_uepoch[:10]))

                            # Prepare global CDR
                            cdr_record = {
                                'switch_id': int(request.POST['switch']),
                                'caller_id_number': caller_id_number,
                                'caller_id_name': caller_id_name,
                                'destination_number': destination_number,
                                'duration': duration,
                                'billsec': billsec,
                                'hangup_cause_id': hangup_cause_id,
                                'accountcode': accountcode,
                                'direction': direction,
                                'uuid': uuid,
                                'remote_media_ip': remote_media_ip,
                                'start_uepoch': start_uepoch,
                                'answer_uepoch': answer_uepoch,
                                'end_uepoch': end_uepoch,
                                'mduration': mduration,
                                'billmsec': billmsec,
                                'read_codec': read_codec,
                                'write_codec': write_codec,
                                'cdr_type': 'CSV_IMPORT',
                                'cdr_object_id': '',
                                'country_id': country_id,
                                'authorized': authorized,
                            }

                            try:
                                # check if cdr is already existing in cdr_common
                                cdr_data = settings.DBCON[settings.MG_CDR_COMMON]
                                query_var = {}
                                query_var['uuid'] = uuid
                                record_count = cdr_data.find(query_var).count()
                                if record_count >= 1:
                                    msg = _('CDR already exists !!')
                                    error_import_list.append(row)
                                else:
                                    # if not, insert record
                                    # record global CDR
                                    CDR_COMMON.insert(cdr_record)
                                    daily_date = datetime.datetime.\
                                        fromtimestamp(int(get_cdr_from_row['start_uepoch'][:10]))
                                    # insert daily analytic record
                                    # (was `switch.id`: undefined name, see docstring)
                                    create_daily_analytic(daily_date, switch_id,
                                                          country_id, accountcode,
                                                          hangup_cause_id, duration)
                                    # MONTHLY_ANALYTIC
                                    # insert monthly analytic record
                                    create_monthly_analytic(daily_date, start_uepoch,
                                                            switch_id, country_id,
                                                            accountcode, duration)
                                    cdr_record_count = cdr_record_count + 1
                                    msg = \
                                        _('%(cdr_record_count)s Cdr(s) are uploaded, out of %(total_rows)s row(s) !!') \
                                        % {'cdr_record_count': cdr_record_count,
                                           'total_rows': total_rows}
                                    success_import_list.append(row)
                            except Exception:
                                msg = _("Error : invalid value for import")
                                type_error_import_list.append(row)
                        except Exception:
                            msg = _("Error : invalid value for import")
                            type_error_import_list.append(row)

                if cdr_record_count > 0:
                    apply_index()
                    # Apply index
                    DAILY_ANALYTIC.ensure_index([("metadata.date", -1)])
                    CDR_COMMON.ensure_index([("start_uepoch", -1)])
            else:
                msg = _("Error : importing several times the same column")
    else:
        form = CDR_FileImport(request.user)

    ctx = RequestContext(request, {
        'title': _('Import CDR'),
        'form': form,
        'opts': opts,
        'model_name': opts.object_name.lower(),
        'app_label': app_label,
        'rdr': rdr,
        'msg': msg,
        'success_import_list': success_import_list,
        'error_import_list': error_import_list,
        'type_error_import_list': type_error_import_list,
        'CDR_FIELD_LIST': list(CDR_FIELD_LIST),
        'CDR_FIELD_LIST_NUM': list(CDR_FIELD_LIST_NUM),
    })
    template = 'admin/cdr/switch/import_cdr.html'
    return render_to_response(template, context_instance=ctx)
def import_cdr(self, request):
    """Add custom method in django admin view to import CSV file of cdr

    **Attributes**:

        * ``form`` - CDR_FileImport
        * ``template`` - admin/cdr/switch/import_cdr.html

    **Logic Description**:

    **Important variable**:

        * total_rows - Total no. of records in the CSV file
        * retail_record_count - No. of records which are imported from
          The CSV file

    Fixes applied in review:

        * ``str(row[0]) > 0`` always evaluated True on Python 2 (str/int
          type-name ordering) and raises TypeError on Python 3 — replaced
          by the equivalent ``if row:`` guard.
        * bare ``except:`` narrowed to ``except Exception:``.

    NOTE(review): this method has the same name as an earlier
    ``import_cdr`` in this file and therefore shadows it — confirm the
    older definition can be removed.
    """
    opts = Switch._meta
    app_label = opts.app_label
    rdr = ''  # will contain CSV data
    msg = ''
    success_import_list = []
    error_import_list = []
    type_error_import_list = []
    if request.method == 'POST':
        form = CDR_FileImport(request.user, request.POST, request.FILES)
        if form.is_valid():
            # Map each CDR field name to its 1-based CSV column number;
            # fields mapped to 0 are "not in the CSV".
            field_list = {}
            field_notin_list = []
            for i in CDR_FIELD_LIST:
                if int(request.POST[i]) != 0:
                    field_list[i] = int(request.POST[i])
                else:
                    field_notin_list.append((i))
            # perform sorting & get unique order list
            countMap = {}
            for v in field_list.itervalues():
                countMap[v] = countMap.get(v, 0) + 1
            uni = [(k, v) for k, v in field_list.iteritems()
                   if countMap[v] == 1]
            uni = sorted(uni, key=lambda uni: uni[1])
            # if order list matched with CDR_FIELD_LIST count
            if len(uni) == len(CDR_FIELD_LIST) - len(field_notin_list):
                # To count total rows of CSV file
                records = csv.reader(request.FILES['csv_file'],
                                     delimiter=',', quotechar='"')
                total_rows = len(list(records))
                rdr = csv.reader(request.FILES['csv_file'],
                                 delimiter=',', quotechar='"')
                cdr_record_count = 0
                #Store cdr in list to insert by bulk
                cdr_bulk_record = []
                local_count_import = 0
                PAGE_SIZE = 1000
                # Read each Row
                for row in rdr:
                    # skip blank rows (was `row and str(row[0]) > 0`,
                    # which reduced to bool(row) on Python 2)
                    if row:
                        row = striplist(row)
                        try:
                            accountcode = ''
                            # extra fields to import
                            caller_id_name = ''
                            direction = 'outbound'
                            remote_media_ip = ''
                            answer_uepoch = ''
                            end_uepoch = ''
                            mduration = ''
                            billmsec = ''
                            write_codec = ''
                            read_codec = ''
                            get_cdr_from_row = {}
                            row_counter = 0
                            for j in uni:
                                get_cdr_from_row[j[0]] = row[j[1] - 1]
                                caller_id_name = get_value_from_uni(
                                    j, row, 'caller_id_name')
                                caller_id_number = get_value_from_uni(
                                    j, row, 'caller_id_number')
                                direction = get_value_from_uni(
                                    j, row, 'direction')
                                remote_media_ip = get_value_from_uni(
                                    j, row, 'remote_media_ip')
                                answer_uepoch = get_value_from_uni(
                                    j, row, 'answer_uepoch')
                                end_uepoch = get_value_from_uni(
                                    j, row, 'end_uepoch')
                                mduration = get_value_from_uni(
                                    j, row, 'mduration')
                                billmsec = get_value_from_uni(
                                    j, row, 'billmsec')
                                read_codec = get_value_from_uni(
                                    j, row, 'read_codec')
                                write_codec = get_value_from_uni(
                                    j, row, 'write_codec')
                                row_counter = row_counter + 1

                            # accountcode: prefer the fixed POST value when
                            # the field is not a CSV column, else the row
                            if len(field_notin_list) != 0:
                                for i in field_notin_list:
                                    if i == 'accountcode' and request.POST.get(
                                            "accountcode_csv"):
                                        accountcode = request.POST[
                                            "accountcode_csv"]
                            if not accountcode and request.POST.get(
                                    "accountcode") != '0':
                                accountcode = get_cdr_from_row[
                                    'accountcode']

                            # Mandatory fields to import
                            switch_id = int(request.POST['switch'])
                            caller_id_number = get_cdr_from_row[
                                'caller_id_number']
                            duration = int(get_cdr_from_row['duration'])
                            billsec = int(get_cdr_from_row['billsec'])

                            # Asterisk exports the hangup cause as a name
                            # ("NORMAL CLEARING"); FreeSWITCH as a q850 int
                            if request.POST.get('import_asterisk') \
                                    and request.POST['import_asterisk'] == 'on':
                                hangup_cause_name = "_".join(
                                    get_cdr_from_row['hangup_cause_id'].
                                    upper().split(' '))
                                hangup_cause_id = \
                                    get_hangupcause_id_from_name(hangup_cause_name)
                            else:
                                hangup_cause_id = \
                                    get_hangupcause_id(int(get_cdr_from_row['hangup_cause_id']))

                            start_uepoch = \
                                datetime.datetime.fromtimestamp(int(float(get_cdr_from_row['start_uepoch'])))
                            destination_number = get_cdr_from_row[
                                'destination_number']
                            uuid = get_cdr_from_row['uuid']

                            destination_data = chk_destination(
                                destination_number)
                            authorized = destination_data['authorized']
                            country_id = destination_data['country_id']

                            # Extra fields to import; epoch strings keep
                            # only the first 10 digits (seconds)
                            if answer_uepoch:
                                answer_uepoch = \
                                    datetime.datetime.fromtimestamp(int(answer_uepoch[:10]))
                            if end_uepoch:
                                end_uepoch = \
                                    datetime.datetime.fromtimestamp(int(end_uepoch[:10]))

                            # Prepare global CDR
                            cdr_record = generate_global_cdr_record(
                                switch_id, caller_id_number, caller_id_name,
                                destination_number, duration, billsec,
                                hangup_cause_id, accountcode, direction,
                                uuid, remote_media_ip, start_uepoch,
                                answer_uepoch, end_uepoch, mduration,
                                billmsec, read_codec, write_codec,
                                'CSV_IMPORT', '', country_id, authorized)

                            # check if cdr is already existing in cdr_common
                            cdr_data = settings.DBCON[
                                settings.MONGO_CDRSTATS['CDR_COMMON']]
                            query_var = {}
                            query_var['uuid'] = uuid
                            record_count = cdr_data.find(query_var).count()
                            if record_count >= 1:
                                msg = _('CDR already exists !!')
                                error_import_list.append(row)
                            else:
                                # if not, insert record
                                # record global CDR
                                # Append cdr to bulk_cdr list
                                cdr_bulk_record.append(cdr_record)
                                local_count_import = local_count_import + 1
                                # flush a full page to Mongo in one bulk insert
                                if local_count_import == PAGE_SIZE:
                                    CDR_COMMON.insert(cdr_bulk_record)
                                    local_count_import = 0
                                    cdr_bulk_record = []
                                date_start_uepoch = get_cdr_from_row[
                                    'start_uepoch']
                                common_function_to_create_analytic(
                                    date_start_uepoch, start_uepoch,
                                    switch_id, country_id, accountcode,
                                    hangup_cause_id, duration)
                                cdr_record_count = cdr_record_count + 1
                                msg = \
                                    _('%(cdr_record_count)s Cdr(s) are uploaded, out of %(total_rows)s row(s) !!') \
                                    % {'cdr_record_count': cdr_record_count,
                                       'total_rows': total_rows}
                                success_import_list.append(row)
                        except Exception:
                            msg = _("Error : invalid value for import")
                            type_error_import_list.append(row)

                # remaining record
                if cdr_bulk_record:
                    CDR_COMMON.insert(cdr_bulk_record)
                    local_count_import = 0
                    cdr_bulk_record = []

                if cdr_record_count > 0:
                    # Apply index
                    apply_index(shell=True)
            else:
                msg = _("Error : importing several times the same column")
    else:
        form = CDR_FileImport(request.user)

    ctx = RequestContext(
        request,
        {
            'title': _('Import CDR'),
            'form': form,
            'opts': opts,
            'model_name': opts.object_name.lower(),
            'app_label': app_label,
            'rdr': rdr,
            'msg': msg,
            'success_import_list': success_import_list,
            'error_import_list': error_import_list,
            'type_error_import_list': type_error_import_list,
            'CDR_FIELD_LIST': list(CDR_FIELD_LIST),
            'CDR_FIELD_LIST_NUM': list(CDR_FIELD_LIST_NUM),
        })
    template = 'admin/cdr/switch/import_cdr.html'
    return render_to_response(template, context_instance=ctx)