def test_functions(self):
    get_switch_list()
    get_hangupcause_name(self.hangupcause.pk)
    get_hangupcause_name(2)
    get_hangupcause_id(self.hangupcause.code)
    # Template tags
    hangupcause_name_with_title(self.hangupcause.pk)
    get_country_id_prefix(['44', '442'])
def test_functions(self):
    get_switch_list()
    get_hangupcause_name(self.hangupcause.pk)
    get_hangupcause_name(2)
    get_hangupcause_id(self.hangupcause.code)
    # Template tags
    hangupcause_name_with_title(self.hangupcause.pk)
    value = {'_id': {'val': 1}}
    mongo_id(value, 'val')
    get_hc_list()
    get_country_id(['44', '442'])
def test_functions(self):
    get_switch_list()
    get_hangupcause_name(self.hangupcause.pk)
    get_hangupcause_name(2)
    get_hangupcause_id(self.hangupcause.code)
    # Template tags
    hangupcause_name_with_title(self.hangupcause.pk)
    value = {'_id': {'val': 1}}
    mongo_id(value, 'val')
    seen_unseen(value)
    seen_unseen('')
    seen_unseen_word(value)
    seen_unseen_word('')
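# The template filters exercised above (`mongo_id`, `seen_unseen`,
# `seen_unseen_word`) are defined elsewhere in the project. A minimal sketch
# of `mongo_id`, inferred only from the test input {'_id': {'val': 1}} --
# an illustration, not the project's actual implementation:

def mongo_id(value, field_name):
    """Hypothetical sketch: pull a sub-field out of a MongoDB `_id` dict."""
    try:
        return value['_id'][field_name]
    except (KeyError, TypeError):
        return ''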
def translate_disposition(disposition):
    """
    function to convert asterisk disposition to an internal hangup_cause_id
    """
    try:
        id_disposition = DICT_DISPOSITION.get(disposition.encode("utf-8"), 0)
        transdisposition = DISPOSITION_TRANSLATION[id_disposition]
    except:
        transdisposition = 0
    hangup_cause_id = get_hangupcause_id(transdisposition)
    return hangup_cause_id
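# For context, a plausible shape for the two lookup tables used above. The
# exact contents live elsewhere in the project; the values below are
# illustrative assumptions (Asterisk dispositions mapped first to an internal
# id, then to a Q.850 cause code), not the authoritative tables:

DICT_DISPOSITION = {
    'ANSWER': 1, 'ANSWERED': 1,
    'BUSY': 2,
    'NOANSWER': 3, 'NO ANSWER': 3,
    'CANCEL': 4,
    'CONGESTION': 5,
    'FAILED': 0,
}
# Q.850 causes: 16 NORMAL_CLEARING, 17 USER_BUSY, 19 NO_ANSWER,
# 21 CALL_REJECTED, 34 NORMAL_CIRCUIT_CONGESTION, 41 NORMAL_TEMPORARY_FAILURE
DISPOSITION_TRANSLATION = {
    0: 41,
    1: 16,
    2: 17,
    3: 19,
    4: 21,
    5: 34,
}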
def run_alarm(alarm_obj, logger):
    """Alarm object"""
    if alarm_obj.type == ALARM_TYPE.ALOC:
        # ALOC (average length of call)
        logger.debug('ALOC (average length of call)')
        # return start and end date of previous/current day
        dt_list = get_start_end_date(alarm_obj.alert_condition_add_on)

        # Previous date data
        query_var = {}
        query_var['metadata.date'] = {
            '$gte': dt_list['p_start_date'],
            '$lte': dt_list['p_end_date']
        }
        pipeline = pipeline_cdr_alert_task(query_var)
        pre_total_data = settings.DBCON.command(
            'aggregate',
            settings.MONGO_CDRSTATS['DAILY_ANALYTIC'],
            pipeline=pipeline)
        pre_day_data = {}
        for doc in pre_total_data['result']:
            pre_date = dt_list['p_start_date']
            pre_day_data[pre_date.strftime('%Y-%m-%d')] = doc['duration_avg']
            if alarm_obj.alert_condition == ALERT_CONDITION.IS_LESS_THAN or \
                    alarm_obj.alert_condition == ALERT_CONDITION.IS_GREATER_THAN:
                chk_alert_value(alarm_obj, doc['duration_avg'])
            else:
                previous_date_duration = doc['duration_avg']

        # Current date data
        query_var = {}
        query_var['metadata.date'] = {
            '$gte': dt_list['c_start_date'],
            '$lte': dt_list['c_end_date']
        }
        # current date
        pipeline = pipeline_cdr_alert_task(query_var)
        cur_total_data = settings.DBCON.command(
            'aggregate',
            settings.MONGO_CDRSTATS['DAILY_ANALYTIC'],
            pipeline=pipeline)
        cur_day_data = {}
        for doc in cur_total_data['result']:
            cur_date = dt_list['c_start_date']
            cur_day_data[cur_date.strftime('%Y-%m-%d')] = doc['duration_avg']
            if alarm_obj.alert_condition == ALERT_CONDITION.IS_LESS_THAN or \
                    alarm_obj.alert_condition == ALERT_CONDITION.IS_GREATER_THAN:
                chk_alert_value(alarm_obj, doc['duration_avg'])
            else:
                current_date_duration = doc['duration_avg']
                chk_alert_value(alarm_obj, current_date_duration, previous_date_duration)

    if alarm_obj.type == ALARM_TYPE.ASR:
        # ASR (Answer Seize Ratio)
        logger.debug('ASR (Answer Seize Ratio)')
        # return start and end date of previous/current day
        dt_list = get_start_end_date(alarm_obj.alert_condition_add_on)
        # hangup_cause_q850 - 16 - NORMAL_CLEARING
        hangup_cause_q850 = 16

        # Previous date data
        query_var = {}
        query_var['start_uepoch'] = {
            '$gte': dt_list['p_start_date'],
            '$lte': dt_list['p_end_date']
        }
        pre_total_record = cdr_data.find(query_var).count()
        query_var['hangup_cause_id'] = get_hangupcause_id(hangup_cause_q850)
        pre_total_answered_record = cdr_data.find(query_var).count()
        # guard against an empty result set (avoid ZeroDivisionError);
        # note: on Python 2 this is integer division, cast to float for a fractional ASR
        pre_total_record = 1 if pre_total_record == 0 else pre_total_record
        previous_asr = pre_total_answered_record / pre_total_record
        if alarm_obj.alert_condition == ALERT_CONDITION.IS_LESS_THAN or \
                alarm_obj.alert_condition == ALERT_CONDITION.IS_GREATER_THAN:
            chk_alert_value(alarm_obj, previous_asr)

        # Current date data
        query_var = {}
        query_var['start_uepoch'] = {
            '$gte': dt_list['c_start_date'],
            '$lte': dt_list['c_end_date']
        }
        cur_total_record = cdr_data.find(query_var).count()
        query_var['hangup_cause_id'] = get_hangupcause_id(hangup_cause_q850)
        cur_total_answered_record = cdr_data.find(query_var).count()
        cur_total_record = 1 if cur_total_record == 0 else cur_total_record
        current_asr = cur_total_answered_record / cur_total_record
        if alarm_obj.alert_condition == ALERT_CONDITION.IS_LESS_THAN or \
                alarm_obj.alert_condition == ALERT_CONDITION.IS_GREATER_THAN:
            chk_alert_value(alarm_obj, current_asr)
        else:
            chk_alert_value(alarm_obj, current_asr, previous_asr)
    return True
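# `chk_alert_value` is defined elsewhere; the sketch below only shows the
# comparison logic its call sites imply (one-value checks against a fixed
# threshold, two-value checks against the previous period). The field name
# `alert_value` and the helper `notify_admin_with_mail` are assumptions:

def chk_alert_value(alarm_obj, current_value, previous_value=None):
    """Hypothetical sketch: decide whether the alarm should fire."""
    if alarm_obj.alert_condition == ALERT_CONDITION.IS_LESS_THAN:
        if current_value < alarm_obj.alert_value:
            notify_admin_with_mail(alarm_obj, current_value)  # assumed helper
    elif alarm_obj.alert_condition == ALERT_CONDITION.IS_GREATER_THAN:
        if current_value > alarm_obj.alert_value:
            notify_admin_with_mail(alarm_obj, current_value)
    elif previous_value is not None:
        # DECREASE/INCREASE-by-more-than style conditions (assumed semantics)
        delta = current_value - previous_value
        if abs(delta) > alarm_obj.alert_value:
            notify_admin_with_mail(alarm_obj, current_value)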
def import_cdr(self, request):
    """Add custom method in django admin view to import CSV file of cdr

    **Attributes**:

        * ``form`` - CDR_FileImport
        * ``template`` - admin/cdr/switch/import_cdr.html

    **Logic Description**:

    **Important variable**:

        * total_rows - Total no. of records in the CSV file
        * cdr_record_count - No. of records which are imported from
          the CSV file
    """
    opts = Switch._meta
    app_label = opts.app_label
    rdr = ''  # will contain CSV data
    msg = ''
    success_import_list = []
    error_import_list = []
    type_error_import_list = []
    if request.method == 'POST':
        form = CDR_FileImport(request.user, request.POST, request.FILES)
        if form.is_valid():
            field_list = {}
            field_notin_list = []
            for i in CDR_FIELD_LIST:
                if int(request.POST[i]) != 0:
                    field_list[i] = int(request.POST[i])
                else:
                    field_notin_list.append(i)

            # perform sorting & get unique order list
            countMap = {}
            for v in field_list.itervalues():
                countMap[v] = countMap.get(v, 0) + 1
            uni = [(k, v) for k, v in field_list.iteritems() if countMap[v] == 1]
            uni = sorted(uni, key=lambda uni: uni[1])

            # if order list matched with CDR_FIELD_LIST count
            if len(uni) == len(CDR_FIELD_LIST) - len(field_notin_list):
                # To count total rows of CSV file
                records = csv.reader(
                    request.FILES['csv_file'], delimiter=',', quotechar='"')
                total_rows = len(list(records))

                rdr = csv.reader(
                    request.FILES['csv_file'], delimiter=',', quotechar='"')
                cdr_record_count = 0
                # Store cdr in list to insert by bulk
                cdr_bulk_record = []
                local_count_import = 0
                PAGE_SIZE = 1000

                # Read each Row
                for row in rdr:
                    if row and row[0]:  # was `str(row[0]) > 0`, a str/int comparison
                        row = striplist(row)
                        try:
                            accountcode = ''
                            # extra fields to import
                            caller_id_name = ''
                            direction = 'outbound'
                            remote_media_ip = ''
                            answer_uepoch = ''
                            end_uepoch = ''
                            mduration = ''
                            billmsec = ''
                            write_codec = ''
                            read_codec = ''

                            get_cdr_from_row = {}
                            row_counter = 0
                            for j in uni:
                                get_cdr_from_row[j[0]] = row[j[1] - 1]
                                #get_cdr_from_row[j[0]] = row[row_counter]
                                caller_id_name = get_value_from_uni(j, row, 'caller_id_name')
                                caller_id_number = get_value_from_uni(j, row, 'caller_id_number')
                                direction = get_value_from_uni(j, row, 'direction')
                                remote_media_ip = get_value_from_uni(j, row, 'remote_media_ip')
                                answer_uepoch = get_value_from_uni(j, row, 'answer_uepoch')
                                end_uepoch = get_value_from_uni(j, row, 'end_uepoch')
                                mduration = get_value_from_uni(j, row, 'mduration')
                                billmsec = get_value_from_uni(j, row, 'billmsec')
                                read_codec = get_value_from_uni(j, row, 'read_codec')
                                write_codec = get_value_from_uni(j, row, 'write_codec')
                                row_counter = row_counter + 1

                            if len(field_notin_list) != 0:
                                for i in field_notin_list:
                                    if i == 'accountcode' and request.POST.get("accountcode_csv"):
                                        accountcode = request.POST["accountcode_csv"]
                            if not accountcode and request.POST.get("accountcode") != '0':
                                accountcode = get_cdr_from_row['accountcode']

                            # Mandatory fields to import
                            switch_id = int(request.POST['switch'])
                            caller_id_number = get_cdr_from_row['caller_id_number']
                            duration = int(get_cdr_from_row['duration'])
                            billsec = int(get_cdr_from_row['billsec'])

                            if request.POST.get('import_asterisk') \
                                    and request.POST['import_asterisk'] == 'on':
                                hangup_cause_name = "_".join(
                                    get_cdr_from_row['hangup_cause_id'].upper().split(' '))
                                hangup_cause_id = \
                                    get_hangupcause_id_from_name(hangup_cause_name)
                            else:
                                hangup_cause_id = \
                                    get_hangupcause_id(int(get_cdr_from_row['hangup_cause_id']))

                            start_uepoch = datetime.datetime.fromtimestamp(
                                int(float(get_cdr_from_row['start_uepoch'])))
                            destination_number = get_cdr_from_row['destination_number']
                            uuid = get_cdr_from_row['uuid']
                            destination_data = chk_destination(destination_number)
                            authorized = destination_data['authorized']
                            country_id = destination_data['country_id']

                            # Extra fields to import
                            if answer_uepoch:
                                answer_uepoch = \
                                    datetime.datetime.fromtimestamp(int(answer_uepoch[:10]))
                            if end_uepoch:
                                end_uepoch = \
                                    datetime.datetime.fromtimestamp(int(end_uepoch[:10]))

                            # Prepare global CDR
                            cdr_record = generate_global_cdr_record(
                                switch_id, caller_id_number, caller_id_name,
                                destination_number, duration, billsec,
                                hangup_cause_id, accountcode, direction,
                                uuid, remote_media_ip, start_uepoch,
                                answer_uepoch, end_uepoch, mduration,
                                billmsec, read_codec, write_codec,
                                'CSV_IMPORT', '', country_id, authorized)

                            # check if cdr is already existing in cdr_common
                            cdr_data = settings.DBCON[settings.MONGO_CDRSTATS['CDR_COMMON']]
                            query_var = {}
                            query_var['uuid'] = uuid
                            record_count = cdr_data.find(query_var).count()
                            if record_count >= 1:
                                msg = _('CDR already exists !!')
                                error_import_list.append(row)
                            else:
                                # if not, insert record
                                # record global CDR
                                # Append cdr to bulk_cdr list
                                cdr_bulk_record.append(cdr_record)
                                local_count_import = local_count_import + 1
                                if local_count_import == PAGE_SIZE:
                                    CDR_COMMON.insert(cdr_bulk_record)
                                    local_count_import = 0
                                    cdr_bulk_record = []

                                date_start_uepoch = get_cdr_from_row['start_uepoch']
                                common_function_to_create_analytic(
                                    date_start_uepoch, start_uepoch, switch_id,
                                    country_id, accountcode, hangup_cause_id,
                                    duration)

                                cdr_record_count = cdr_record_count + 1
                                msg = \
                                    _('%(cdr_record_count)s Cdr(s) are uploaded, out of %(total_rows)s row(s) !!') \
                                    % {'cdr_record_count': cdr_record_count,
                                       'total_rows': total_rows}
                                success_import_list.append(row)
                        except:
                            msg = _("Error : invalid value for import")
                            type_error_import_list.append(row)

                # remaining record
                if cdr_bulk_record:
                    CDR_COMMON.insert(cdr_bulk_record)
                    local_count_import = 0
                    cdr_bulk_record = []

                if cdr_record_count > 0:
                    # Apply index
                    apply_index(shell=True)
            else:
                msg = _("Error : importing several times the same column")
    else:
        form = CDR_FileImport(request.user)

    ctx = RequestContext(request, {
        'title': _('Import CDR'),
        'form': form,
        'opts': opts,
        'model_name': opts.object_name.lower(),
        'app_label': app_label,
        'rdr': rdr,
        'msg': msg,
        'success_import_list': success_import_list,
        'error_import_list': error_import_list,
        'type_error_import_list': type_error_import_list,
        'CDR_FIELD_LIST': list(CDR_FIELD_LIST),
        'CDR_FIELD_LIST_NUM': list(CDR_FIELD_LIST_NUM),
    })
    template = 'admin/cdr/switch/import_cdr.html'
    return render_to_response(template, context_instance=ctx)
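# `generate_global_cdr_record` lives elsewhere in the project. A sketch,
# assuming it simply packs its arguments into the Mongo document shape that
# the older revisions further down in this file build inline:

def generate_global_cdr_record(switch_id, caller_id_number, caller_id_name,
                               destination_number, duration, billsec,
                               hangup_cause_id, accountcode, direction,
                               uuid, remote_media_ip, start_uepoch,
                               answer_uepoch, end_uepoch, mduration,
                               billmsec, read_codec, write_codec,
                               cdr_type, cdr_object_id, country_id, authorized):
    """Hypothetical sketch: build the global CDR document."""
    return {
        'switch_id': switch_id,
        'caller_id_number': caller_id_number,
        'caller_id_name': caller_id_name,
        'destination_number': destination_number,
        'duration': duration,
        'billsec': billsec,
        'hangup_cause_id': hangup_cause_id,
        'accountcode': accountcode,
        'direction': direction,
        'uuid': uuid,
        'remote_media_ip': remote_media_ip,
        'start_uepoch': start_uepoch,
        'answer_uepoch': answer_uepoch,
        'end_uepoch': end_uepoch,
        'mduration': mduration,
        'billmsec': billmsec,
        'read_codec': read_codec,
        'write_codec': write_codec,
        'cdr_type': cdr_type,
        'cdr_object_id': cdr_object_id,
        'country_id': country_id,
        'authorized': authorized,
    }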
def import_cdr(shell=False, logger=False):
    """
    Connect to the `import_cdr` Database and import the new CDRs
    """
    count_imported = 0
    log_print(logger, shell, "in func import_cdr...")

    if not check_connection_sql():
        log_print(logger, shell, "check_connection_sql - Error Connection")
        return (False, "Error Connection")

    # Each time the task runs we only take CDR_IMPORT_LIMIT records to import.
    # This defines the max speed of import; this limit can be changed.
    new_CDRs = CDRImport.objects.using('import_cdr')\
        .filter(imported=False)\
        .order_by('-id')[:settings.CDR_IMPORT_LIMIT]

    (list_newcdr, list_cdrid) = ([], [])
    for call in new_CDRs:
        # Increment counter
        count_imported = count_imported + 1

        # Get the dialcode
        dialcode = get_dialcode(call.destination_number, call.dialcode)
        switch_info = chk_ipaddress(call.switch)

        # Check Destination number
        if len(call.destination_number) <= settings.INTERNAL_CALL or call.destination_number[:1].isalpha():
            authorized = 1
            country_id = None
            call_type = CALL_TYPE.INTERNAL
        else:
            # TODO: rename verify_auth_dest_number
            destination_data = verify_auth_dest_number(call.destination_number)
            authorized = destination_data['authorized']
            country_id = destination_data['country_id']
            call_type = CALL_TYPE.INTERNATIONAL

        # Sanitize direction
        if call.direction:
            direction = call.direction
        else:
            direction = CALL_DIRECTION.NOTDEFINED

        # Find the user for the given accountcode
        try:
            user = AccountCode.objects.get(accountcode=call.accountcode).user
        except:
            # Cannot assign the accountcode to an existing user,
            # therefore we assign the CDR to an Admin
            user = User.objects.filter(is_superuser=True)[0]

        try:
            user_profile = user.userprofile
        except:
            user_profile = UserProfile(user=user)
            user_profile.save()

        # Retrieve VoipPlan
        if user_profile:
            voipplan_id = user_profile.voipplan_id
        else:
            voipplan_id = False
            print_shell(shell, "VoipPlan doesn't exist for this user/accountcode (%s)" % call.accountcode)

        if call.buy_rate or call.buy_cost or call.sell_rate or call.sell_cost:
            buy_rate = call.buy_rate
            buy_cost = call.buy_cost
            sell_rate = call.sell_rate
            sell_cost = call.sell_cost
        elif voipplan_id:
            call_rate = calculate_call_cost(voipplan_id, call.destination_number, call.billsec)
            buy_rate = call_rate['buy_rate']
            buy_cost = call_rate['buy_cost']
            sell_rate = call_rate['sell_rate']
            sell_cost = call_rate['sell_cost']
        else:
            buy_rate = buy_cost = sell_rate = sell_cost = 0

        hangup_cause_id = get_hangupcause_id(call.hangup_cause_id)

        log_print(logger, shell,
                  "Create new CDR -> date:%s - dst:%s - duration:%s - hangup_cause:%s - sell_cost:%s" % (
                      call.starting_date, call.destination_number,
                      str(call.duration), str(hangup_cause_id), str(call.sell_cost)))

        # Create the new CDR
        newCDR = CDR(
            user=user,
            switch=switch_info['switch'],
            cdr_source_type=call.cdr_source_type,
            callid=call.callid,
            caller_id_number=call.caller_id_number,
            caller_id_name=call.caller_id_name,
            destination_number=call.destination_number,
            dialcode_id=dialcode,
            starting_date=call.starting_date,
            duration=call.duration,
            billsec=call.billsec,
            progresssec=call.progresssec,
            answersec=call.answersec,
            waitsec=call.waitsec,
            hangup_cause_id=hangup_cause_id,
            direction=direction,
            country_id=country_id,
            authorized=authorized,
            accountcode='' if call.accountcode is None else call.accountcode,
            buy_rate=buy_rate,
            buy_cost=buy_cost,
            sell_rate=sell_rate,
            sell_cost=sell_cost,
            call_type=call_type,
            data='' if call.extradata is None else call.extradata)

        list_newcdr.append(newCDR)
        list_cdrid.append(str(call.id))

        if (count_imported % 100) == 0:
            bulk_create_cdrs(list_newcdr, list_cdrid)
            (list_newcdr, list_cdrid) = ([], [])

    # we exit the loop but we might still have some remaining CDRs to push
    if len(list_newcdr) > 0:
        bulk_create_cdrs(list_newcdr, list_cdrid)
        (list_newcdr, list_cdrid) = ([], [])

    log_print(logger, shell, 'TASK :: run_cdr_import -> func import_cdr count_imported:%d' % count_imported)
    return (True, count_imported)
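# `bulk_create_cdrs` is defined elsewhere. A minimal sketch, assuming it
# bulk-inserts the prepared CDR rows and flags the source rows in the
# `import_cdr` database as imported (the `imported` field and database alias
# follow the usage above; treat the body as illustrative):

def bulk_create_cdrs(list_newcdr, list_cdrid):
    """Hypothetical sketch: push a batch of CDRs and mark the originals."""
    # single INSERT for the whole batch
    CDR.objects.bulk_create(list_newcdr)
    # flag the already-copied rows so the next run skips them
    CDRImport.objects.using('import_cdr')\
        .filter(id__in=list_cdrid)\
        .update(imported=True)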
callerid_number = callerid
channel = row[3]
if not channel:
    channel = ''  # Set empty string for channel in case it is None
duration = set_int_default(row[4], 0)
billsec = set_int_default(row[5], 0)
ast_disposition = row[6]
try:
    id_disposition = dic_disposition.get(
        ast_disposition.encode("utf-8"), 0)
    transdisposition = DISPOSITION_TRANSLATION[id_disposition]
except:
    transdisposition = 0
hangup_cause_id = get_hangupcause_id(transdisposition)
accountcode = row[7]
uniqueid = row[8]
start_uepoch = datetime.fromtimestamp(int(row[1]))

# Check Destination number
destination_number = row[0]
if (len(destination_number) <= settings.INTERNAL_CALL
        or destination_number[:1].isalpha()):
    authorized = 1
    country_id = 999
else:
    destination_data = chk_destination(destination_number)
    authorized = destination_data['authorized']
    country_id = destination_data['country_id']
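# `set_int_default` is a small helper imported from elsewhere; its usage
# above implies the following behaviour (a sketch, not the project's code):

def set_int_default(val, default):
    """Return val as an int, or default when val is empty or not a number."""
    try:
        return int(val)
    except (TypeError, ValueError):
        return default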
def run_alarm(alarm_obj, logger):
    """
    Perform Alarm Check
    """
    running_alarm_test_data = {"running_alarm_status": True, "current_value": None, "previous_value": None}
    user = False
    switch_id = 0
    if alarm_obj.type == ALARM_TYPE.ALOC:
        # ALOC (average length of call)
        logger.debug("ALOC (Average Length Of Call)")
        # return start and end date of previous/current day
        date_dict = get_start_end_date(alarm_obj.alert_condition_add_on)

        # Previous date data
        start_date = date_dict["p_start_date"]
        end_date = date_dict["p_end_date"]
        daily_data = get_report_cdr_per_switch(user, "day", start_date, end_date, switch_id)
        total_calls = daily_data["nbcalls"]["total"]
        total_duration = daily_data["duration"]["total"]
        # total_calls should not be 0 (avoid ZeroDivisionError)
        total_calls = 1 if total_calls == 0 else total_calls
        ACD = math.floor(total_duration / total_calls)
        if (
            alarm_obj.alert_condition == ALERT_CONDITION.IS_LESS_THAN
            or alarm_obj.alert_condition == ALERT_CONDITION.IS_GREATER_THAN
        ):
            running_alarm_test_data["previous_value"] = ACD
            chk_alert_value(alarm_obj, ACD)
        else:
            previous_date_duration = ACD

        # Current date data
        start_date = date_dict["c_start_date"]
        end_date = date_dict["c_end_date"]
        daily_data = get_report_cdr_per_switch(user, "day", start_date, end_date, switch_id)
        total_calls = daily_data["nbcalls"]["total"]
        total_duration = daily_data["duration"]["total"]
        # total_calls should not be 0 (avoid ZeroDivisionError)
        total_calls = 1 if total_calls == 0 else total_calls
        ACD = math.floor(total_duration / total_calls)
        if (
            alarm_obj.alert_condition == ALERT_CONDITION.IS_LESS_THAN
            or alarm_obj.alert_condition == ALERT_CONDITION.IS_GREATER_THAN
        ):
            running_alarm_test_data["current_value"] = ACD
            chk_alert_value(alarm_obj, ACD)
        else:
            current_date_duration = ACD
            running_alarm_test_data["current_value"] = ACD
            running_alarm_test_data["previous_value"] = previous_date_duration
            chk_alert_value(alarm_obj, current_date_duration, previous_date_duration)

    elif alarm_obj.type == ALARM_TYPE.ASR:
        # ASR (Answer Seize Ratio)
        logger.debug("ASR (Answer Seize Ratio)")
        # return start and end date of previous/current day
        date_dict = get_start_end_date(alarm_obj.alert_condition_add_on)
        # hangup_cause_q850 - 16 - NORMAL_CLEARING
        hangup_cause_q850 = 16

        # Previous date data
        start_date = date_dict["p_start_date"]
        end_date = date_dict["p_end_date"]
        limit = 10
        hangup_cause_id = False
        # TODO: Regroup the 2 calls to custom_sql_aggr_top_hangup to get the hangup
        (hangup_cause_data, total_calls, total_duration, total_billsec,
         total_buy_cost, total_sell_cost) = custom_sql_aggr_top_hangup(
            user, switch_id, hangup_cause_id, limit, start_date, end_date)
        pre_total_record = total_calls

        hangup_cause_id = get_hangupcause_id(hangup_cause_q850)
        (hangup_cause_data, total_calls, total_duration, total_billsec,
         total_buy_cost, total_sell_cost) = custom_sql_aggr_top_hangup(
            user, switch_id, hangup_cause_id, limit, start_date, end_date)
        pre_total_answered_record = total_calls

        # pre_total_record should not be 0
        pre_total_record = 1 if pre_total_record == 0 else pre_total_record
        previous_asr = pre_total_answered_record / pre_total_record
        if (
            alarm_obj.alert_condition == ALERT_CONDITION.IS_LESS_THAN
            or alarm_obj.alert_condition == ALERT_CONDITION.IS_GREATER_THAN
        ):
            running_alarm_test_data["previous_value"] = previous_asr
            chk_alert_value(alarm_obj, previous_asr)

        # Current date data
        start_date = date_dict["c_start_date"]
        end_date = date_dict["c_end_date"]
        limit = 10
        hangup_cause_id = False
        # TODO: Regroup the 2 calls to custom_sql_aggr_top_hangup to get the hangup
        (hangup_cause_data, total_calls, total_duration, total_billsec,
         total_buy_cost, total_sell_cost) = custom_sql_aggr_top_hangup(
            user, switch_id, hangup_cause_id, limit, start_date, end_date)
        cur_total_record = total_calls

        hangup_cause_id = get_hangupcause_id(hangup_cause_q850)
        (hangup_cause_data, total_calls, total_duration, total_billsec,
         total_buy_cost, total_sell_cost) = custom_sql_aggr_top_hangup(
            user, switch_id, hangup_cause_id, limit, start_date, end_date)
        cur_total_answered_record = total_calls

        # cur_total_record should not be 0
        cur_total_record = 1 if cur_total_record == 0 else cur_total_record
        current_asr = cur_total_answered_record / cur_total_record
        if (
            alarm_obj.alert_condition == ALERT_CONDITION.IS_LESS_THAN
            or alarm_obj.alert_condition == ALERT_CONDITION.IS_GREATER_THAN
        ):
            running_alarm_test_data["current_value"] = current_asr
            chk_alert_value(alarm_obj, current_asr)
        else:
            running_alarm_test_data["current_value"] = current_asr
            running_alarm_test_data["previous_value"] = previous_asr
            chk_alert_value(alarm_obj, current_asr, previous_asr)

    return running_alarm_test_data
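# `get_start_end_date` is defined elsewhere; the comment "start and end date
# of previous/current day" and the keys used above imply something like the
# sketch below. The meaning of `alert_condition_add_on` (previous day versus
# the same weekday a week earlier) is an assumption:

import datetime

def get_start_end_date(alert_condition_add_on):
    """Hypothetical sketch: boundaries for the current and comparison day."""
    now = datetime.datetime.now()
    c_start_date = now.replace(hour=0, minute=0, second=0, microsecond=0)
    c_end_date = now.replace(hour=23, minute=59, second=59, microsecond=0)
    # compare against yesterday, or the same weekday of the previous week
    # (SAME_DAY_IN_PRE_WEEK is an assumed constant)
    days_back = 7 if alert_condition_add_on == SAME_DAY_IN_PRE_WEEK else 1
    p_start_date = c_start_date - datetime.timedelta(days=days_back)
    p_end_date = c_end_date - datetime.timedelta(days=days_back)
    return {
        'p_start_date': p_start_date, 'p_end_date': p_end_date,
        'c_start_date': c_start_date, 'c_end_date': c_end_date,
    }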
def run_alarm(alarm_obj, logger):
    """Alarm object"""
    if alarm_obj.type == 1:
        # ALOC (average length of call)
        logger.debug("ALOC (average length of call)")
        # return start and end date of previous/current day
        dt_list = get_start_end_date(alarm_obj.alert_condition_add_on)

        # Previous date data
        query_var = {}
        query_var["start_uepoch"] = {"$gte": dt_list["p_start_date"], "$lte": dt_list["p_end_date"]}
        # previous date map_reduce
        pre_total_data = cdr_data.map_reduce(map, reduce, out, query=query_var, finalize=finalfc)
        pre_total_data = pre_total_data.find().sort([("_id.a_Year", -1), ("_id.b_Month", -1)])
        pre_day_data = {}
        for doc in pre_total_data:
            pre_date = dt_list["p_start_date"]
            pre_day_data[pre_date.strftime("%Y-%m-%d")] = doc["value"]["duration__avg"]
            if alarm_obj.alert_condition == 1 or alarm_obj.alert_condition == 2:
                chk_alert_value(alarm_obj, doc["value"]["duration__avg"])
            else:
                previous_date_duration = doc["value"]["duration__avg"]

        # Current date data
        query_var = {}
        query_var["start_uepoch"] = {"$gte": dt_list["c_start_date"], "$lte": dt_list["c_end_date"]}
        # current date map_reduce
        cur_total_data = cdr_data.map_reduce(map, reduce, out, query=query_var, finalize=finalfc)
        cur_total_data = cur_total_data.find().sort([("_id.a_Year", -1), ("_id.b_Month", -1)])
        cur_day_data = {}
        for doc in cur_total_data:
            cur_date = dt_list["c_start_date"]
            cur_day_data[cur_date.strftime("%Y-%m-%d")] = doc["value"]["duration__avg"]
            if alarm_obj.alert_condition == 1 or alarm_obj.alert_condition == 2:
                chk_alert_value(alarm_obj, doc["value"]["duration__avg"])
            else:
                current_date_duration = doc["value"]["duration__avg"]
                chk_alert_value(alarm_obj, current_date_duration, previous_date_duration)

    if alarm_obj.type == 2:
        # ASR (Answer Seize Ratio)
        logger.debug("ASR (Answer Seize Ratio)")
        # return start and end date of previous/current day
        dt_list = get_start_end_date(alarm_obj.alert_condition_add_on)
        # hangup_cause_q850 - 16 - NORMAL_CLEARING
        hangup_cause_q850 = 16

        # Previous date data
        query_var = {}
        query_var["start_uepoch"] = {"$gte": dt_list["p_start_date"], "$lte": dt_list["p_end_date"]}
        pre_total_record = cdr_data.find(query_var).count()
        query_var["hangup_cause_id"] = get_hangupcause_id(hangup_cause_q850)
        pre_total_answered_record = cdr_data.find(query_var).count()
        # guard against an empty result set (avoid ZeroDivisionError)
        pre_total_record = 1 if pre_total_record == 0 else pre_total_record
        previous_asr = pre_total_answered_record / pre_total_record
        if alarm_obj.alert_condition == 1 or alarm_obj.alert_condition == 2:
            chk_alert_value(alarm_obj, previous_asr)

        # Current date data
        query_var = {}
        query_var["start_uepoch"] = {"$gte": dt_list["c_start_date"], "$lte": dt_list["c_end_date"]}
        cur_total_record = cdr_data.find(query_var).count()
        query_var["hangup_cause_id"] = get_hangupcause_id(hangup_cause_q850)
        cur_total_answered_record = cdr_data.find(query_var).count()
        cur_total_record = 1 if cur_total_record == 0 else cur_total_record
        current_asr = cur_total_answered_record / cur_total_record
        if alarm_obj.alert_condition == 1 or alarm_obj.alert_condition == 2:
            chk_alert_value(alarm_obj, current_asr)
        else:
            chk_alert_value(alarm_obj, current_asr, previous_asr)
    return True
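# `map`, `reduce`, `out` and `finalfc` are passed to map_reduce above but
# defined elsewhere (note they shadow the Python builtins, exactly as the
# call sites do). A sketch of what they plausibly compute, given that the
# result carries value.duration__avg per year/month bucket; the JavaScript
# bodies and output collection name are illustrative assumptions:

from bson.code import Code

map = Code("""
function () {
    emit({a_Year: this.start_uepoch.getFullYear(),
          b_Month: this.start_uepoch.getMonth() + 1},
         {duration__sum: this.duration, count: 1});
}""")

reduce = Code("""
function (key, values) {
    var r = {duration__sum: 0, count: 0};
    values.forEach(function (v) {
        r.duration__sum += v.duration__sum;
        r.count += v.count;
    });
    return r;
}""")

finalfc = Code("""
function (key, value) {
    value.duration__avg = value.count ? value.duration__sum / value.count : 0;
    return value;
}""")

out = 'aggregate_cdr_alert'  # name of the output collection (assumed)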
def create(self, request=None, **kwargs):
    logger.debug('CDR API get called')

    # the whole payload arrives as the *name* of the first POST field,
    # holding a Python dict literal; grab it and parse it
    j = 0
    post_var = {}
    for i in request.POST:
        if j == 0:
            post_var = i
        j = j + 1

    import ast
    post_var = ast.literal_eval(post_var)

    switch_id = post_var.get('switch_id')
    caller_id_number = post_var.get('caller_id_number')
    caller_id_name = post_var.get('caller_id_name')
    destination_number = post_var.get('destination_number')
    duration = post_var.get('duration')
    billsec = post_var.get('billsec')
    hangup_cause_q850 = post_var.get('hangup_cause_q850')
    accountcode = post_var.get('accountcode')
    direction = post_var.get('direction')
    uuid = post_var.get('uuid')
    remote_media_ip = post_var.get('remote_media_ip')
    start_uepoch = post_var.get('start_uepoch')
    answer_uepoch = post_var.get('answer_uepoch')
    end_uepoch = post_var.get('end_uepoch')
    mduration = post_var.get('mduration')
    billmsec = post_var.get('billmsec')
    read_codec = post_var.get('read_codec')
    write_codec = post_var.get('write_codec')
    cdr_type = post_var.get('cdr_type')

    cdr_record = {
        'switch_id': switch_id,
        'caller_id_number': caller_id_number,
        'caller_id_name': caller_id_name,
        'destination_number': destination_number,
        'duration': int(duration),
        'billsec': int(billsec),
        'hangup_cause_id': get_hangupcause_id(hangup_cause_q850),
        'accountcode': accountcode,
        'direction': direction,
        'uuid': uuid,
        'remote_media_ip': remote_media_ip,
        'start_uepoch': start_uepoch,
        'answer_uepoch': answer_uepoch,
        'end_uepoch': end_uepoch,
        'mduration': mduration,
        'billmsec': billmsec,
        'read_codec': read_codec,
        'write_codec': write_codec,
        'cdr_type': cdr_type,
    }

    # Create CDR record
    settings.DBCON[settings.MONGO_CDRSTATS['CDR_COMMON']].insert(cdr_record)
    # get last inserted cdr record
    new_obj = settings.DBCON[settings.MONGO_CDRSTATS['CDR_COMMON']].find_one()
    # print new_obj['_id']
    logger.debug('CDR API : result ok 200')
    return self.create_response(request, new_obj)
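# Usage note: since the loop above treats the first POST key as the payload,
# a client is expected to send one form field whose *name* is a Python dict
# literal that `ast.literal_eval` can parse. A sketch with the `requests`
# library; the URL and all field values are hypothetical:

import requests

cdr = ("{'switch_id': 1, 'caller_id_number': '12345', 'caller_id_name': 'john', "
       "'destination_number': '48594', 'duration': 32, 'billsec': 30, "
       "'hangup_cause_q850': 16, 'accountcode': '1000', 'direction': 'inbound', "
       "'uuid': 'e8fee8f6-40dd-11e1-964f-000c296bd875', 'remote_media_ip': '127.0.0.1', "
       "'start_uepoch': '2012-02-15 01:58:58', 'answer_uepoch': '2012-02-15 01:58:58', "
       "'end_uepoch': '2012-02-15 01:59:56', 'mduration': 32000, 'billmsec': 30000, "
       "'read_codec': 'G711', 'write_codec': 'G711', 'cdr_type': 1}")

response = requests.post(
    'http://localhost:8000/api/v1/cdr/',  # assumed endpoint
    data={cdr: ''},                       # dict literal sent as the field name
    auth=('username', 'password'))
print(response.status_code)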
def import_cdr(self, request):
    """Add custom method in django admin view to import CSV file of cdr

    **Attributes**:

        * ``form`` - CDR_FileImport
        * ``template`` - admin/cdr/switch/import_cdr.html

    **Logic Description**:

    **Important variable**:

        * total_rows - Total no. of records in the CSV file
        * cdr_record_count - No. of records which are imported from
          the CSV file
    """
    opts = Switch._meta
    app_label = opts.app_label
    rdr = ''  # will contain CSV data
    msg = ''
    success_import_list = []
    error_import_list = []
    type_error_import_list = []
    # TODO: Too much indentation in the code, refactor, fewer if/for
    # TODO: respect the DRY principle, some of the code duplicates
    # the import tasks
    if request.method == 'POST':
        form = CDR_FileImport(request.user, request.POST, request.FILES)
        if form.is_valid():
            field_list = {}
            field_notin_list = []
            for i in CDR_FIELD_LIST:
                if int(request.POST[i]) != 0:
                    field_list[i] = int(request.POST[i])
                else:
                    field_notin_list.append(i)

            # perform sorting & get unique order list
            countMap = {}
            for v in field_list.itervalues():
                countMap[v] = countMap.get(v, 0) + 1
            uni = [(k, v) for k, v in field_list.iteritems()
                   if countMap[v] == 1]
            uni = sorted(uni, key=lambda uni: uni[1])

            # if order list matched with CDR_FIELD_LIST count
            if len(uni) == len(CDR_FIELD_LIST) - len(field_notin_list):
                # To count total rows of CSV file
                records = csv.reader(request.FILES['csv_file'],
                                     delimiter=',', quotechar='"')
                total_rows = len(list(records))

                rdr = csv.reader(request.FILES['csv_file'],
                                 delimiter=',', quotechar='"')
                cdr_record_count = 0
                # Read each Row
                for row in rdr:
                    if row and row[0]:  # was `str(row[0]) > 0`, a str/int comparison
                        row = striplist(row)
                        try:
                            accountcode = ''
                            # extra fields to import
                            caller_id_name = ''
                            direction = 'outbound'
                            remote_media_ip = ''
                            answer_uepoch = ''
                            end_uepoch = ''
                            mduration = ''
                            billmsec = ''
                            write_codec = ''
                            read_codec = ''

                            get_cdr_from_row = {}
                            row_counter = 0
                            for j in uni:
                                get_cdr_from_row[j[0]] = row[j[1] - 1]
                                #get_cdr_from_row[j[0]] = row[row_counter]
                                caller_id_name = get_value_from_uni(j, row, 'caller_id_name')
                                caller_id_number = get_value_from_uni(j, row, 'caller_id_number')
                                direction = get_value_from_uni(j, row, 'direction')
                                remote_media_ip = get_value_from_uni(j, row, 'remote_media_ip')
                                answer_uepoch = get_value_from_uni(j, row, 'answer_uepoch')
                                end_uepoch = get_value_from_uni(j, row, 'end_uepoch')
                                mduration = get_value_from_uni(j, row, 'mduration')
                                billmsec = get_value_from_uni(j, row, 'billmsec')
                                read_codec = get_value_from_uni(j, row, 'read_codec')
                                write_codec = get_value_from_uni(j, row, 'write_codec')
                                row_counter = row_counter + 1

                            if len(field_notin_list) != 0:
                                for i in field_notin_list:
                                    if i == 'accountcode':
                                        accountcode = int(request.POST[i + "_csv"])
                                        if not accountcode:
                                            accountcode = int(get_cdr_from_row['accountcode'])

                            # Mandatory fields to import
                            switch_id = int(request.POST['switch'])
                            caller_id_number = get_cdr_from_row['caller_id_number']
                            duration = int(get_cdr_from_row['duration'])
                            billsec = int(get_cdr_from_row['billsec'])
                            hangup_cause_id = \
                                get_hangupcause_id(int(get_cdr_from_row['hangup_cause_id']))
                            start_uepoch = \
                                datetime.datetime.fromtimestamp(int(get_cdr_from_row['start_uepoch']))
                            destination_number = get_cdr_from_row['destination_number']
                            uuid = get_cdr_from_row['uuid']
                            destination_data = chk_destination(destination_number)
                            authorized = destination_data['authorized']
                            country_id = destination_data['country_id']

                            # Extra fields to import
                            if answer_uepoch:
                                answer_uepoch = \
                                    datetime.datetime.fromtimestamp(int(answer_uepoch[:10]))
                            if end_uepoch:
                                end_uepoch = \
                                    datetime.datetime.fromtimestamp(int(end_uepoch[:10]))

                            # Prepare global CDR
                            cdr_record = {
                                'switch_id': switch_id,
                                'caller_id_number': caller_id_number,
                                'caller_id_name': caller_id_name,
                                'destination_number': destination_number,
                                'duration': duration,
                                'billsec': billsec,
                                'hangup_cause_id': hangup_cause_id,
                                'accountcode': accountcode,
                                'direction': direction,
                                'uuid': uuid,
                                'remote_media_ip': remote_media_ip,
                                'start_uepoch': start_uepoch,
                                'answer_uepoch': answer_uepoch,
                                'end_uepoch': end_uepoch,
                                'mduration': mduration,
                                'billmsec': billmsec,
                                'read_codec': read_codec,
                                'write_codec': write_codec,
                                'cdr_type': 'CSV_IMPORT',
                                'cdr_object_id': '',
                                'country_id': country_id,
                                'authorized': authorized,
                            }

                            try:
                                # check if cdr is already existing in cdr_common
                                cdr_data = settings.DBCON[settings.MG_CDR_COMMON]
                                query_var = {}
                                query_var['uuid'] = uuid
                                record_count = cdr_data.find(query_var).count()
                                if record_count >= 1:
                                    msg = _('CDR already exists !!')
                                    error_import_list.append(row)
                                else:
                                    # if not, insert record
                                    # record global CDR
                                    CDR_COMMON.insert(cdr_record)
                                    # start_uepoch = get_cdr_from_row['start_uepoch']
                                    daily_date = datetime.datetime.fromtimestamp(
                                        int(get_cdr_from_row['start_uepoch'][:10]))
                                    # insert daily analytic record
                                    # (was `switch.id`, which is undefined in this scope)
                                    create_daily_analytic(
                                        daily_date, switch_id, country_id,
                                        accountcode, hangup_cause_id, duration)
                                    # MONTHLY_ANALYTIC
                                    # insert monthly analytic record
                                    create_monthly_analytic(
                                        daily_date, start_uepoch, switch_id,
                                        country_id, accountcode, duration)
                                    cdr_record_count = cdr_record_count + 1
                                    msg = \
                                        _('%(cdr_record_count)s Cdr(s) are uploaded, out of %(total_rows)s row(s) !!') \
                                        % {'cdr_record_count': cdr_record_count,
                                           'total_rows': total_rows}
                                    success_import_list.append(row)
                            except:
                                msg = _("Error : invalid value for import")
                                type_error_import_list.append(row)
                        except:
                            msg = _("Error : invalid value for import")
                            type_error_import_list.append(row)

                if cdr_record_count > 0:
                    apply_index()
                    # Apply index
                    DAILY_ANALYTIC.ensure_index([("metadata.date", -1)])
                    CDR_COMMON.ensure_index([("start_uepoch", -1)])
            else:
                msg = _("Error : importing several times the same column")
    else:
        form = CDR_FileImport(request.user)

    ctx = RequestContext(request, {
        'title': _('Import CDR'),
        'form': form,
        'opts': opts,
        'model_name': opts.object_name.lower(),
        'app_label': app_label,
        'rdr': rdr,
        'msg': msg,
        'success_import_list': success_import_list,
        'error_import_list': error_import_list,
        'type_error_import_list': type_error_import_list,
        'CDR_FIELD_LIST': list(CDR_FIELD_LIST),
        'CDR_FIELD_LIST_NUM': list(CDR_FIELD_LIST_NUM),
    })
    template = 'admin/cdr/switch/import_cdr.html'
    return render_to_response(template, context_instance=ctx)
def func_importcdr_aggregate(shell, importcdr_handler, switch, ipaddress):
    """
    Function goes through the current mongodb and will:
        - create CDR_COMMON
        - build the pre-aggregate
    """
    # We limit the import tasks to a maximum - 1000.
    # This will reduce the speed but that's the only way to make sure
    # we don't have the same tasks running several times.
    PAGE_SIZE = 1000
    count_import = 0
    local_count_import = 0
    # Store cdr in list to insert by bulk
    cdr_bulk_record = []

    result = importcdr_handler.find(
        {
            '$or': [{'import_cdr': {'$exists': False}}, {'import_cdr': 0}]
        },
        {
            "callflow.caller_profile.caller_id_number": 1,
            "callflow.caller_profile.caller_id_name": 1,
            "callflow.caller_profile.destination_number": 1,
            "variables.duration": 1,
            "variables.billsec": 1,
            "variables.hangup_cause_q850": 1,
            "variables.accountcode": 1,
            "variables.direction": 1,
            "variables.uuid": 1,
            "variables.remote_media_ip": 1,
            "variables.start_uepoch": 1,
            "variables.answer_uepoch": 1,
            "variables.end_uepoch": 1,
            "variables.mduration": 1,
            "variables.billmsec": 1,
            "variables.read_codec": 1,
            "variables.write_codec": 1,
            "import_cdr_monthly": 1,
            "import_cdr_daily": 1,
            "import_cdr_hourly": 1,
        }).limit(PAGE_SIZE)

    # Retrieve FreeSWITCH CDRs
    for cdr in result:
        # found a result, so let's look later for more records
        start_uepoch = datetime.datetime.fromtimestamp(
            int(str(cdr['variables']['start_uepoch'])[:10]))
        answer_uepoch = ''
        if cdr['variables']['answer_uepoch']:
            answer_uepoch = datetime.datetime.fromtimestamp(
                int(str(cdr['variables']['answer_uepoch'])[:10]))
        end_uepoch = ''
        if cdr['variables']['end_uepoch']:
            end_uepoch = datetime.datetime.fromtimestamp(
                int(str(cdr['variables']['end_uepoch'])[:10]))

        # Check Destination number
        print(cdr)
        destination_number = cdr['callflow'][0]['caller_profile']['destination_number']
        if len(destination_number) <= settings.INTERNAL_CALL:
            authorized = 1
            country_id = 999
        else:
            destination_data = chk_destination(destination_number)
            authorized = destination_data['authorized']
            country_id = destination_data['country_id']

        hangup_cause_id = get_hangupcause_id(cdr['variables']['hangup_cause_q850'])

        # Retrieve Element from CDR Object
        data_element = get_element(cdr)
        accountcode = data_element['accountcode']
        remote_media_ip = data_element['remote_media_ip']
        caller_id_number = data_element['caller_id_number']
        caller_id_name = data_element['caller_id_name']
        duration = data_element['duration']
        billsec = data_element['billsec']
        direction = data_element['direction']
        uuid = data_element['uuid']
        mduration = data_element['mduration']
        billmsec = data_element['billmsec']
        read_codec = data_element['read_codec']
        write_codec = data_element['write_codec']

        # Prepare global CDR
        cdr_record = generate_global_cdr_record(
            switch.id, caller_id_number, caller_id_name, destination_number,
            duration, billsec, hangup_cause_id, accountcode, direction,
            uuid, remote_media_ip, start_uepoch, answer_uepoch, end_uepoch,
            mduration, billmsec, read_codec, write_codec,
            CDR_TYPE["freeswitch"], cdr['_id'], country_id, authorized)

        # Append cdr to bulk_cdr list
        cdr_bulk_record.append(cdr_record)

        # Count CDR import
        count_import = count_import + 1
        local_count_import = local_count_import + 1

        # print_shell(shell, "Sync CDR (cid:%s, dest:%s, dur:%s, "
        #             " hg:%s,country:%s, auth:%s, row_count:%s)" % (
        #             caller_id_number,
        #             destination_number,
        #             duration,
        #             cdr['variables']['hangup_cause_q850'],
        #             country_id,
        #             authorized,
        #             count_import))

        date_start_uepoch = int(str(cdr['variables']['start_uepoch'])[:10])
        common_function_to_create_analytic(date_start_uepoch, start_uepoch,
                                           switch.id, country_id, accountcode,
                                           hangup_cause_id, duration)

        # Flag the CDR as imported
        importcdr_handler.update(
            {'_id': cdr['_id']},
            {
                '$set': {
                    'import_cdr': 1,
                }
            }
        )

    if local_count_import > 0:
        # Bulk cdr list insert into cdr_common
        CDR_COMMON.insert(cdr_bulk_record)
        # Reset counter to zero
        local_count_import = 0
        print_shell(shell, "Switch(%s) - currently imported CDRs:%d" %
                    (ipaddress, count_import))

    print_shell(shell, "Import on Switch(%s) - Total Record(s) imported:%d" %
                (ipaddress, count_import))
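# `print_shell` and `log_print` are tiny logging helpers imported from the
# project's common module. Sketches consistent with how they are called above
# (the shell flag decides whether to echo to stdout; the logger is optional):

def print_shell(shell, message):
    """Print the message only when running interactively (sketch)."""
    if shell:
        print(message)

def log_print(logger, shell, message):
    """Log the message if a logger is given, and echo it in shell mode (sketch)."""
    if logger:
        logger.info(message)
    print_shell(shell, message)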
def create(self, request=None, **kwargs):
    logger.debug('CDR API get called')

    j = 0
    post_var = {}
    for i in request.POST:
        if j == 0:
            post_var = i
        j = j + 1

    import ast
    post_var = ast.literal_eval(post_var)

    switch_id = post_var.get('switch_id')
    caller_id_number = post_var.get('caller_id_number')
    caller_id_name = post_var.get('caller_id_name')
    destination_number = post_var.get('destination_number')
    duration = post_var.get('duration')
    billsec = post_var.get('billsec')
    hangup_cause_q850 = post_var.get('hangup_cause_q850')
    accountcode = post_var.get('accountcode')
    direction = post_var.get('direction')
    uuid = post_var.get('uuid')
    remote_media_ip = post_var.get('remote_media_ip')
    start_uepoch = post_var.get('start_uepoch')
    answer_uepoch = post_var.get('answer_uepoch')
    end_uepoch = post_var.get('end_uepoch')
    mduration = post_var.get('mduration')
    billmsec = post_var.get('billmsec')
    read_codec = post_var.get('read_codec')
    write_codec = post_var.get('write_codec')
    cdr_type = post_var.get('cdr_type')

    cdr_record = {
        'switch_id': switch_id,
        'caller_id_number': caller_id_number,
        'caller_id_name': caller_id_name,
        'destination_number': destination_number,
        'duration': int(duration),
        'billsec': int(billsec),
        'hangup_cause_id': get_hangupcause_id(hangup_cause_q850),
        'accountcode': accountcode,
        'direction': direction,
        'uuid': uuid,
        'remote_media_ip': remote_media_ip,
        'start_uepoch': start_uepoch,
        'answer_uepoch': answer_uepoch,
        'end_uepoch': end_uepoch,
        'mduration': mduration,
        'billmsec': billmsec,
        'read_codec': read_codec,
        'write_codec': write_codec,
        'cdr_type': cdr_type,
    }

    # Create CDR record
    settings.DBCON[settings.MG_CDR_COMMON].insert(cdr_record)
    # get last inserted cdr record
    new_obj = settings.DBCON[settings.MG_CDR_COMMON].find_one()
    # print new_obj['_id']
    logger.debug('CDR API : result ok 200')
    return self.create_response(request, new_obj)
def import_cdr(self, request):
    """Add custom method in django admin view to import CSV file of cdr

    **Attributes**:

        * ``form`` - CDR_FileImport
        * ``template`` - admin/cdr/switch/import_cdr.html

    **Logic Description**:

    **Important variable**:

        * total_rows - Total no. of records in the CSV file
        * cdr_record_count - No. of records which are imported from
          the CSV file
    """
    opts = Switch._meta
    app_label = opts.app_label
    rdr = ""  # will contain CSV data
    msg = ""
    success_import_list = []
    error_import_list = []
    type_error_import_list = []
    # TODO: Too much indentation in the code, refactor, fewer if/for
    # TODO: respect the DRY principle, some of the code is duplicated
    if request.method == "POST":
        form = CDR_FileImport(request.user, request.POST, request.FILES)
        if form.is_valid():
            field_list = {}
            field_notin_list = []
            for i in CDR_FIELD_LIST:
                if int(request.POST[i]) != 0:
                    field_list[i] = int(request.POST[i])
                else:
                    field_notin_list.append(i)

            # perform sorting & get unique order list
            countMap = {}
            for v in field_list.itervalues():
                countMap[v] = countMap.get(v, 0) + 1
            uni = [(k, v) for k, v in field_list.iteritems() if countMap[v] == 1]
            uni = sorted(uni, key=lambda uni: uni[1])

            # if order list matched with CDR_FIELD_LIST count
            if len(uni) == len(CDR_FIELD_LIST) - len(field_notin_list):
                # To count total rows of CSV file
                records = csv.reader(request.FILES["csv_file"], delimiter=",", quotechar='"')
                total_rows = len(list(records))

                rdr = csv.reader(request.FILES["csv_file"], delimiter=",", quotechar='"')
                cdr_record_count = 0
                # Read each Row
                for row in rdr:
                    if row and row[0]:  # was `str(row[0]) > 0`, a str/int comparison
                        row = striplist(row)
                        try:
                            accountcode = ""
                            # extra fields to import
                            caller_id_name = ""
                            direction = "outbound"
                            remote_media_ip = ""
                            answer_uepoch = ""
                            end_uepoch = ""
                            mduration = ""
                            billmsec = ""
                            write_codec = ""
                            read_codec = ""

                            get_cdr_from_row = {}
                            row_counter = 0
                            for j in uni:
                                get_cdr_from_row[j[0]] = row[j[1] - 1]
                                # get_cdr_from_row[j[0]] = row[row_counter]
                                caller_id_name = get_value_from_uni(j, row, "caller_id_name")
                                caller_id_number = get_value_from_uni(j, row, "caller_id_number")
                                direction = get_value_from_uni(j, row, "direction")
                                remote_media_ip = get_value_from_uni(j, row, "remote_media_ip")
                                answer_uepoch = get_value_from_uni(j, row, "answer_uepoch")
                                end_uepoch = get_value_from_uni(j, row, "end_uepoch")
                                mduration = get_value_from_uni(j, row, "mduration")
                                billmsec = get_value_from_uni(j, row, "billmsec")
                                read_codec = get_value_from_uni(j, row, "read_codec")
                                write_codec = get_value_from_uni(j, row, "write_codec")
                                row_counter = row_counter + 1

                            if len(field_notin_list) != 0:
                                for i in field_notin_list:
                                    if i == "accountcode":
                                        accountcode = request.POST[i + "_csv"]
                                        if not accountcode:
                                            accountcode = get_cdr_from_row["accountcode"]

                            # Mandatory fields to import
                            switch_id = int(request.POST["switch"])
                            caller_id_number = get_cdr_from_row["caller_id_number"]
                            duration = int(get_cdr_from_row["duration"])
                            billsec = int(get_cdr_from_row["billsec"])
                            hangup_cause_id = get_hangupcause_id(int(get_cdr_from_row["hangup_cause_id"]))
                            start_uepoch = datetime.datetime.fromtimestamp(int(get_cdr_from_row["start_uepoch"]))
                            destination_number = get_cdr_from_row["destination_number"]
                            uuid = get_cdr_from_row["uuid"]
                            destination_data = chk_destination(destination_number)
                            authorized = destination_data["authorized"]
                            country_id = destination_data["country_id"]

                            # Extra fields to import
                            if answer_uepoch:
                                answer_uepoch = datetime.datetime.fromtimestamp(int(answer_uepoch[:10]))
                            if end_uepoch:
                                end_uepoch = datetime.datetime.fromtimestamp(int(end_uepoch[:10]))

                            # Prepare global CDR
                            cdr_record = {
                                "switch_id": int(request.POST["switch"]),
                                "caller_id_number": caller_id_number,
                                "caller_id_name": caller_id_name,
                                "destination_number": destination_number,
                                "duration": duration,
                                "billsec": billsec,
                                "hangup_cause_id": hangup_cause_id,
                                "accountcode": accountcode,
                                "direction": direction,
                                "uuid": uuid,
                                "remote_media_ip": remote_media_ip,
                                "start_uepoch": start_uepoch,
                                "answer_uepoch": answer_uepoch,
                                "end_uepoch": end_uepoch,
                                "mduration": mduration,
                                "billmsec": billmsec,
                                "read_codec": read_codec,
                                "write_codec": write_codec,
                                "cdr_type": "CSV_IMPORT",
                                "cdr_object_id": "",
                                "country_id": country_id,
                                "authorized": authorized,
                            }

                            try:
                                # check if cdr is already existing in cdr_common
                                cdr_data = settings.DBCON[settings.MONGO_CDRSTATS["CDR_COMMON"]]
                                query_var = {}
                                query_var["uuid"] = uuid
                                record_count = cdr_data.find(query_var).count()
                                if record_count >= 1:
                                    msg = _("CDR already exists !!")
                                    error_import_list.append(row)
                                else:
                                    # if not, insert record
                                    # record global CDR
                                    CDR_COMMON.insert(cdr_record)
                                    # start_uepoch = get_cdr_from_row['start_uepoch']
                                    daily_date = datetime.datetime.fromtimestamp(
                                        int(get_cdr_from_row["start_uepoch"][:10])
                                    )
                                    # insert daily analytic record
                                    create_daily_analytic(
                                        daily_date, switch_id, country_id, accountcode, hangup_cause_id, duration
                                    )
                                    # MONTHLY_ANALYTIC
                                    # insert monthly analytic record
                                    create_monthly_analytic(
                                        daily_date, start_uepoch, switch_id, country_id, accountcode, duration
                                    )
                                    cdr_record_count = cdr_record_count + 1
                                    msg = _(
                                        "%(cdr_record_count)s Cdr(s) are uploaded, out of %(total_rows)s row(s) !!"
                                    ) % {"cdr_record_count": cdr_record_count, "total_rows": total_rows}
                                    success_import_list.append(row)
                            except:
                                msg = _("Error : invalid value for import")
                                type_error_import_list.append(row)
                        except:
                            msg = _("Error : invalid value for import")
                            type_error_import_list.append(row)

                if cdr_record_count > 0:
                    apply_index()
                    # Apply index
                    DAILY_ANALYTIC.ensure_index([("metadata.date", -1)])
                    CDR_COMMON.ensure_index([("start_uepoch", -1)])
            else:
                msg = _("Error : importing several times the same column")
    else:
        form = CDR_FileImport(request.user)

    ctx = RequestContext(
        request,
        {
            "title": _("Import CDR"),
            "form": form,
            "opts": opts,
            "model_name": opts.object_name.lower(),
            "app_label": app_label,
            "rdr": rdr,
            "msg": msg,
            "success_import_list": success_import_list,
            "error_import_list": error_import_list,
            "type_error_import_list": type_error_import_list,
            "CDR_FIELD_LIST": list(CDR_FIELD_LIST),
            "CDR_FIELD_LIST_NUM": list(CDR_FIELD_LIST_NUM),
        },
    )
    template = "admin/cdr/switch/import_cdr.html"
    return render_to_response(template, context_instance=ctx)
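# `striplist` is a small CSV helper imported from elsewhere; every row is
# run through it before use. A sketch of the obvious implementation (an
# assumption, not the project's code):

def striplist(row):
    """Strip surrounding whitespace from every cell of a CSV row."""
    return [x.strip() for x in row]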
def func_importcdr_aggregate(shell, importcdr_handler, switch, ipaddress):
    """
    Function goes through the current mongodb and will:
        - create CDR_COMMON
        - build the pre-aggregate
    """
    # We limit the import tasks to a maximum - 1000.
    # This will reduce the speed but that's the only way to make sure
    # we don't have the same tasks running several times.
    PAGE_SIZE = 1000
    count_import = 0
    local_count_import = 0
    # Store cdr in list to insert by bulk
    cdr_bulk_record = []

    result = importcdr_handler.find(
        {
            '$or': [{'import_cdr': {'$exists': False}}, {'import_cdr': 0}]
        },
        {
            "callflow.caller_profile.caller_id_number": 1,
            "callflow.caller_profile.caller_id_name": 1,
            "callflow.caller_profile.destination_number": 1,
            "variables.duration": 1,
            "variables.billsec": 1,
            "variables.hangup_cause_q850": 1,
            "variables.accountcode": 1,
            "variables.direction": 1,
            "variables.uuid": 1,
            "variables.remote_media_ip": 1,
            "variables.start_uepoch": 1,
            #"variables.answer_uepoch": 1,
            #"variables.end_uepoch": 1,
            #"variables.mduration": 1,
            #"variables.billmsec": 1,
            #"variables.read_codec": 1,
            #"variables.write_codec": 1,
            "import_cdr_monthly": 1,
            "import_cdr_daily": 1,
            "import_cdr_hourly": 1,
        }).limit(PAGE_SIZE)

    # Retrieve FreeSWITCH CDRs
    for cdr in result:
        # found a result, so let's look later for more records
        start_uepoch = datetime.datetime.fromtimestamp(
            int(cdr['variables']['start_uepoch'][:10]))

        # Check Destination number
        destination_number = cdr['callflow']['caller_profile']['destination_number']
        if len(destination_number) <= settings.INTERNAL_CALL:
            authorized = 1
            country_id = 999
        else:
            destination_data = chk_destination(destination_number)
            authorized = destination_data['authorized']
            country_id = destination_data['country_id']

        hangup_cause_id = get_hangupcause_id(cdr['variables']['hangup_cause_q850'])

        # Retrieve Element from CDR Object
        data_element = get_element(cdr)
        accountcode = data_element['accountcode']
        remote_media_ip = data_element['remote_media_ip']
        caller_id_number = data_element['caller_id_number']
        caller_id_name = data_element['caller_id_name']
        duration = data_element['duration']
        billsec = data_element['billsec']
        direction = data_element['direction']
        uuid = data_element['uuid']

        # Prepare global CDR
        cdr_record = {
            'switch_id': switch.id,
            'caller_id_number': caller_id_number,
            'caller_id_name': caller_id_name,
            'destination_number': destination_number,
            'duration': duration,
            'billsec': billsec,
            'hangup_cause_id': hangup_cause_id,
            'accountcode': accountcode,
            'direction': direction,
            'uuid': uuid,
            'remote_media_ip': remote_media_ip,
            'start_uepoch': start_uepoch,
            #'answer_uepoch': answer_uepoch,
            #'end_uepoch': end_uepoch,
            #'mduration': cdr['variables']['mduration'],
            #'billmsec': cdr['variables']['billmsec'],
            #'read_codec': cdr['variables']['read_codec'],
            #'write_codec': cdr['variables']['write_codec'],
            'cdr_type': CDR_TYPE["freeswitch"],
            'cdr_object_id': cdr['_id'],
            'country_id': country_id,
            'authorized': authorized,
        }

        # Append cdr to bulk_cdr list
        cdr_bulk_record.append(cdr_record)

        # Count CDR import
        count_import = count_import + 1
        local_count_import = local_count_import + 1

        # print_shell(shell, "Sync CDR (cid:%s, dest:%s, dur:%s, "
        #             " hg:%s,country:%s, auth:%s, row_count:%s)" % (
        #             caller_id_number,
        #             destination_number,
        #             duration,
        #             cdr['variables']['hangup_cause_q850'],
        #             country_id,
        #             authorized,
        #             count_import))

        # DAILY_ANALYTIC
        daily_date = datetime.datetime.fromtimestamp(
            int(cdr['variables']['start_uepoch'][:10]))
        # insert daily analytic record
        create_daily_analytic(daily_date, switch.id, country_id,
                              accountcode, hangup_cause_id, duration)

        # MONTHLY_ANALYTIC
        # insert monthly analytic record
        create_monthly_analytic(daily_date, start_uepoch, switch.id,
                                country_id, accountcode, duration)

        # Flag the CDR as imported
        importcdr_handler.update(
            {'_id': cdr['_id']},
            {
                '$set': {
                    'import_cdr': 1,
                }
            }
        )

    if local_count_import > 0:
        # Bulk cdr list insert into cdr_common
        CDR_COMMON.insert(cdr_bulk_record)
        # Reset counter to zero
        local_count_import = 0
        print_shell(shell, "Switch(%s) - currently imported CDRs:%d" %
                    (ipaddress, count_import))

    print_shell(shell, "Import on Switch(%s) - Total Record(s) imported:%d" %
                (ipaddress, count_import))
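# `create_daily_analytic` / `create_monthly_analytic` maintain the
# pre-aggregated collections. A sketch of the daily one, assuming the usual
# upsert-with-$inc pre-aggregation pattern and the `metadata.date` key that
# the ensure_index calls elsewhere in this file reference; the exact field
# layout is an assumption:

def create_daily_analytic(daily_date, switch_id, country_id,
                          accountcode, hangup_cause_id, duration):
    """Hypothetical sketch: upsert one call into the daily rollup."""
    DAILY_ANALYTIC.update(
        {
            'metadata.date': daily_date.strftime('%Y-%m-%d'),
            'metadata.switch_id': switch_id,
            'metadata.country_id': country_id,
            'metadata.accountcode': accountcode,
            'metadata.hangup_cause_id': hangup_cause_id,
        },
        {
            '$inc': {
                'call_daily': 1,
                'duration_daily': int(duration),
            }
        },
        upsert=True)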
def run_alarm(alarm_obj, logger):
    """
    Perform the Alarm check
    """
    running_alarm_test_data = {
        'running_alarm_status': True,
        'current_value': None,
        'previous_value': None,
    }
    user = False
    switch_id = 0
    if alarm_obj.type == ALARM_TYPE.ALOC:
        # ALOC (Average Length Of Call)
        logger.debug('ALOC (Average Length Of Call)')
        # Return the start and end dates of the previous/current day
        date_dict = get_start_end_date(alarm_obj.alert_condition_add_on)

        # Previous date data
        start_date = date_dict['p_start_date']
        end_date = date_dict['p_end_date']
        daily_data = get_report_cdr_per_switch(user, 'day', start_date, end_date, switch_id)
        total_calls = daily_data["nbcalls"]["total"]
        total_duration = daily_data["duration"]["total"]
        # total_calls should not be 0
        total_calls = 1 if total_calls == 0 else total_calls
        ACD = math.floor(total_duration / total_calls)

        if alarm_obj.alert_condition in (ALERT_CONDITION.IS_LESS_THAN,
                                         ALERT_CONDITION.IS_GREATER_THAN):
            running_alarm_test_data['previous_value'] = ACD
            chk_alert_value(alarm_obj, ACD)
        else:
            previous_date_duration = ACD

        # Current date data
        start_date = date_dict['c_start_date']
        end_date = date_dict['c_end_date']
        daily_data = get_report_cdr_per_switch(user, 'day', start_date, end_date, switch_id)
        total_calls = daily_data["nbcalls"]["total"]
        total_duration = daily_data["duration"]["total"]
        # total_calls should not be 0
        total_calls = 1 if total_calls == 0 else total_calls
        ACD = math.floor(total_duration / total_calls)

        if alarm_obj.alert_condition in (ALERT_CONDITION.IS_LESS_THAN,
                                         ALERT_CONDITION.IS_GREATER_THAN):
            running_alarm_test_data['current_value'] = ACD
            chk_alert_value(alarm_obj, ACD)
        else:
            current_date_duration = ACD
            running_alarm_test_data['current_value'] = ACD
            running_alarm_test_data['previous_value'] = previous_date_duration
            chk_alert_value(alarm_obj, current_date_duration, previous_date_duration)

    elif alarm_obj.type == ALARM_TYPE.ASR:
        # ASR (Answer Seize Ratio)
        logger.debug('ASR (Answer Seize Ratio)')
        # Return the start and end dates of the previous/current day
        date_dict = get_start_end_date(alarm_obj.alert_condition_add_on)
        # hangup_cause_q850 - 16 - NORMAL_CLEARING
        hangup_cause_q850 = 16

        # Previous date data
        start_date = date_dict['p_start_date']
        end_date = date_dict['p_end_date']
        limit = 10
        hangup_cause_id = False
        # TODO: regroup the 2 calls to custom_sql_aggr_top_hangup into a single query
        (hangup_cause_data, total_calls, total_duration, total_billsec,
         total_buy_cost, total_sell_cost) = \
            custom_sql_aggr_top_hangup(user, switch_id, hangup_cause_id, limit,
                                       start_date, end_date)
        pre_total_record = total_calls

        hangup_cause_id = get_hangupcause_id(hangup_cause_q850)
        (hangup_cause_data, total_calls, total_duration, total_billsec,
         total_buy_cost, total_sell_cost) = \
            custom_sql_aggr_top_hangup(user, switch_id, hangup_cause_id, limit,
                                       start_date, end_date)
        pre_total_answered_record = total_calls

        # pre_total_record should not be 0; use true division so
        # the ratio is not truncated under Python 2
        pre_total_record = 1 if pre_total_record == 0 else pre_total_record
        previous_asr = float(pre_total_answered_record) / pre_total_record

        if alarm_obj.alert_condition in (ALERT_CONDITION.IS_LESS_THAN,
                                         ALERT_CONDITION.IS_GREATER_THAN):
            running_alarm_test_data['previous_value'] = previous_asr
            chk_alert_value(alarm_obj, previous_asr)
        # otherwise previous_asr is kept for the comparison against the current value

        # Current date data
        start_date = date_dict['c_start_date']
        end_date = date_dict['c_end_date']
        limit = 10
        hangup_cause_id = False
        # TODO: regroup the 2 calls to custom_sql_aggr_top_hangup into a single query
        (hangup_cause_data, total_calls, total_duration, total_billsec,
         total_buy_cost, total_sell_cost) = \
            custom_sql_aggr_top_hangup(user, switch_id, hangup_cause_id, limit,
                                       start_date, end_date)
        cur_total_record = total_calls

        hangup_cause_id = get_hangupcause_id(hangup_cause_q850)
        (hangup_cause_data, total_calls, total_duration, total_billsec,
         total_buy_cost, total_sell_cost) = \
            custom_sql_aggr_top_hangup(user, switch_id, hangup_cause_id, limit,
                                       start_date, end_date)
        cur_total_answered_record = total_calls

        # cur_total_record should not be 0
        cur_total_record = 1 if cur_total_record == 0 else cur_total_record
        current_asr = float(cur_total_answered_record) / cur_total_record

        if alarm_obj.alert_condition in (ALERT_CONDITION.IS_LESS_THAN,
                                         ALERT_CONDITION.IS_GREATER_THAN):
            running_alarm_test_data['current_value'] = current_asr
            chk_alert_value(alarm_obj, current_asr)
        else:
            running_alarm_test_data['current_value'] = current_asr
            running_alarm_test_data['previous_value'] = previous_asr
            chk_alert_value(alarm_obj, current_asr, previous_asr)

    return running_alarm_test_data
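A hedged usage sketch: run_alarm returns the running_alarm_test_data dict, so a periodic task could evaluate every alarm and log the measured values. The Alarm model and its status filter below are assumptions, not confirmed by the source.

import logging

def run_all_alarms():
    # Hypothetical periodic task; the Alarm model and the meaning of
    # status=1 ("active") are assumptions for illustration.
    logger = logging.getLogger(__name__)
    for alarm in Alarm.objects.filter(status=1):
        result = run_alarm(alarm, logger)
        logger.info("alarm %s -> current:%s previous:%s",
                    alarm.pk, result['current_value'], result['previous_value'])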
callerid_number = callerid
channel = row[3]
duration = set_int_default(row[4], 0)
billsec = set_int_default(row[5], 0)
ast_disposition = row[6]

try:
    id_disposition = dic_disposition.get(
        ast_disposition.encode("utf-8"), 0)
    transdisposition = DISPOSITION_TRANSLATION[id_disposition]
except (AttributeError, KeyError):
    # The disposition is missing or unknown
    transdisposition = 0

hangup_cause_id = get_hangupcause_id(transdisposition)
accountcode = set_int_default(row[7], '')
uniqueid = row[8]
start_uepoch = datetime.fromtimestamp(int(row[1]))

# Check the destination number
destination_number = row[0]
destination_data = chk_destination(destination_number)
authorized = destination_data['authorized']
country_id = destination_data['country_id']

# Prepare the global CDR
cdr_record = {
    'switch_id': switch.id,
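The fragment above depends on dic_disposition and DISPOSITION_TRANSLATION being defined elsewhere. The shapes below are illustrative guesses only: an Asterisk disposition string mapped to an internal id, then translated to a Q.850 hangup cause code for get_hangupcause_id.

# Illustrative only: plausible shapes for the two mappings used above;
# the real definitions live elsewhere in the project.
dic_disposition = {
    'ANSWERED': 1,
    'BUSY': 2,
    'NO ANSWER': 3,
    'FAILED': 4,
}
# internal disposition id -> Q.850 hangup cause code
DISPOSITION_TRANSLATION = {
    0: 0,
    1: 16,  # ANSWERED -> NORMAL_CLEARING
    2: 17,  # BUSY -> USER_BUSY
    3: 19,  # NO ANSWER -> NO_ANSWER
    4: 41,  # FAILED -> TEMPORARY_FAILURE
}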
def run_alarm(alarm_obj, logger):
    """Check an alarm object against the previous/current day statistics"""
    if alarm_obj.type == 1:
        # ALOC (average length of call)
        logger.debug('ALOC (average length of call)')
        # Return the start and end dates of the previous/current day
        dt_list = get_start_end_date(alarm_obj.alert_condition_add_on)

        # Previous date data
        query_var = {}
        query_var['start_uepoch'] = {
            '$gte': dt_list['p_start_date'],
            '$lte': dt_list['p_end_date']
        }
        # Previous date map_reduce
        pre_total_data = cdr_data.map_reduce(map, reduce, out,
                                             query=query_var, finalize=finalfc)
        pre_total_data = pre_total_data.find().sort([('_id.a_Year', -1),
                                                     ('_id.b_Month', -1)])
        pre_day_data = {}
        for doc in pre_total_data:
            pre_date = dt_list['p_start_date']
            pre_day_data[pre_date.strftime('%Y-%m-%d')] = \
                doc['value']['duration__avg']
            if alarm_obj.alert_condition in (1, 2):  # IS_LESS_THAN / IS_GREATER_THAN
                chk_alert_value(alarm_obj, doc['value']['duration__avg'])
            else:
                previous_date_duration = doc['value']['duration__avg']

        # Current date data
        query_var = {}
        query_var['start_uepoch'] = {
            '$gte': dt_list['c_start_date'],
            '$lte': dt_list['c_end_date']
        }
        # Current date map_reduce
        cur_total_data = cdr_data.map_reduce(map, reduce, out,
                                             query=query_var, finalize=finalfc)
        cur_total_data = cur_total_data.find().sort([('_id.a_Year', -1),
                                                     ('_id.b_Month', -1)])
        cur_day_data = {}
        for doc in cur_total_data:
            cur_date = dt_list['c_start_date']
            cur_day_data[cur_date.strftime('%Y-%m-%d')] = \
                doc['value']['duration__avg']
            if alarm_obj.alert_condition in (1, 2):  # IS_LESS_THAN / IS_GREATER_THAN
                chk_alert_value(alarm_obj, doc['value']['duration__avg'])
            else:
                current_date_duration = doc['value']['duration__avg']
                chk_alert_value(alarm_obj, current_date_duration,
                                previous_date_duration)

    if alarm_obj.type == 2:
        # ASR (Answer Seize Ratio)
        logger.debug('ASR (Answer Seize Ratio)')
        # Return the start and end dates of the previous/current day
        dt_list = get_start_end_date(alarm_obj.alert_condition_add_on)
        # hangup_cause_q850 - 16 - NORMAL_CLEARING
        hangup_cause_q850 = 16

        # Previous date data
        query_var = {}
        query_var['start_uepoch'] = {
            '$gte': dt_list['p_start_date'],
            '$lte': dt_list['p_end_date']
        }
        pre_total_record = cdr_data.find(query_var).count()
        query_var['hangup_cause_id'] = get_hangupcause_id(hangup_cause_q850)
        pre_total_answered_record = cdr_data.find(query_var).count()
        # Avoid a division by zero when no CDRs were found, and use
        # true division so the ratio is not truncated under Python 2
        pre_total_record = 1 if pre_total_record == 0 else pre_total_record
        previous_asr = float(pre_total_answered_record) / pre_total_record
        if alarm_obj.alert_condition in (1, 2):  # IS_LESS_THAN / IS_GREATER_THAN
            chk_alert_value(alarm_obj, previous_asr)
        # otherwise previous_asr is kept for the comparison below

        # Current date data
        query_var = {}
        query_var['start_uepoch'] = {
            '$gte': dt_list['c_start_date'],
            '$lte': dt_list['c_end_date']
        }
        cur_total_record = cdr_data.find(query_var).count()
        query_var['hangup_cause_id'] = get_hangupcause_id(hangup_cause_q850)
        cur_total_answered_record = cdr_data.find(query_var).count()
        cur_total_record = 1 if cur_total_record == 0 else cur_total_record
        current_asr = float(cur_total_answered_record) / cur_total_record
        if alarm_obj.alert_condition in (1, 2):  # IS_LESS_THAN / IS_GREATER_THAN
            chk_alert_value(alarm_obj, current_asr)
        else:
            chk_alert_value(alarm_obj, current_asr, previous_asr)
    return True
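This older map/reduce version relies on map, reduce, finalfc and out being defined at module level. A sketch of what those Code objects could look like, given that results are read back as doc['value']['duration__avg'] and sorted on _id.a_Year/_id.b_Month; the exact emitted fields are inferred, not confirmed.

from bson.code import Code

# Sketch of the map/reduce/finalize functions the call above expects;
# field names are inferred from how the results are consumed.
map = Code("function () {"
           "    emit({a_Year: this.start_uepoch.getFullYear(),"
           "          b_Month: this.start_uepoch.getMonth() + 1},"
           "         {duration__sum: this.duration, count: 1});"
           "}")
reduce = Code("function (key, values) {"
              "    var res = {duration__sum: 0, count: 0};"
              "    values.forEach(function (v) {"
              "        res.duration__sum += v.duration__sum;"
              "        res.count += v.count;"
              "    });"
              "    return res;"
              "}")
finalfc = Code("function (key, value) {"
               "    value.duration__avg = value.duration__sum / value.count;"
               "    return value;"
               "}")
out = 'aggregate_cdr_alert'  # assumed name of the output collection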
def import_cdr(shell=False, logger=False):
    """
    Connect to the `import_cdr` database and import the new CDRs
    """
    count_imported = 0
    log_print(logger, shell, "in func import_cdr...")

    if not check_connection_sql():
        log_print(logger, shell, "check_connection_sql - Error Connection")
        return (False, "Error Connection")

    # Each time the task runs we only take CDR_IMPORT_LIMIT records to import.
    # This defines the maximum import speed; the limit can be changed.
    new_CDRs = CDRImport.objects.using('import_cdr')\
        .filter(imported=False)\
        .order_by('-id')[:settings.CDR_IMPORT_LIMIT]

    (list_newcdr, list_cdrid) = ([], [])
    for call in new_CDRs:
        # Increment the counter
        count_imported = count_imported + 1

        # Get the dialcode
        dialcode = get_dialcode(call.destination_number, call.dialcode)
        switch_info = chk_ipaddress(call.switch)

        # Check the destination number
        if len(call.destination_number) <= settings.INTERNAL_CALL \
                or call.destination_number[:1].isalpha():
            authorized = 1
            country_id = None
            call_type = CALL_TYPE.INTERNAL
        else:
            # TODO: rename verify_auth_dest_number
            destination_data = verify_auth_dest_number(call.destination_number)
            authorized = destination_data['authorized']
            country_id = destination_data['country_id']
            call_type = CALL_TYPE.INTERNATIONAL

        # Sanitize the direction
        if call.direction:
            direction = call.direction
        else:
            direction = CALL_DIRECTION.NOTDEFINED

        # Find the user for the given accountcode
        try:
            user = AccountCode.objects.get(accountcode=call.accountcode).user
        except AccountCode.DoesNotExist:
            # Cannot assign the accountcode to an existing user,
            # therefore we assign the CDR to an Admin
            user = User.objects.filter(is_superuser=True)[0]
        try:
            user_profile = user.userprofile
        except UserProfile.DoesNotExist:
            user_profile = UserProfile(user=user)
            user_profile.save()

        # Retrieve the VoipPlan
        if user_profile:
            voipplan_id = user_profile.voipplan_id
        else:
            voipplan_id = False
            print_shell(shell,
                        "VoipPlan doesn't exist for this user/accountcode (%s)" %
                        call.accountcode)

        if call.buy_rate or call.buy_cost or call.sell_rate or call.sell_cost:
            buy_rate = call.buy_rate
            buy_cost = call.buy_cost
            sell_rate = call.sell_rate
            sell_cost = call.sell_cost
        elif voipplan_id:
            call_rate = calculate_call_cost(voipplan_id,
                                            call.destination_number, call.billsec)
            buy_rate = call_rate['buy_rate']
            buy_cost = call_rate['buy_cost']
            sell_rate = call_rate['sell_rate']
            sell_cost = call_rate['sell_cost']
        else:
            buy_rate = buy_cost = sell_rate = sell_cost = 0

        hangup_cause_id = get_hangupcause_id(call.hangup_cause_id)

        log_print(
            logger, shell,
            "Create new CDR -> date:%s - dst:%s - duration:%s - hangup_cause:%s - sell_cost:%s" %
            (call.starting_date, call.destination_number,
             str(call.duration), str(hangup_cause_id), str(call.sell_cost)))

        # Create the new CDR
        newCDR = CDR(
            user=user,
            switch=switch_info['switch'],
            cdr_source_type=call.cdr_source_type,
            callid=call.callid,
            caller_id_number=call.caller_id_number,
            caller_id_name=call.caller_id_name,
            destination_number=call.destination_number,
            dialcode_id=dialcode,
            starting_date=call.starting_date,
            duration=call.duration,
            billsec=call.billsec,
            progresssec=call.progresssec,
            answersec=call.answersec,
            waitsec=call.waitsec,
            hangup_cause_id=hangup_cause_id,
            direction=direction,
            country_id=country_id,
            authorized=authorized,
            accountcode='' if call.accountcode is None else call.accountcode,
            buy_rate=buy_rate,
            buy_cost=buy_cost,
            sell_rate=sell_rate,
            sell_cost=sell_cost,
            call_type=call_type,
            data='' if call.extradata is None else call.extradata)

        list_newcdr.append(newCDR)
        list_cdrid.append(str(call.id))

        if (count_imported % 100) == 0:
            bulk_create_cdrs(list_newcdr, list_cdrid)
            (list_newcdr, list_cdrid) = ([], [])

    # We exit the loop, but we might still have some remaining CDRs to push
    if len(list_newcdr) > 0:
        bulk_create_cdrs(list_newcdr, list_cdrid)
        (list_newcdr, list_cdrid) = ([], [])

    log_print(
        logger, shell,
        'TASK :: run_cdr_import -> func import_cdr count_imported:%d' % count_imported)

    return (True, count_imported)
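bulk_create_cdrs is called above but not shown. A minimal sketch, assuming it wraps Django's bulk_create and then flags the source rows in the import_cdr database as imported so they are not picked up on the next run:

def bulk_create_cdrs(list_newcdr, list_cdrid):
    # Minimal sketch: insert the prepared CDR objects in one query,
    # then mark the originating rows as imported.
    CDR.objects.bulk_create(list_newcdr)
    CDRImport.objects.using('import_cdr')\
        .filter(id__in=list_cdrid)\
        .update(imported=True)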
def import_cdr(self, request):
    """Custom method in the Django admin view to import a CSV file of CDRs

    **Attributes**:

        * ``form`` - CDR_FileImport
        * ``template`` - admin/cdr/switch/import_cdr.html

    **Important variables**:

        * total_rows - total number of records in the CSV file
        * cdr_record_count - number of records imported from the CSV file
    """
    opts = Switch._meta
    app_label = opts.app_label
    rdr = ''  # will contain the CSV data
    msg = ''
    success_import_list = []
    error_import_list = []
    type_error_import_list = []

    if request.method == 'POST':
        form = CDR_FileImport(request.user, request.POST, request.FILES)
        if form.is_valid():
            field_list = {}
            field_notin_list = []
            for i in CDR_FIELD_LIST:
                if int(request.POST[i]) != 0:
                    field_list[i] = int(request.POST[i])
                else:
                    field_notin_list.append(i)

            # Perform sorting & get the unique order list
            countMap = {}
            for v in field_list.itervalues():
                countMap[v] = countMap.get(v, 0) + 1
            uni = [(k, v) for k, v in field_list.iteritems() if countMap[v] == 1]
            uni = sorted(uni, key=lambda item: item[1])

            # Check that the order list matches the CDR_FIELD_LIST count
            if len(uni) == len(CDR_FIELD_LIST) - len(field_notin_list):
                # Count the total rows of the CSV file
                records = csv.reader(request.FILES['csv_file'],
                                     delimiter=',', quotechar='"')
                total_rows = len(list(records))

                rdr = csv.reader(request.FILES['csv_file'],
                                 delimiter=',', quotechar='"')
                cdr_record_count = 0
                # Store CDRs in a list to insert them in bulk
                cdr_bulk_record = []
                local_count_import = 0
                PAGE_SIZE = 1000

                # Read each row
                for row in rdr:
                    if row and row[0]:
                        row = striplist(row)
                        try:
                            accountcode = ''
                            # Extra fields to import
                            caller_id_name = ''
                            direction = 'outbound'
                            remote_media_ip = ''
                            answer_uepoch = ''
                            end_uepoch = ''
                            mduration = ''
                            billmsec = ''
                            write_codec = ''
                            read_codec = ''

                            get_cdr_from_row = {}
                            row_counter = 0
                            for j in uni:
                                get_cdr_from_row[j[0]] = row[j[1] - 1]
                                #get_cdr_from_row[j[0]] = row[row_counter]
                                caller_id_name = get_value_from_uni(j, row, 'caller_id_name')
                                caller_id_number = get_value_from_uni(j, row, 'caller_id_number')
                                direction = get_value_from_uni(j, row, 'direction')
                                remote_media_ip = get_value_from_uni(j, row, 'remote_media_ip')
                                answer_uepoch = get_value_from_uni(j, row, 'answer_uepoch')
                                end_uepoch = get_value_from_uni(j, row, 'end_uepoch')
                                mduration = get_value_from_uni(j, row, 'mduration')
                                billmsec = get_value_from_uni(j, row, 'billmsec')
                                read_codec = get_value_from_uni(j, row, 'read_codec')
                                write_codec = get_value_from_uni(j, row, 'write_codec')
                                row_counter = row_counter + 1

                            if len(field_notin_list) != 0:
                                for i in field_notin_list:
                                    if i == 'accountcode' and request.POST.get("accountcode_csv"):
                                        accountcode = request.POST["accountcode_csv"]

                            if not accountcode and request.POST.get("accountcode") != '0':
                                accountcode = get_cdr_from_row['accountcode']

                            # Mandatory fields to import
                            switch_id = int(request.POST['switch'])
                            caller_id_number = get_cdr_from_row['caller_id_number']
                            duration = int(get_cdr_from_row['duration'])
                            billsec = int(get_cdr_from_row['billsec'])

                            if request.POST.get('import_asterisk') \
                                    and request.POST['import_asterisk'] == 'on':
                                hangup_cause_name = "_".join(
                                    get_cdr_from_row['hangup_cause_id'].upper().split(' '))
                                hangup_cause_id = \
                                    get_hangupcause_id_from_name(hangup_cause_name)
                            else:
                                hangup_cause_id = \
                                    get_hangupcause_id(int(get_cdr_from_row['hangup_cause_id']))

                            start_uepoch = datetime.datetime.fromtimestamp(
                                int(float(get_cdr_from_row['start_uepoch'])))
                            destination_number = get_cdr_from_row['destination_number']
                            uuid = get_cdr_from_row['uuid']

                            destination_data = chk_destination(destination_number)
                            authorized = destination_data['authorized']
                            country_id = destination_data['country_id']

                            # Extra fields to import
                            if answer_uepoch:
                                answer_uepoch = datetime.datetime.fromtimestamp(
                                    int(answer_uepoch[:10]))
                            if end_uepoch:
                                end_uepoch = datetime.datetime.fromtimestamp(
                                    int(end_uepoch[:10]))

                            # Prepare the global CDR
                            cdr_record = generate_global_cdr_record(
                                switch_id, caller_id_number, caller_id_name,
                                destination_number, duration, billsec,
                                hangup_cause_id, accountcode, direction,
                                uuid, remote_media_ip, start_uepoch,
                                answer_uepoch, end_uepoch, mduration,
                                billmsec, read_codec, write_codec,
                                'CSV_IMPORT', '', country_id, authorized)

                            # Check if the CDR already exists in cdr_common
                            cdr_data = settings.DBCON[settings.MONGO_CDRSTATS['CDR_COMMON']]
                            query_var = {}
                            query_var['uuid'] = uuid
                            record_count = cdr_data.find(query_var).count()
                            if record_count >= 1:
                                msg = _('CDR already exists !!')
                                error_import_list.append(row)
                            else:
                                # If not, insert the record as a global CDR.
                                # Append the CDR to the bulk_cdr list
                                cdr_bulk_record.append(cdr_record)
                                local_count_import = local_count_import + 1
                                if local_count_import == PAGE_SIZE:
                                    CDR_COMMON.insert(cdr_bulk_record)
                                    local_count_import = 0
                                    cdr_bulk_record = []

                                date_start_uepoch = get_cdr_from_row['start_uepoch']
                                common_function_to_create_analytic(
                                    date_start_uepoch, start_uepoch, switch_id,
                                    country_id, accountcode, hangup_cause_id, duration)

                                cdr_record_count = cdr_record_count + 1
                                msg = \
                                    _('%(cdr_record_count)s CDR(s) are uploaded, out of %(total_rows)s row(s) !!') \
                                    % {'cdr_record_count': cdr_record_count,
                                       'total_rows': total_rows}
                                success_import_list.append(row)
                        except Exception:
                            # Invalid value in the row; keep it for the error report
                            msg = _("Error : invalid value for import")
                            type_error_import_list.append(row)

                # Remaining records
                if cdr_bulk_record:
                    CDR_COMMON.insert(cdr_bulk_record)
                    local_count_import = 0
                    cdr_bulk_record = []

                if cdr_record_count > 0:
                    # Apply the index
                    apply_index(shell=True)
            else:
                msg = _("Error : importing several times the same column")
    else:
        form = CDR_FileImport(request.user)

    ctx = RequestContext(request, {
        'title': _('Import CDR'),
        'form': form,
        'opts': opts,
        'model_name': opts.object_name.lower(),
        'app_label': app_label,
        'rdr': rdr,
        'msg': msg,
        'success_import_list': success_import_list,
        'error_import_list': error_import_list,
        'type_error_import_list': type_error_import_list,
        'CDR_FIELD_LIST': list(CDR_FIELD_LIST),
        'CDR_FIELD_LIST_NUM': list(CDR_FIELD_LIST_NUM),
    })
    template = 'admin/cdr/switch/import_cdr.html'
    return render_to_response(template, context_instance=ctx)
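striplist and get_value_from_uni are used in the view above but not shown. Plausible minimal implementations follow; these are assumptions, not the confirmed project code. Note that get_value_from_uni, shaped this way, returns a value only for the matching (field, position) entry, so the per-field assignments in the loop above depend on which mapping entry is processed when.

def striplist(row):
    # Strip surrounding whitespace from every cell of a CSV row
    return [cell.strip() for cell in row]

def get_value_from_uni(j, row, field_name):
    # Return the CSV cell for field_name when this (field, position)
    # mapping entry matches, otherwise an empty string
    if j[0] == field_name:
        return row[j[1] - 1]
    return ''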