Example 1
def save_rep_role(rep_id, rep_role, master_load_id):
    """ Saves the rep role. """
    execute_sp(
        'UMA_TELECOM.SAVE_D_REP_ROLE', {
            'D_REP_ID': rep_id,
            'REP_ROLE_ID': rep_role['$id'],
            'REP_ROLE_roleId': rep_role['roleId'],
            'REP_ROLE_name': rep_role['name'],
            'REP_ROLE_id_ALTERNATE': rep_role['id'],
            'REP_ROLE_dateAdded': format_date(parse_date(
                rep_role['dateAdded'])),
            'LOAD_HISTORY_PKID': master_load_id
        })
Example 2
def save_rep_skill(rep_id, rep_skill, master_load_id):
    """ Saves the rep skill. """
    execute_sp(
        'UMA_TELECOM.SAVE_D_REP_SKILL', {
            'D_REP_ID': rep_id,
            'REP_SKILL_ID': rep_skill['$id'],
            'REP_SKILL_displayName': rep_skill['displayName'],
            'REP_SKILL_proficiency': rep_skill['proficiency'],
            'REP_SKILL_desireToUse': rep_skill['desireToUse'],
            'REP_SKILL_id_ALTERNATE': rep_skill['id'],
            'REP_SKILL_dateAdded': format_date(
                parse_date(rep_skill['dateAdded'])),
            'LOAD_HISTORY_PKID': master_load_id
        })
Example 3
def opsgenie_alert():
    """ Checks for a logged error and sends an OpsGenie alert when one is found. """
    config = get_config()

    results = execute_sp('MWH.Ops_Gene_Alert_Check', {})
    if len(results) < 1 or len(results[0]) < 1:
        log('No Alert found.')
        return

    error = results[0][0]
    if (not error or 'error_message' not in error
            or error['error_message'] == ''):
        log('No Alert found.')
        return

    init_opsgenie({
        'OPSGENIE_API_KEY': config['OPSGENIE_API_KEY'],
        'OPSGENIE_GENIE_KEY': config['OPSGENIE_GENIE_KEY'],
        'IS_PRODUCTION': config['IS_PRODUCTION']
    })

    alert = send_alert_for_error(error)
    if not alert:
        log('No OpsGenie alert sent.')
    else:
        log('OpsGenie alert sent:')
        for key in error:
            log(f'{key}: {error[key]}')

        log('')
        log('OpsGenie alert response:')
        log(f'Request ID: {alert.request_id}')
        log(f'Result: {alert.result}')
        log(f'Took: {alert.took}')
Example 4
def save_rep(rep, master_load_id):
    """ Saves the rep data. """
    results = execute_sp(
        'UMA_TELECOM.SAVE_D_REP', {
            'REP_ID': rep['$id'],
            'REP_userId': rep['userId'],
            'REP_homeSite': rep['homeSite'],
            'REP_firstName': rep['firstName'],
            'REP_lastName': rep['lastName'],
            'REP_displayName': rep['displayName'],
            'REP_ntDomainUser': rep['ntDomainUser'],
            'REP_extension': rep['extension'],
            'REP_outboundANI': rep['outboundANI'],
            'REP_id_LIST': rep['id'],
            'REP_customAttributes': _get_custom_attributes(
                rep['customAttributes']),
            'REP_dateAdded': format_date(parse_date(rep['dateAdded'])),
            'LOAD_HISTORY_PKID': master_load_id
        },
        out_arg='rep_id')

    result = get_sp_first_result_set(results)
    if not result:
        return False

    return result['rep_id']
Example 5
def start_process():
    """ Calls the stored procedure to start the process. """
    results = execute_sp('UMA_TELECOM.PROCESS_API_DATA', {'MESSAGE': 'START'})

    result = get_sp_first_result_set(results)
    if not result:
        return False

    return result['return_value']
Example 6
def save_rep_workgroup(rep_id, rep_workgroup, master_load_id):
    """ Saves the rep workgroup. """
    execute_sp(
        'UMA_TELECOM.SAVE_D_REP_WORKGROUP', {
            'D_REP_ID': rep_id,
            'REP_WORKGROUP_ID': rep_workgroup['$id'],
            'REP_WORKGROUP_Name': rep_workgroup['name'],
            'REP_WORKGROUP_customAttributes': _get_custom_attributes(
                rep_workgroup['customAttributes']),
            'REP_WORKGROUP_id_ALTERNATE': rep_workgroup['id'],
            'REP_WORKGROUP_dateAdded': format_date(
                parse_date(rep_workgroup['dateAdded'])),
            'LOAD_HISTORY_PKID': master_load_id
        })
Example 7
def stop_process(master_load_id):
    """ Calls the stored procedure to stop the process. """
    results = execute_sp('UMA_TELECOM.PROCESS_API_DATA', {
        'MESSAGE': 'FINISHED',
        'LOAD_HIST_PKID_IN': master_load_id
    })

    result = get_sp_first_result_set(results)
    if not result:
        return False

    return result
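Taken together, these examples suggest a load sequence: start_process opens a load, save_rep returns the rep's surrogate key, the child saves attach roles, skills and workgroups to it, and stop_process closes the load. The driver below is only a hypothetical sketch; the load_reps name, the reps argument, and the 'roles'/'skills'/'workgroups' keys on each rep dict are assumptions for illustration, not part of the original module.

def load_reps(reps):
    """ Hypothetical driver tying the save/process examples together (illustrative only). """
    # Assumption: start_process returns the LOAD_HISTORY pk for this run.
    master_load_id = start_process()
    if not master_load_id:
        return

    for rep in reps:  # reps: rep dicts already pulled from the API (assumed input)
        rep_id = save_rep(rep, master_load_id)
        if not rep_id:
            continue

        # The child collection key names below are assumptions for illustration.
        for role in rep.get('roles', []):
            save_rep_role(rep_id, role, master_load_id)
        for skill in rep.get('skills', []):
            save_rep_skill(rep_id, skill, master_load_id)
        for workgroup in rep.get('workgroups', []):
            save_rep_workgroup(rep_id, workgroup, master_load_id)

    # Marks the load FINISHED for this LOAD_HIST_PKID.
    stop_process(master_load_id)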
Example 8
def get_last_row(filepath, filename):
    """ Returns the last row number already saved for the given CSV file. """
    # out_arg is assumed to be a module-level constant naming the SP output parameter.
    start_at_result = execute_sp(
        'MWH_FILES.MANAGE_CSV_DATA', {
            'message': 'GET_LAST_ROW',
            'PATH': filepath,
            'FILE_NAME': filename,
            'COLUMN_NAME': '',
            'COLUMN_POSITION': '',
            'ROW_NUMBER': '',
            'VALUE': '',
            'FILE_LAST_MODIFIED_DTTM': '',
            'FILE_SIZE_IN_BYTES': ''
        },
        out_arg=out_arg)
    return start_at_result[0][0]['last_row']
Example 9
def process_yaml_data(file, task_id=''):
    """ Parses the data dictionary YAML file and saves each entry via a stored procedure. """
    if not os.path.exists(file):
        raise FileNotFoundError(f"{file} is an invalid file.")

    # Get the file contents
    with open(file) as fp:
        contents = fp.read()

    # Parse the file contents
    yaml_data = yaml.safe_load(contents)['dictionary']

    total = 0
    source_insert_count = 0
    source_update_count = 0
    calculate_insert_count = 0
    calculate_update_count = 0
    error_count = 0

    log("Processing ...")

    yaml_keys = yaml_data.keys()

    if task_id:
        execute_scheduled_tasks_sp('MWH.MANAGE_SCHEDULE_TASK_JOBS',
                                   'START_PROCESSING_SCHEDULE_TASK',
                                   str(task_id), str(len(yaml_keys)))

    for key in yaml_keys:
        tmp_item = yaml_data[key]
        if 'type' not in tmp_item:
            tmp_item['type'] = 'unknown'

        if 'description' not in tmp_item:
            tmp_item['description'] = 'N/A'

        items = []
        if 'calculate' in tmp_item:
            sources = tmp_item['calculate'].split(' or ')
            for source in sources:
                items.append({
                    'entry_type': 'calculate',
                    'source': source,
                    'type': tmp_item['type'],
                    'description': tmp_item['description']
                })
        else:
            tmp_item['entry_type'] = 'source'
            items.append(tmp_item)

        for item in items:
            result = execute_sp(
                'MWH_FILES.MANAGE_CollegeScorecard_Dictionary', {
                    'message': 'SAVE',
                    'DICTIONARY_ENTRY_TYPE': item['entry_type'],
                    'ENTRY_NAME': key,
                    'COLUMN_NAME': item['source'],
                    'ENTRY_DATA_TYPE': item['type'],
                    'ENTRY_DESCRIPTION': item['description']
                },
                out_arg='return_flg')

            result_count = len(result)
            processed = result[result_count - 1][0]['return_flg']

            total += 1

            if processed == 1:
                source_insert_count += 1
            elif processed == 2:
                source_update_count += 1
            elif processed == 3:
                calculate_insert_count += 1
            elif processed == 4:
                calculate_update_count += 1
            else:
                error_count += 1

    if task_id:
        execute_scheduled_tasks_sp('MWH.MANAGE_SCHEDULE_TASK_JOBS',
                                   'FINISHED_PROCESSING_SCHEDULE_TASK',
                                   str(task_id), str(total))

    log("")
    log(f"TOTAL: {format_number(total)}")
    log(f"SOURCE INSERT COUNT: {format_number(source_insert_count)}")
    log(f"SOURCE UPDATE COUNT: {format_number(source_update_count)}")
    log(f"CALCULATE INSERT COUNT: {format_number(calculate_insert_count)}")
    log(f"CALCULATE UPDATE COUNT: {format_number(calculate_update_count)}")
    log(f"ERROR COUNT: {format_number(error_count)}")
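For reference, a hypothetical sketch of the structure process_yaml_data expects under the YAML file's top-level 'dictionary' key: an entry with a 'calculate' expression is split on ' or ' into one 'calculate' row per source column, while an entry without one is saved as a single 'source' row. The entry and column names below are made up for illustration.

# Illustrative shape of the parsed YAML; names are placeholders, not real dictionary entries.
yaml_data = {
    'median_earnings': {
        'type': 'float',
        'description': 'Median earnings 10 years after entry',
        # Split on ' or ' -> two rows saved with DICTIONARY_ENTRY_TYPE = 'calculate'.
        'calculate': 'md_earn_wne_p10 or md_earn_wne_p11',
    },
    'institution_name': {
        'type': 'string',
        'description': 'Institution name',
        # No 'calculate' key -> saved as one row with DICTIONARY_ENTRY_TYPE = 'source'.
        'source': 'INSTNM',
    },
}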
Example 10
def process_spreadsheet_data(file, row_limit_display=100, task_id=''):
    """ Loads the workbook data cell by cell and saves each value via a stored procedure. """
    if not os.path.exists(file):
        raise FileNotFoundError(f"{file} is an invalid file.")

    filename = ntpath.basename(file)
    filepath = ntpath.dirname(file)
    file_size = os.path.getsize(file)
    file_last_modified = datetime.datetime.fromtimestamp(
        os.path.getmtime(file))
    file_last_modified_str = file_last_modified.strftime('%Y-%m-%d %H:%M:%S')
    file_exists = False

    start_at = 1
    exists = execute_sp(
        'MWH_FILES.MANAGE_CSV_DATA', {
            'message': 'CHECK_IF_EXISTS',
            'PATH': filepath,
            'FILE_NAME': filename,
            'COLUMN_NAME': '',
            'COLUMN_POSITION': '',
            'ROW_NUMBER': '',
            'VALUE': '',
            'FILE_LAST_MODIFIED_DTTM': '',
            'FILE_SIZE_IN_BYTES': ''
        },
        out_arg=out_arg)

    if (len(exists[0]) > 0
            and file_last_modified.date() == exists[0][0]['last_modified_dttm'].date()
            and file_size == exists[0][0]['file_size']):
        file_exists = True
        start_at = get_last_row(filepath, filename)

    execute_scheduled_tasks_sp('MWH.MANAGE_SCHEDULE_TASK_JOBS',
                               'TASK_REQUEST_CHECK', str(task_id),
                               'EXISTING FILE' if file_exists else 'NEW FILE')

    total = 0
    insert_count = 0
    update_count = 0
    null_count = 0
    error_count = 0

    # CSV file columns
    columns = read_workbook_columns(file)

    # CSV file rows
    rows = read_workbook_data(file)

    totals_rows = len(rows)
    if file_exists and start_at == totals_rows:
        if task_id:
            execute_scheduled_tasks_sp('MWH.MANAGE_SCHEDULE_TASK_JOBS',
                                       'FINISHED_PROCESSING_SCHEDULE_TASK',
                                       str(task_id), '0')

        log('File already exists. Nothing new to process.')
        return

    if task_id:
        execute_scheduled_tasks_sp('MWH.MANAGE_SCHEDULE_TASK_JOBS',
                                   'START_PROCESSING_SCHEDULE_TASK',
                                   str(task_id), str(totals_rows))

    for row_num, row in enumerate(rows):
        curr_row = row_num + 1
        to_row = row_num + row_limit_display

        if to_row >= totals_rows:
            to_row = totals_rows

        if row_num % row_limit_display == 0:
            log(f"{get_now_datetime()}: processing rows "
                f"{format_number(curr_row)} to {format_number(to_row)} "
                f"of {format_number(totals_rows)}")

        if curr_row < start_at:
            continue

        for col_pos, col in enumerate(columns):
            value = row[col_pos]
            value_norm = value.lower()
            # value_norm is lowercased, so compare against lowercase markers.
            if value_norm == 'null' or value_norm == 'privacysuppressed':
                processed = 3
            else:
                result = execute_sp(
                    'MWH_FILES.MANAGE_CSV_DATA', {
                        'message': 'SAVE',
                        'PATH': filepath,
                        'FILE_NAME': filename,
                        'COLUMN_NAME': col,
                        'COLUMN_POSITION': str(col_pos + 1),
                        'ROW_NUMBER': str(row_num + 1),
                        'VALUE': value,
                        'FILE_LAST_MODIFIED_DTTM': file_last_modified_str,
                        'FILE_SIZE_IN_BYTES': file_size
                    },
                    out_arg=out_arg)

                processed = result[len(result) - 1][0][out_arg]

            total += 1

            if processed == 1:
                insert_count += 1
            elif processed == 2:
                update_count += 1
            elif processed == 3:
                null_count += 1
            else:
                error_count += 1

    if task_id:
        execute_scheduled_tasks_sp('MWH.MANAGE_SCHEDULE_TASK_JOBS',
                                   'FINISHED_PROCESSING_SCHEDULE_TASK',
                                   str(task_id), str(total))

    log("")
    log(f"TOTAL: {format_number(total)}")
    log(f"INSERT COUNT: {format_number(insert_count)}")
    log(f"UPDATE COUNT: {format_number(update_count)}")
    log(f"NULL COUNT: {format_number(null_count)}")
    log(f"ERROR COUNT: {format_number(error_count)}")
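A hypothetical invocation, with a placeholder path and task id: because the CHECK_IF_EXISTS and GET_LAST_ROW calls record progress per file, re-running against an unchanged file resumes at the last saved row instead of reprocessing from the first row.

# Hypothetical call; the path and task id are placeholders.
process_spreadsheet_data(
    '/data/college_scorecard/Most-Recent-Cohorts-All-Data-Elements.csv',
    row_limit_display=500,
    task_id='42')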