def run():
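    """Scrape the CloudHealth status page and index each component's status."""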
    page = requests.get(constants.URL_CLOUD_HEALTH)
    soup = BeautifulSoup(page.content, 'html.parser')

    json_template = templates.get_json_template()
    json_template.update({
        'source': constants.SOURCE_CLOUD_HEALTH,
        'sourceUrl': constants.URL_CLOUD_HEALTH,
        'sourceStatus': constants.STATUS_GOOD,
    })
    try:
        inner_containers = soup.find_all('div',
                                         class_='component-inner-container')

        for container in inner_containers:
            service_name = container.find_all('span',
                                              class_='name')[0].text.strip()
            service_value = container.find_all(
                'span', class_='component-status')[0].text.strip()
            json_template['services'].append({
                'name': service_name,
                'value': service_value
            })

        # print json.dumps(json_template)
        ES.create_index_data(json_template)
    except Exception:
        logger.error('error parsing %s',
                     constants.SOURCE_CLOUD_HEALTH,
                     exc_info=True)
        logger.error("-" * 100)
        logger.error(str(soup))
        logger.error("-" * 100)
        json_template['sourceStatus'] = constants.STATUS_CRITICAL
        ES.create_index_data(json_template)
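Each of these collectors shares the same skeleton: build a document from templates.get_json_template(), fill in 'source', 'sourceUrl' and 'sourceStatus' from constants, append per-service entries, and hand the result to ES.create_index_data(). Those helpers are not part of the excerpts; below is a minimal sketch of what the templates and ES modules might provide, inferred purely from how they are called. The field names, the timestamp, the index names and the Elasticsearch wiring are assumptions, not the real implementation.

import datetime
import logging

from elasticsearch import Elasticsearch

# Shared logger used by the collectors.
logger = logging.getLogger(__name__)

# Assumption: a single local Elasticsearch node; adjust hosts as needed.
es = Elasticsearch(['localhost:9200'])


def get_json_template():
    # One document per scrape: source metadata plus a list of per-service rows.
    # The timestamp field is an extra assumption, not visible in the excerpts.
    return {
        'source': None,
        'sourceUrl': None,
        'sourceStatus': None,
        'services': [],
        'timestamp': datetime.datetime.utcnow().isoformat(),
    }


def create_index_data(body, index='status_data', doc_type='status'):
    # Index one scraped document; the default index/doc_type names are placeholders.
    # The keyword form also covers the metrics() call at the end of this listing.
    es.index(index=index, doc_type=doc_type, body=body)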
Example 2
def run():
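    """Scrape the AWS status dashboard and roll per-service rows up into one source status."""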
    page = requests.get(constants.URL_AWS)
    soup = BeautifulSoup(page.content, 'html.parser')

    json_template = templates.get_json_template()
    json_template.update({
        'source': constants.SOURCE_AWS,
        'sourceUrl': constants.URL_AWS,
        'sourceStatus': constants.STATUS_GOOD,
    })

    try:
        service_status = constants.STATUS_GOOD
        status_set = set()

        for html_tag in html_tags:
            html_element = soup.find(html_tag[0], html_tag[1])
            html_tables = html_element.find_all('table',
                                                attrs={'class': 'fullWidth'})

            if len(html_tables) == 2:
                table = html_tables[1]
                table_body = table.find('tbody')

                rows = table_body.find_all('tr')
                for row in rows:
                    cols = row.find_all('td')
                    service_name = cols[1].text.strip()
                    service_value = cols[2].text.strip()
                    # if service_value.lower() in [DEGRADATIONAL_STATE, DISTRUPTIONAL_STATE]:
                    #     service_status = constants.STATUS_CRITICAL
                    status_set.add(status_dict.get(service_value.lower()))

                    json_template['services'].append({
                        'name': service_name,
                        'value': service_value
                    })

        if constants.STATUS_CRITICAL in status_set:
            service_status = constants.STATUS_CRITICAL
        elif constants.STATUS_WARNING in status_set:
            service_status = constants.STATUS_WARNING
        json_template['sourceStatus'] = service_status

        # print json.dumps(json_template)
        ES.create_index_data(json_template)
    except Exception:
        logger.error('error parsing %s', constants.SOURCE_AWS, exc_info=True)
        logger.error("-" * 100)
        logger.error(str(soup))
        logger.error("-" * 100)
        json_template['sourceStatus'] = constants.STATUS_CRITICAL
        ES.create_index_data(json_template)
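The AWS collector relies on two module-level lookups that are not shown: html_tags, the candidate containers searched for the status tables, and status_dict, which maps the dashboard's status wording onto the internal constants. A hypothetical sketch follows; the concrete tag names and status strings are assumptions (only the degradation/disruption wording is hinted at by the commented-out check above), and constants is the same module the collectors import.

# Candidate (tag, attrs) pairs tried in order when locating the status tables.
# The id values here are guesses, not taken from the real AWS page.
html_tags = [
    ('div', {'id': 'current_events'}),
    ('div', {'id': 'current_status'}),
]

# Dashboard wording (lowercased) mapped onto internal status constants.
status_dict = {
    'service is operating normally': constants.STATUS_GOOD,
    'informational message': constants.STATUS_GOOD,
    'performance issues': constants.STATUS_WARNING,
    'service degradation': constants.STATUS_WARNING,
    'service disruption': constants.STATUS_CRITICAL,
}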
Example 3
def run():
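    """Scrape the Apica summary circles and derive an overall status from the up/issue/down counts."""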
    page = requests.get(constants.URL_APICA)
    soup = BeautifulSoup(page.content, 'html.parser')

    json_template = templates.get_json_template()
    json_template.update({
        'source': constants.SOURCE_APICA,
        'sourceUrl': constants.URL_APICA,
        'sourceStatus': constants.STATUS_GOOD,
    })
    try:
        service_status = constants.STATUS_GOOD
        status_dict = {
            'up': 0,
            'issue': 0,
            'down': 0
        }
        inner_containers = soup.find('div', class_='general_stat')
        service_circles = inner_containers.find_all('div', class_='service_circle')

        for circle in service_circles:
            service_n_status = circle.text.strip().split('\n')
            status_dict[service_n_status[1]] = int(service_n_status[0])
            # json_template['services'].append({
            #     'name': service_name,
            #     'value': service_value
            # })

        if status_dict.get('down') > 0:
            service_status = constants.STATUS_CRITICAL
        elif status_dict.get('issue') > 0:
            service_status = constants.STATUS_WARNING

        json_template['sourceStatus'] = service_status
        json_template['services'].append({
            'name': 'apica',
            'value': service_status
        })
        ES.create_index_data(json_template)
    except Exception:
        logger.error('error parsing %s', constants.SOURCE_APICA, exc_info=True)
        logger.error("-" * 100)
        logger.error(str(soup))
        logger.error("-" * 100)
        json_template['sourceStatus'] = constants.STATUS_CRITICAL
        ES.create_index_data(json_template)
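The Apica parser does not record individual services; it only tallies the summary circles. The markup it expects, inferred from the selectors and the split on '\n', looks roughly like this hypothetical fixture:

from bs4 import BeautifulSoup

# Hypothetical fixture: each div.service_circle carries a count on the first
# line and a label (up / issue / down) on the second, as the split above assumes.
SAMPLE = """
<div class="general_stat">
  <div class="service_circle">12
up</div>
  <div class="service_circle">1
issue</div>
  <div class="service_circle">0
down</div>
</div>
"""

counts = {}
soup = BeautifulSoup(SAMPLE, 'html.parser')
for circle in soup.find('div', class_='general_stat').find_all('div', class_='service_circle'):
    count, label = circle.text.strip().split('\n')
    counts[label.strip()] = int(count)
print(counts)  # {'up': 12, 'issue': 1, 'down': 0}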
Example 4
def run():
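    """Render the NetSuite status page with Selenium and map each status icon to an internal status."""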
    driver.get(constants.URL_NETSUITE)
    soup = BeautifulSoup(driver.page_source, 'html.parser')

    json_template = templates.get_json_template()
    json_template.update({
        'source': constants.SOURCE_NETSUITE,
        'sourceUrl': constants.URL_NETSUITE,
        'sourceStatus': constants.STATUS_GOOD,
    })

    try:
        service_status = constants.STATUS_GOOD

        table = soup.find('table', attrs={'id': 'weekly-status'})
        table_body = table.find('tbody')

        rows = table_body.find_all('tr')
        for row in rows:
            cols = row.find_all('td')
            service_name = cols[0].text.strip()
            service_value_icon = cols[1].find('svg').get('class')[1]
            service_value = status_icon_dict[service_value_icon]

            if service_value == constants.STATUS_CRITICAL:
                service_status = service_value

            json_template['services'].append({
                'name': service_name,
                'value': service_value
            })

        json_template['sourceStatus'] = service_status

        # print json.dumps(json_template)
        ES.create_index_data(json_template)
    except Exception:
        logger.error('error parsing %s', constants.SOURCE_NETSUITE, exc_info=True)
        logger.error("-" * 100)
        logger.error(str(soup))
        logger.error("-" * 100)
        json_template['sourceStatus'] = constants.STATUS_CRITICAL
        ES.create_index_data(json_template)
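This collector, and the Okta and Adobe ones after it, renders the page with a module-level Selenium driver and maps the second CSS class on each status icon through status_icon_dict. Neither is shown in the excerpt; the sketch below is a hypothetical setup, where headless Chrome is one common choice and the icon class names are guesses rather than the real page's values.

from selenium import webdriver

# Assumption: a headless Chrome driver shared by the Selenium-based collectors.
options = webdriver.ChromeOptions()
options.add_argument('--headless')
driver = webdriver.Chrome(options=options)

# Second CSS class on the status <svg> mapped to an internal status.
# The class names are placeholders, not taken from the real NetSuite page.
status_icon_dict = {
    'icon-up': constants.STATUS_GOOD,
    'icon-degraded': constants.STATUS_WARNING,
    'icon-down': constants.STATUS_CRITICAL,
}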
Example 5
def run():
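    """Render the Okta status page with Selenium and index the overall status banner."""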
    driver.get(constants.URL_OKTA)
    soup = BeautifulSoup(driver.page_source, 'html.parser')

    json_template = templates.get_json_template()
    json_template.update({
        'source': constants.SOURCE_OKTA,
        'sourceUrl': constants.URL_OKTA,
        'sourceStatus': constants.STATUS_GOOD,
    })

    try:
        service_status = None
        elements = []
        for tags in html_tags:
            elements = soup.find_all(tags[0], class_=tags[1])
            if elements:
                break

        if elements:
            service_status = elements[0].text.strip()

        # service_value = service_states_dict[service_name_and_status]

        json_template['services'].append({
            'name': 'okta',
            'value': service_status
        })

        # Treat a missing status banner as a problem as well.
        if not service_status or service_status.lower() != HAPPY_STATE:
            json_template['sourceStatus'] = constants.STATUS_CRITICAL

        # print json.dumps(json_template)
        ES.create_index_data(json_template)
    except Exception:
        logger.error('error parsing %s', constants.SOURCE_OKTA, exc_info=True)
        logger.error("-" * 100)
        logger.error(str(soup))
        logger.error("-" * 100)
        json_template['sourceStatus'] = constants.STATUS_CRITICAL
        ES.create_index_data(json_template)
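The Okta collector tries a list of candidate (tag, class) pairs until one matches, then compares the banner text against HAPPY_STATE, which the Adobe collector below reuses. Neither is defined in the excerpt; here is a hypothetical sketch, with both the selectors and the healthy wording as assumptions.

# Candidate (tag, CSS class) pairs for the overall status banner, tried in order.
# The class names are guesses, not taken from the real Okta page.
html_tags = [
    ('span', 'status-banner'),
    ('div', 'page-status'),
]

# Banner/status text (lowercased) that counts as healthy; anything else flips
# the source to STATUS_CRITICAL.
HAPPY_STATE = 'operational'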
Example 6
def run():
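    """Render the Adobe status page with Selenium and index the status of each cloud."""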
    driver.get(constants.URL_ADOBE)
    soup = BeautifulSoup(driver.page_source, 'html.parser')

    json_template = templates.get_json_template()
    json_template.update({
        'source': constants.SOURCE_ADOBE,
        'sourceUrl': constants.URL_ADOBE,
        'sourceStatus': constants.STATUS_GOOD,
    })
    try:
        service_status = constants.STATUS_GOOD

        cloud_name_list = soup.find_all('span', class_='cloud_name')
        status_list = soup.find_all('span', class_='status_text')

        for index, cloud_name in enumerate(cloud_name_list):
            service_name = cloud_name.text.strip()
            service_value = status_list[index].text.strip()
            json_template['services'].append({
                'name': service_name,
                'value': service_value
            })

            if service_value.lower() != HAPPY_STATE:
                service_status = constants.STATUS_CRITICAL

        json_template['sourceStatus'] = service_status

        # print json.dumps(json_template)
        ES.create_index_data(json_template)
    except Exception:
        logger.error('error parsing %s', constants.SOURCE_ADOBE, exc_info=True)
        logger.error("-" * 100)
        logger.error(str(soup))
        logger.error("-" * 100)
        json_template['sourceStatus'] = constants.STATUS_CRITICAL
        ES.create_index_data(json_template)
Example 7
def metrics():
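    """Index the Apica monitor group results as program-level metrics."""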
    data = {"data": apicaClient.get_monitor_results_group_view()}
    ES.create_index_data(index='program_data', doc_type='program', body=data)
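Each excerpt above exposes the same run() entry point, so wiring the collectors together only needs a small scheduler. The runner below is a hypothetical sketch; the module names and the polling interval are chosen purely for illustration.

import importlib
import logging
import time

logger = logging.getLogger(__name__)

# Assumed module names for the collectors above.
COLLECTORS = ['cloudhealth', 'aws', 'apica', 'netsuite', 'okta', 'adobe']


def run_all():
    # One failing collector should not stop the others.
    for name in COLLECTORS:
        try:
            importlib.import_module(name).run()
        except Exception:
            logger.error('collector %s failed', name, exc_info=True)


if __name__ == '__main__':
    while True:
        run_all()
        time.sleep(300)  # arbitrary five-minute polling interval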