Example #1
    def opener(self):
        # TVH_AUTH, PWD_MGR and DEBUG are module-level settings; `urllib`
        # here is the importing module's alias for urllib2 / urllib.request.
        handlers = []
        if TVH_AUTH == 'digest':
            handlers.append(urllib.HTTPDigestAuthHandler(PWD_MGR))
        elif TVH_AUTH == 'basic':
            handlers.append(urllib.HTTPBasicAuthHandler(PWD_MGR))
        else:
            # No explicit setting: register both handlers and let the
            # server's WWW-Authenticate challenge select the scheme.
            handlers.append(urllib.HTTPDigestAuthHandler(PWD_MGR))
            handlers.append(urllib.HTTPBasicAuthHandler(PWD_MGR))
        if DEBUG:
            handlers.append(urllib.HTTPSHandler(debuglevel=1))
        return urllib.build_opener(*handlers)
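A minimal usage sketch, assuming `urllib` is aliased to `urllib.request` and `PWD_MGR` is a prepared password manager (host, port and credentials below are illustrative, not from the original project):

import urllib.request as urllib

PWD_MGR = urllib.HTTPPasswordMgrWithDefaultRealm()
PWD_MGR.add_password(None, 'http://localhost:9981/', 'hts', 'secret')
TVH_AUTH, DEBUG = 'digest', False

# `client` stands in for an instance of the class that defines opener()
data = client.opener().open('http://localhost:9981/api/serverinfo').read()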
Example #2
    def __init__(self,
                 username='',
                 password='',
                 login_url='',
                 auth_type='digest',
                 rets_version='RETS/1.7.2',
                 user_agent='RETSDK/1.0'):
        """
        Sets up a connection to a RETS server and loads account options
        """
        self.headers = {'User-Agent': user_agent, 'RETS-Version': rets_version}

        # Get a base URL from the login URL
        parsed_url = urlparse(login_url)
        if parsed_url.scheme and parsed_url.netloc:
            base_url = parsed_url.scheme + "://" + parsed_url.netloc
        else:
            url_msg = "{0} is not a valid RETS Login URL".format(login_url)
            raise AuthenticationError(url_msg)

        # Set up an opener that can handle authentication
        pw_mgr = request.HTTPPasswordMgrWithDefaultRealm()
        pw_mgr.add_password(None, base_url, username, password)

        if auth_type == 'digest':
            handler = request.HTTPDigestAuthHandler(pw_mgr)
        elif auth_type == 'basic':
            handler = request.HTTPBasicAuthHandler(pw_mgr)
        else:
            raise AuthenticationError("auth_type must be 'basic' or 'digest'")

        opener = request.build_opener(handler)
        request.install_opener(opener)

        # Perform a login request to get server & account info
        login_response = self.__login(login_url)

        # Map RETS capability/metadata keys to instance attributes
        url_attrs = {
            'MetadataVersion': 'metadata_version',
            'MetadataTimestamp': 'metadata_timestamp',
            'MinMetadataTimestamp': 'min_metadata_timestamp',
            'Login': 'login_url',
            'Logout': 'logout_url',
            'Search': 'search_url',
            'GetMetadata': 'get_metadata_url',
            'GetObject': 'get_object_url',
            'Update': 'update_url',
            'PostObject': 'post_object_url',
        }
        for option in login_response['rows']:
            for key, val in option.items():
                if key in url_attrs:
                    setattr(self, url_attrs[key], val)
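A hedged usage sketch (the class name `RETSConnection` and the login URL are placeholders; consult the actual RETSDK docs for the real entry point):

conn = RETSConnection(username='user',
                      password='secret',
                      login_url='https://rets.example.com/rets/Login',
                      auth_type='digest')
print(conn.search_url)  # capability URL discovered at login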
Example #3
File: http.py  Project: project-asap/ewrt
def _getHTTPDigestAuthOpener(url, user, pwd):
    '''
    returns an auth handler capable of handling HTTP digest
    authentication (pass it to urllib2.build_opener() to get an opener)
    '''
    # The literal string 'realm' is registered as the realm; with
    # HTTPPasswordMgrWithDefaultRealm, passing None instead would make
    # these credentials the default for any realm at this URL.
    passwdmngr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    passwdmngr.add_password('realm', url, user, pwd)
    auth_handler = urllib2.HTTPDigestAuthHandler(passwdmngr)
    return auth_handler
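The returned handler still has to be wrapped in an opener before use; a minimal sketch (URL and credentials illustrative):

handler = _getHTTPDigestAuthOpener('http://example.com/', 'user', 'secret')
opener = urllib2.build_opener(handler)
content = opener.open('http://example.com/protected').read()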
Example #4
def get_page(url, user, pw):
    # `url_src` is the importing module's alias for urllib.request (or
    # urllib2); `closing` comes from contextlib.
    password_manager = url_src.HTTPPasswordMgrWithDefaultRealm()
    password_manager.add_password(None, url, user, pw)
    authhandler = url_src.HTTPDigestAuthHandler(password_manager)
    opener = url_src.build_opener(authhandler)
    url_src.install_opener(opener)
    with closing(url_src.urlopen(url)) as response:
        return response.read()
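Note that install_opener() replaces the process-wide default opener, so every later urlopen() call inherits these credentials. A variant that keeps the handler local instead (a sketch, reusing the assumed `url_src` alias):

def get_page_local(url, user, pw):
    password_manager = url_src.HTTPPasswordMgrWithDefaultRealm()
    password_manager.add_password(None, url, user, pw)
    opener = url_src.build_opener(url_src.HTTPDigestAuthHandler(password_manager))
    with closing(opener.open(url)) as response:
        return response.read()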
Example #5
def metrics_collector():
    resultjson = {}
    auth_handler = None  # guard: avoids a NameError below when no keys are set
    try:
        # group_id, public_key and private_key are module-level settings;
        # urlconnection is this plugin's alias for urllib.request.
        url = "https://cloud.mongodb.com/api/atlas/v1.0/groups/" + group_id + "/clusters/Cluster0?pretty=true"
        if public_key and private_key:
            password_mgr = urlconnection.HTTPPasswordMgrWithDefaultRealm()
            password_mgr.add_password(None, url, public_key, private_key)
            auth_handler = urlconnection.HTTPDigestAuthHandler(password_mgr)

        if auth_handler is not None:
            opener = urlconnection.build_opener(auth_handler)
            urlconnection.install_opener(opener)

        data = urlconnection.urlopen(url).read()
        data = json.loads(data)
        new_data = {}
        new_data["clustertype"] = data["clusterType"]
        new_data["disksize"] = data["diskSizeGB"]
        new_data["mongodb_majorversion"] = data["mongoDBMajorVersion"]
        new_data["mongodb_version"] = data["mongoDBVersion"]
        new_data["mongo_uri_updated"] = data["mongoURIUpdated"]
        new_data["name"] = data["name"]
        new_data["numshards"] = data["numShards"]
        new_data["pitenabled"] = data["pitEnabled"]
        new_data["provider_backup_enabled"] = data["providerBackupEnabled"]
        new_data["providername"] = data["providerSettings"]["providerName"]
        new_data["maxinstance_size"] = data["providerSettings"]["autoScaling"][
            "compute"]["maxInstanceSize"]
        new_data["mininstance_size"] = data["providerSettings"]["autoScaling"][
            "compute"]["minInstanceSize"]
        new_data["replication_factor"] = data["replicationFactor"]
        new_data["analytics_nodes"] = data["replicationSpec"]["ASIA_SOUTH_1"][
            "analyticsNodes"]
        new_data["electable_nodes"] = data["replicationSpec"]["ASIA_SOUTH_1"][
            "electableNodes"]
        new_data["priority"] = data["replicationSpec"]["ASIA_SOUTH_1"][
            "priority"]
        new_data["readonly_nodes"] = data["replicationSpec"]["ASIA_SOUTH_1"][
            "readOnlyNodes"]
        new_data["zonename"] = data["replicationSpecs"][0]["zoneName"]
        new_data["rootcert_type"] = data["rootCertType"]
        new_data["srvaddress"] = data["srvAddress"]
        new_data["statename"] = data["stateName"]

        return new_data

    except Exception as e:
        resultjson["msg"] = str(e)
        resultjson["status"] = 0
    return resultjson
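The same public/private API key pair drives digest auth for any Atlas endpoint; a standalone sketch of just the auth setup (the group ID and keys are illustrative placeholders):

import json
import urllib.request as urlconnection

url = "https://cloud.mongodb.com/api/atlas/v1.0/groups/GROUP_ID/clusters"
password_mgr = urlconnection.HTTPPasswordMgrWithDefaultRealm()
password_mgr.add_password(None, url, "PUBLIC_KEY", "PRIVATE_KEY")
opener = urlconnection.build_opener(urlconnection.HTTPDigestAuthHandler(password_mgr))
clusters = json.loads(opener.open(url).read())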
Example #6
def metrics_collector():
    resultjson = {}
    auth_handler = None  # guard: stays None when API keys are not configured
    try:
        url = "https://cloud.mongodb.com/api/atlas/v1.0/groups/" + group_id + "/processes/" + host + ":" + port + "/measurements?granularity=PT5M&period=PT5M&pretty=true"
        if public_key and private_key:
            password_mgr = urlconnection.HTTPPasswordMgrWithDefaultRealm()
            password_mgr.add_password(None, url, public_key, private_key)
            auth_handler = urlconnection.HTTPDigestAuthHandler(password_mgr)

        if auth_handler is not None:
            opener = urlconnection.build_opener(auth_handler)
            urlconnection.install_opener(opener)

        data = urlconnection.urlopen(url).read()
        data = json.loads(data)
        new_data = {}
        new_data["groupid"] = data["groupId"]
        new_data["hostid"] = data["hostId"]
        new_data["start"] = data["start"]
        new_data["end"] = data["end"]
        new_data["connections"] = data["measurements"][0]["dataPoints"][0][
            "value"]
        new_data["network_in"] = data["measurements"][1]["dataPoints"][0][
            "value"]
        new_data["network_out"] = data["measurements"][2]["dataPoints"][0][
            "value"]
        new_data["network_request"] = data["measurements"][3]["dataPoints"][0][
            "value"]
        new_data["opcounter_cmd"] = data["measurements"][4]["dataPoints"][0][
            "value"]
        new_data["opcounter_query"] = data["measurements"][5]["dataPoints"][0][
            "value"]
        new_data["opcounter_update"] = data["measurements"][6]["dataPoints"][
            0]["value"]
        new_data["opcounter_delete"] = data["measurements"][7]["dataPoints"][
            0]["value"]
        new_data["opcounter_getmore"] = data["measurements"][8]["dataPoints"][
            0]["value"]
        new_data["opcounter_insert"] = data["measurements"][9]["dataPoints"][
            0]["value"]
        new_data["logicalsize"] = data["measurements"][10]["dataPoints"][0][
            "value"]

        return new_data
    except Exception as e:
        resultjson["msg"] = str(e)
        resultjson["status"] = 0
        import traceback
        traceback.print_exc()
    return resultjson
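Indexing data["measurements"] by position is fragile if the API ever reorders its series; a more defensive variant keys them by name (a sketch; the measurement name below is an assumption about the Atlas response, not taken from the original):

points = {m["name"]: m["dataPoints"][0]["value"]
          for m in data["measurements"]}
new_data["connections"] = points.get("CONNECTIONS")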
Example #7
    def _install_auth(self):
        """setup digest auth"""
        realm = self._get_realm()

        uri = self.oob_info["ipmi"]
        username = self.username
        password = self.password

        # Built without a password manager, the handler creates a plain
        # HTTPPasswordMgr internally, so the realm here must match the
        # server's WWW-Authenticate challenge exactly.
        auth_handler = request.HTTPDigestAuthHandler()
        auth_handler.add_password(realm=realm,
                                  uri=uri,
                                  user=username,
                                  passwd=password)
        opener = request.build_opener(auth_handler)
        request.install_opener(opener)
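The realm itself can be read from the server's 401 challenge before authenticating; a hedged sketch of what a _get_realm() helper might do (endpoint illustrative):

import urllib.error
import urllib.request as request

def get_realm(url):
    try:
        request.urlopen(url)
    except urllib.error.HTTPError as err:
        challenge = err.headers.get("WWW-Authenticate", "")
        if 'realm="' in challenge:
            return challenge.split('realm="', 1)[1].split('"', 1)[0]
    return None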
Example #8
def wiki_scrapy():
    try:
        # NOTE: URLs and credentials were scrubbed from this example. The
        # urllib2 handler below is leftover dead code: it is never passed
        # to build_opener(), and add_password('') lacks the required realm,
        # uri, user and passwd arguments. The actual request is made with
        # requests + HTTPDigestAuth, which likewise needs a username and
        # password to work.
        auth = urllib2.HTTPDigestAuthHandler()
        auth.add_password('')

        time.sleep(2)
        url = ""
        req = requests.get(url, timeout=10, auth=HTTPDigestAuth())
        soup = BeautifulSoup(req.text, 'html.parser', from_encoding="EUC-JP")

        find_div = soup.find('div', {
            'id': 'menubar'
        }).find('div').find_all('a')
        link_box_all = []
        for lists in find_div:
            title = lists.get('title')
            link = lists.get('href')
            link_box = [title, link]
            link_box_all.append(link_box)
    except Exception:
        link_box_all = [['No Data or error', '']]

    # print(link_box_all)

    try:
        find_div2 = soup.find('div', {'id': 'menubar'}).find('div', {'id': 'treeview'}) \
            .find_all('li', {'class': 'collapsed'})
        for i in range(len(find_div2)):
            zemi = find_div2[i].find('a', {'title': 'ゼミ'})
            # print(type(zemi))
            # print(isinstance(zemi, type(None)))
            if zemi is not None:
                find_in_col = find_div2[i].find('ul', {
                    'style': 'display:none;'
                }).find_all('a')
                break
        link_box_all2 = []
        for lists2 in find_in_col:
            title2 = lists2.get('title')
            link2 = lists2.get('href')
            link_box2 = [title2, link2]
            link_box_all2.append(link_box2)
    except Exception:
        link_box_all2 = [['No Data or error', '']]

    try:
        list_temp = [data_str[0] for data_str in link_box_all2]
        date_list = [str_temp.split('/')[1] for str_temp in list_temp]
        today_reg = time.strftime("%Y-%m-%d", time.localtime())
        date_list_new = date_list + [today_reg]
        date_list_sorted = sorted(date_list_new)
        today_index = date_list_sorted.index(today_reg)
        show_date_index = today_index + 1
        next_zemi_date = date_list_sorted[
            show_date_index]  # find the next zemi date
        url_index = date_list.index(next_zemi_date)
        next_zemi_url = "" + (link_box_all2[url_index][1])[1:]
    except Exception:
        next_zemi_date = "x"
        # next_zemi_url is left unbound on this path; the NameError it
        # causes in the next block is swallowed there, which sets the
        # "#" fallback.

    try:
        time.sleep(2)
        # number_list = []
        # zemi_content_list = []
        req_next = requests.get(next_zemi_url,
                                timeout=10,
                                auth=HTTPDigestAuth())
        soup_next = BeautifulSoup(req_next.text,
                                  'html.parser',
                                  from_encoding="EUC-JP")
        zemi_title = soup_next.find('td', {
            'valign': 'top'
        }).find('h3', {
            'id': 'content_1_0'
        }).text
        # zemi_content = soup_next.find('td', {'valign': 'top'}).find_all('ul', {'class': 'list1'})
        zemi_content2 = soup_next.find('td', {
            'valign': 'top'
        }).find('div', {
            'id': 'body'
        }).find_all('ul', {'class': 'list1'})

    # print(number_list)
    except Exception:
        zemi_title = "No title"
        zemi_content2 = ["No data of next zemi content."]
        next_zemi_url = "#"
    #     number_list = ["Nobody"]

    try:
        time.sleep(2)
        url_rss = ""
        rss_content = requests.get(url_rss, timeout=10, auth=HTTPDigestAuth())
        soup_rss = BeautifulSoup(rss_content.text,
                                 'html.parser',
                                 from_encoding="EUC-JP")
        first_rss = soup_rss.find('dc:date').text
        # first_rss
        # print('rss',first_rss)
    except Exception:
        first_rss = ""
        # pass
    # print("---->", zemi_title)
    # print("---->", zemi_content_list)
    # print("---->", number_list)
    return link_box_all, next_zemi_date, next_zemi_url, zemi_content2, zemi_title, first_rss
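Stripped of the scraping logic, the digest flow Example #8 actually relies on is just requests plus HTTPDigestAuth (URL and credentials illustrative, since the originals were scrubbed):

import requests
from requests.auth import HTTPDigestAuth

resp = requests.get("http://wiki.example.jp/index.php", timeout=10,
                    auth=HTTPDigestAuth("user", "secret"))
resp.raise_for_status()
print(resp.text[:200])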