Example #1
0
def download_url(urlpath, output_dir=".", binary=False):
    """Download *urlpath* and save it under *output_dir*.

    :param urlpath: URL to fetch.
    :param output_dir: target directory; falls back to the CWD (with a
        logged error) when the directory does not exist.
    :param binary: open the output file in binary mode when True.
    :returns: the path of the written file.
    :raises Exception: on a non-200 response or if the file was not written.
    """
    http = PoolManager()
    req = http.request("GET", urlpath)
    if req.status != 200:
        raise Exception("Could not get file from " + urlpath)

    parsed = urlparse(urlpath)
    filename = os.path.basename(parsed.path)
    writemod = "wb" if binary else "w"

    contents = req.data
    if output_dir != ".":
        if not os.path.exists(output_dir):
            log.error("{0} does not exist".format(output_dir))
            log.error("Writing file to {0}".format(os.getcwd()))
        else:
            # BUG FIX: was "/".join(...), which breaks on Windows paths.
            filename = os.path.join(output_dir, filename)
    # req.data is bytes, so a text-mode write raises TypeError; retry in
    # binary mode.  BUG FIX: the original re-opened the same file *inside*
    # the first (still open) handle's with-block; now the fallback happens
    # after the first handle is closed.
    try:
        with open(filename, writemod) as downloaded:
            downloaded.write(contents)
    except TypeError:
        with open(filename, "wb") as downloaded:
            downloaded.write(contents)
    if not os.path.exists(filename):
        raise Exception("Could not write to {}".format(filename))
    return filename
def urllib3_test():
    from urllib3 import PoolManager
    import time, random

    url = ['http://sist.swjtu.edu.cn:8080/MicroElec/feng/index.asp',
           'http://sist.swjtu.edu.cn:8080/MicroElec/feng/contact/contact.asp,'
           'http://sist.swjtu.edu.cn:8080/MicroElec/feng/Templates/%E5%A4%8D%E4%BB%B6%20CSS/',
           'http://sist.swjtu.edu.cn:8080/MicroElec/index.asp',
           'http://sist.swjtu.edu.cn:8080/My%20Documents/Documents%20and%20Settingsjiangtigang%E6%A1%8C%E9%9D%A2ImcGroupHomepageNewImcWebYuan%20Ding"%20l']

    errors = 0
    oks = 0
    p = PoolManager(20)
    for a in url:
        try:
            p.request(method="GET", url=a)
            s = random.randint(0,10)
            time.sleep(s)
            print s
            oks += 1
        except:
            errors += 1
            continue

        print ".",
    print
    print "Errors: %s | Oks: %s." % (str(errors), str(oks))
Example #3
0
def getAmqpStats(helper):
    """Print a table of RabbitMQ channel stats from the management API.

    :param helper: plugin helper object; only helper.args.debug is read here.
    """
    from urllib3 import PoolManager, util
    try: 
        # Credentials are masked in this snippet.
        username = "******"
        password = "******"

        headers = util.make_headers(basic_auth = username + ":" + password)

        # NOTE(review): the URL has no scheme ('upsilon:15672/...') — confirm
        # the installed urllib3 version accepts this form.
        http = PoolManager()
        r = http.request("GET", "upsilon:15672/api/channels", headers = headers)

        channels = json.loads(r.data)


        tbl = PrettyTable(["Connection", "username", "Unacked", "Publish"])

        if helper.args.debug:
            print json.dumps(channels, indent = 4);

        # One row per channel; the Publish column is hard-coded to 0.
        for conn in channels:
            tbl.add_row([conn['name'], conn['user'], conn['messages_unacknowledged'], 0])

        print tbl

    except Exception as e:
        print str(e)
Example #4
0
class APISent(object):
    """Sentiment features using API tools.

    Interacts with web and therefore needs urllib3. Might be _very_ slow,
    use with caution and preferably store features.

    Parameters
    ----------
    mode : string, optional, default 'deep'
        Can be either 'deep' for Twitter-based neural sentiment (py2, boots
        local server instance), or 'nltk' for the text-processing.com API.

    Examples
    --------
    >>> sent = APISent()
    >>> sent.transform("you're gonna have a bad time")
    ... 0.030120761495050809
    >>> sent = APISent(mode='nltk')
    >>> sent.transform("you're gonna have a bad time")
    ...

    Notes
    -----
    Implemented by: Chris Emmery
    Deep sentiment: https://github.com/xiaohan2012/twitter-sent-dnn
    NLTK API: http://text-processing.com
    """

    def __init__(self, mode='deep'):
        """Load poolmanager and set API location."""
        from urllib3 import PoolManager
        self.name = 'apisent'
        self.mode = mode
        self.pool = PoolManager()

    def __str__(self):
        """String representation for APISent."""
        return '''
        feature:    {0}
        mode:       {1}
        '''.format(self.name, self.mode)

    def transform(self, raw, _):
        """Return a dictionary of feature values.

        Parameters
        ----------
        raw : str
            Text to score.
        _ : object
            Unused; presumably kept for signature compatibility with
            sibling feature extractors — confirm against callers.
        """
        if self.mode == 'deep':
            # POST the text as JSON to a locally running twitter-sent-dnn server.
            jsf = json.dumps({'text': raw})
            header = {'content-type': 'application/json'}
            request = "http://localhost:6667/api"
            r = self.pool.request('POST', request, headers=header, body=jsf)
            out = {'deepsent': float(r.data.decode('utf-8'))}
        elif self.mode == 'nltk':
            # text-processing.com expects a form-encoded 'text' field.
            qf = urlencode({'text': raw})
            request = "http://text-processing.com/api/sentiment/"
            r = self.pool.request('POST', request, body=qf)
            try:
                out = json.loads(r.data.decode('utf-8'))["probability"]
            except ValueError:
                # Non-JSON response, typically the API's rate-limit page.
                exit("SentAPI threw unexpected response, " +
                     "probably reached rate limit.")
        return out
Example #5
0
def check_vul(url):
    """
    Test if a GET to a URL is successful
    :param url: The URL to test
    :return: A dict with the exploit type as the keys, and the HTTP status code as the value
    """
    # Bulk scan modes use tighter timeouts and a single retry; gl_args is a
    # module-level options object not visible in this snippet.
    if gl_args.mode == 'auto-scan' or gl_args.mode == 'file-scan':
        timeout = Timeout(connect=1.0, read=3.0)
        pool = PoolManager(timeout=timeout, retries=1, cert_reqs='CERT_NONE')
    else:
        timeout = Timeout(connect=3.0, read=6.0)
        pool = PoolManager(timeout=timeout, cert_reqs='CERT_NONE')

    # Rewrite the URL to https when the port contains 443 but the scheme isn't https.
    url_check = parse_url(url)
    if '443' in str(url_check.port) and url_check.scheme != 'https':
        url = "https://"+str(url_check.host)+":"+str(url_check.port)

    print(GREEN + "\n ** Checking Host: %s **\n" % url)

    # Randomize the User-Agent from the module-level user_agents list.
    headers = {"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
               "Connection": "keep-alive",
               "User-Agent": user_agents[randint(0, len(user_agents) - 1)]}

    # Candidate JBoss endpoints; the values are overwritten below with the
    # HTTP status code observed for each path.
    paths = {"jmx-console": "/jmx-console/HtmlAdaptor?action=inspectMBean&name=jboss.system:type=ServerInfo",
             "web-console" 	: "/web-console/ServerInfo.jsp",
             "JMXInvokerServlet": "/invoker/JMXInvokerServlet",
             "admin-console" : "/admin-console/"}

    for i in paths.keys():
        if gl_interrupted: break
        try:
            print(GREEN + " * Checking %s: \t" % i + ENDC),
            r = pool.request('HEAD', url +str(paths[i]), redirect=False, headers=headers)
            paths[i] = r.status

            # check if it's false positive
            if len(r.getheaders()) == 0:
                print(RED + "[ ERROR ]\n * The server %s is not an HTTP server.\n" % url + ENDC)
                # 505 is used throughout as the "could not check" marker.
                paths = {"jmx-console": 505,
                         "web-console": 505,
                         "JMXInvokerServlet": 505,
                         "admin-console": 505}
                break


            if paths[i] in (301, 302, 303, 307, 308):
                url_redirect = r.get_redirect_location()
                print(GREEN + "[ REDIRECT ]\n * The server sent a redirect to: %s\n" % url_redirect)
            elif paths[i] == 200 or paths[i] == 500:
                if i == "admin-console":
                    print(RED + "[ EXPOSED ]" + ENDC)
                else:
                    print(RED + "[ VULNERABLE ]" + ENDC)
            else:
                print(GREEN + "[ OK ]")
        except:
            # Any connection failure marks this path as unverifiable (505).
            print(RED + "\n * An error occurred while connecting to the host %s\n" % url + ENDC)
            paths[i] = 505

    return paths
Example #6
0
 def run(self):
     """GET self.url via the shared pool, falling back on host change.

     NOTE(review): `response` is neither stored nor returned in the
     visible code — confirm this snippet is complete.
     """
     try:
         response = self.pool.request("GET", self.url, headers=self.headers)
     except HostChangedError, e:
         # Redirect, give up on managing resources ourselves, just get the
         # file
         managed_pool = PoolManager(1)
         response = managed_pool.request("GET", e.url, headers=self.headers)
Example #7
0
def getUrl(url):
    """Return the page at *url* parsed into an lxml HTML tree.

    (Docstring translated from Russian: "Returns the content of the page
    at the given url".)

    Returns None when the request exceeds the maximum of 5 retries.
    """
    try:
        http = PoolManager()
        r = http.request('GET', url,
                         timeout=Timeout(connect=2.0, read=5.0),
                         retries=Retry(5, redirect=False)
                         )
        return html.fromstring(r.data)
    except urllib3.exceptions.MaxRetryError:
        # Runtime message intentionally left in Russian:
        # "Maximum number of attempts (5) exceeded".
        print('Превышено максимальное число попыток (5):', url)
        return None
Example #8
0
    def getSequential(self, urls_headers):
        conn_pool = connection_from_url(urls_headers[0][0], maxsize=CONNECTIONS_PER_HOST)
        responses = []

        for url, headers in urls_headers:
            try:
                response = conn_pool.request("GET", url, headers=headers)
            except HostChangedError, e:
                # Redirect, give up on managing resources ourselves, just get the
                # file
                managed_pool = PoolManager(1)
                response = managed_pool.request("GET", e.url, headers=headers)
            responses.append((url, response))
Example #9
0
 class __ProviderManager(QtCore.QObject):
     """Holds loaded search providers and dispatches queries to the Engine."""

     # Emitted with an HTML-formatted message when a provider fails to load.
     sendError = QtCore.pyqtSignal(str)
     
     def __init__(self):
         QtCore.QObject.__init__(self)
         self.providers = {}
         self.poolManager = PoolManager(timeout=Timeout(10),
                                        headers={'Accept-Encoding': 'gzip,deflate'})
         self.engine = Engine()
     
     def loadProviderFromFile(self,path):
         """Load a JSON provider definition from *path*, keyed by its "name"."""
         try:
             providerFile = open(path,mode="r")
             provider = json.loads(providerFile.read())
             providerFile.close()
             self.providers[provider["name"]] = provider
         except Exception as e:
             self.sendError.emit("cannot load provider at '"+path+"' <br/><b>Reason:</b> "+str(e))
             
     def loadProviderFromUrl(self,url):
         """Download a JSON provider definition from *url*, keyed by its "name"."""
         try:
             req = self.poolManager.request("GET", url)
             provider = json.loads(req.data.decode('utf-8'))
             self.providers[provider["name"]] = provider
             del req
         except Exception as e:
             self.sendError.emit("cannot load provider at '"+url+"' <br/><b>Reason:</b> "+str(e))
             
     def queryProvider(self,text,category,pages,providerName,perPageCallback=None,whenDoneCallback=None):#Exceptions here should be managed by the caller
         return self.engine.makeQuery(self.providers[providerName], text, category, pages,perPageCallback,whenDoneCallback)
     
     def reset(self):
         """Forget all loaded providers."""
         if  self.providers:
             self.providers.clear() 
Example #10
0
 def __init__(self):
     """Set up browser-like headers, the IP-echo endpoint, and the HTTP pool."""
     ua = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.0.7) Gecko/2009021910 Firefox/3.0.7'
     self.headers = {'User-Agent': ua}
     self.ip_url = 'http://icanhazip.com/'
     # 10 pools, bounded retries, 30s total timeout per request.
     retry_policy = Retry(connect=5, read=25, redirect=5)
     self.agent = PoolManager(10, retries=retry_policy,
                              timeout=Timeout(total=30.0))
Example #11
0
class Do:
    def __init__(self, url, **kwargs):
        self.url = url
        self.http = PoolManager()
        self.connect_timeout = kwargs['connect_timeout'] if kwargs['connect_timeout'] else 2.0
        self.read_timeout = kwargs['read_timeout'] if kwargs['read_timeout'] else 2.0
        self.available_status = kwargs['available_status'] if kwargs['available_status'] else '200,'
        self.alert_mail = kwargs['alert_mail']

        self._get()

    def _get(self):
        r = None
        try:
            start = time.perf_counter()
            r = self.http.request('GET', self.url, timeout=Timeout(connect=self.connect_timeout, read=self.read_timeout))
            end = time.perf_counter()
        except Exception as ex:
            send_mail(self.alert_mail, 'Request {} failed'.format(self.url), str(ex))

        available_status = self.available_status.split(',')

        if not r:
            return None
        if not hasattr(r, 'status'):
            return None

        if str(r.status) in available_status:
            logger.info(self.url, end-start)
        else:
            send_mail(self.alert_mail, '{} :{}'.format(self.url, r.status), '{}\r\n{}\r\n{}'.format(r.status, r.data, end-start))
Example #12
0
    def download(self):
        """Download self.url and save the body under DOWNLOAD_DIR.

        :returns: DOWNLOAD_RESULT['SUCCESS'] or DOWNLOAD_RESULT['FAIL'].
        """
        if self.url is None or self.url == '':
            return DOWNLOAD_RESULT['FAIL']

        ##################### Start Download Web Page #####################
        printState(hint="Connecting", msg=self.url)
        parse_url = parseURL(self.url)
        scheme = parse_url.scheme
        (filename, filetype) = getFileInURL(parse_url.path)

        timeout = Timeout(connect=2., read=7.)
        # BUG FIX: the original tested `scheme.lower() is 'https'` — an
        # identity comparison, not string equality — so the certificate-
        # verifying branch was unreliable.  Use `==` instead.
        if scheme.lower() == 'https':
            http = PoolManager(
                cert_reqs='CERT_REQUIRED',
                ca_certs=certifi.where(),
                timeout=timeout
            )
        else:
            http = PoolManager(timeout=timeout)

        try:
            r = http.request('GET', self.url)
            printState(hint='Establish', msg=self.url)

        except SSLError as e:
            printFail(hint="SSL Error", msg=self.url)
            return DOWNLOAD_RESULT['FAIL']
        except MaxRetryError as e:
            printFail(hint="Resolve Error", msg=self.url)
            return DOWNLOAD_RESULT['FAIL']
        ##################### End #####################

        ##################### Start Save Web Page #####################
        if isNormalConn(r.status):
            try:
                file_name = save(data=r.data, filename=filename, dir=DOWNLOAD_DIR)
            except AttributeError as e:
                printFail(hint="Save file fail in", msg=self.url)
                return DOWNLOAD_RESULT['FAIL']
            URL_VISITED_FILE_LIST.put(file_name)

        URL_VISITED_LIST.append(self.url)
        printSuccess(hint="Finish", msg=self.url)
        # Reset state so the worker can be handed a fresh URL.
        self.url = None
        self.fail_time = 0
        return DOWNLOAD_RESULT['SUCCESS']
Example #13
0
def urllib3_test():
    """Request every URL in the `url` list and print error/success counts.

    NOTE(review): `url` is not defined in this function and no module-level
    definition is visible here — confirm it exists where this snippet
    originates, otherwise this raises NameError.
    """
    from urllib3 import PoolManager

    errors = 0
    oks = 0
    p = PoolManager(20)
    for a in url:
        try:
            p.request(method="GET", url=a)
            oks += 1
        except:
            errors += 1
            continue

        print ".",
    print
    print "Errors: %s | Oks: %s." % (str(errors), str(oks))
Example #14
0
    def closed(self, reason):
        """Spider-closed hook: POST every discovered onion URL to ahmia.fi.

        :param reason: shutdown reason passed by the crawler (unused here).
        """
        self.log("\n\n *** finder spider completed ***\n  ", level=log.INFO)
        self.log("number of unique onions queued: " + str(len(self.onions)), level=log.INFO)
        pool = PoolManager()

        for onion in self.onions:
            onion = onion.replace("https:", "http:")
            onion = onion.replace(".onion", ".onion/")
            # NOTE(review): truncates to 30 chars — presumably to strip any
            # path after the onion root; confirm against ahmia's API.
            onion = onion[:30]
            # BUG FIX (shadowing): the local was named `json`, hiding the
            # json module inside this method; renamed to `payload`.  The
            # hand-built body is kept byte-identical; note a quote in
            # `onion` would break the JSON.
            payload = '{"url":"' + onion + '"}'
            self.log("POSTing onion to ahmia: " + payload)

            post_url = "https://ahmia.fi/address/"
            content_type = {"Content-Type": "application/json"}
            req = pool.urlopen("POST", post_url, headers=content_type, body=payload)
            if req.status != 200 and req.status != 403:
                self.log("Failed to POST " + onion + " server responded with HTTP " + str(req.status), level=log.ERROR)
Example #15
0
    def __init__(self, url, **kwargs):
        """Store the target URL and options, then fetch immediately.

        Keyword options: connect_timeout (2.0), read_timeout (2.0),
        available_status ('200,'), alert_mail.
        """
        self.url = url
        self.http = PoolManager()
        # ROBUSTNESS FIX: kwargs.get() tolerates missing keys (the original
        # kwargs['k'] raised KeyError); falsy values still fall back to the
        # defaults exactly as before.
        self.connect_timeout = kwargs.get('connect_timeout') or 2.0
        self.read_timeout = kwargs.get('read_timeout') or 2.0
        self.available_status = kwargs.get('available_status') or '200,'
        self.alert_mail = kwargs.get('alert_mail')

        self._get()
Example #16
0
    def _get_report_file(self, url):
        """Download a Canvas report file and return its raw body.

        :raises DataFailureException: on any non-200 response.
        """
        # Ensure file url matches the hostname in settings,
        # workaround for Canvas bug help.instructure.com/tickets/362386
        url = re.sub(r'^https://[^/]+', settings.RESTCLIENTS_CANVAS_HOST, url)

        timeout = getattr(settings, "RESTCLIENTS_TIMEOUT", 15.0)
        cafile = getattr(settings, "RESTCLIENTS_CA_BUNDLE",
                         "/etc/ssl/certs/ca-bundle.crt")
        # NOTE(review): socket_timeout/retries are forwarded to the
        # underlying connection pools — confirm both are accepted by the
        # installed urllib3 version's PoolManager.
        pool_manager = PoolManager(cert_reqs="CERT_REQUIRED",
                                   ca_certs=cafile,
                                   socket_timeout=timeout,
                                   retries=5)

        response = pool_manager.request("GET", url)

        if response.status != 200:
            raise DataFailureException(url, response.status, response.data)

        return response.data
Example #17
0
 def __init__(self):
     """Initialise the Qt base class, the HTTP pool, and default settings."""
     QtCore.QObject.__init__(self)
     
     self.__pm = PoolManager(timeout=Timeout(10),
                             headers={'Accept-Encoding': 'gzip,deflate'})
     self.__settings = None
     # Defaults applied when a stored configuration lacks the matching key.
     self.providersListDefaultValue = "https://raw.githubusercontent.com/cr0wbar/fishnet-providers/master/providers.json"
     self.downloadProvidersListAtStartupDefaultValue = True
     self.providersDefaultValue = {"remote":[],"local":[]}
     self.pagesDefaultValue = 3
Example #18
0
class Hatebu(object):
 
    http = None
 
    def __init__(self):
        user_agent = {'user-agent': 'Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0'}
 
        self.http = PoolManager(headers=user_agent)
 
    def get_rss_data(self, word, threshold):
        """
        はてブから キーワード( = word)、ブックマーク数 (= threshold)を基に
        xmlデータを取得してくるメソッド
        """
 
        url = urllib.request.quote("b.hatena.ne.jp/keyword/{0}?mode=rss&sort=current&threshold={1}".format(str(word), str(threshold)))
 
        response = self.http.request_encode_url('GET', url)
        result = response.data.decode('utf-8')
 
        return result
 
    def parse_xml_data(self, xml):
        """
        取得してきたXMLデータを解析して必要な情報のみを抜き出す。
        """
 
        result = []
 
 
        feed = feedparser.parse(xml)
 
        """
        各ブックマークは、XMLデータのentriesタグの中にItemタグ単位で保存されている。
        feed["entries"]でentriesの中から一つずつItemを取り出し、dataに格納する。
        dataは、Itemタグ内のtitleやdateの情報がparse関数によってdict型に変換されて
        格納されているため、data["title"]などで必要な情報が得られる。
        """

        for data in feed["entries"]:

            # hatebu_bookmarkcountの項目がない場合があるため、項目がある場合のみ取得 
            if "hatena_bookmarkcount" in data.keys():
 
                tmp = dict(title=data["title"],
                           date=data["date"],
                           url=data["links"][0]["href"],
                           bookmark_count=data["hatena_bookmarkcount"])
                result.append(tmp)
 
        # resultにブックマークを格納した時に、取得した順と逆に格納されてしまうため、取得順になるように reversed関数で配列を逆順にしている。
        return reversed(result)
Example #19
0
class PlainUtility():
    """Thin urllib3 wrapper: GET a URL and report the caller's public IP."""

    def __init__(self):
        """Configure browser-like headers, a logger, and a retrying pool."""
        agent_string = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.0.7) Gecko/2009021910 Firefox/3.0.7'
        self.headers = {'User-Agent': agent_string}
        self.ip_url = 'http://icanhazip.com/'
        self.logger = logging.getLogger('gkp')
        self.agent = PoolManager(
            10,
            retries=Retry(connect=5, read=5, redirect=5),
            timeout=Timeout(total=30.0))

    def current_ip(self):
        """Return the public IP as reported by icanhazip.com."""
        return self.request(self.ip_url)

    def request(self, url):
        """GET *url*; return the body on HTTP 200, else log and return None."""
        response = self.agent.request('GET', url)
        if response.status != 200:
            self.logger.error('status %s' % response.status)
            return None
        return response.data
Example #20
0
    def __init__(self, key, trk_host=KISSmetrics.TRACKING_HOSTNAME,
                 trk_scheme=KISSmetrics.TRACKING_SCHEME):
        """Initialize client for use with KISSmetrics API key.

        :param key: API key for product, found on the
                    "KISSmetrics Settings".
        :type key: str
        :param trk_host: tracking host for requests; defaults
                         production tracking service.
        :param trk_scheme: the scheme for requests; either `'http'`
                           or `'https'`.
        :raises ValueError: if trk_scheme is not 'http' or 'https'.

        """
        self.key = key
        if trk_scheme not in ('http', 'https'):
            raise ValueError('trk_scheme must be one of (http, https)')
        self.http = PoolManager()
        self.trk_host = trk_host
        self.trk_scheme = trk_scheme
Example #21
0
from multiprocessing import Process, freeze_support
import time, socket, random
from flask import request, make_response
from urllib3 import PoolManager
from dophon import properties
from dophon_logger import *

logger = get_logger(DOPHON)

logger.inject_logger(globals())

ports = []  # record the ports being listened on (translated from Chinese)

# Cluster registry keyed elsewhere in this module.
proxy_clusters = {}

# Shared pool used by redirect_request below.
pool = PoolManager()


def main_freeze():
    """Enable multiprocessing support for frozen (e.g. PyInstaller) builds."""
    freeze_support()


def redirect_request():
    """Proxy the incoming Flask request to a randomly chosen local port."""
    logger.info('touch path: %s [success]' % (request.path))
    # NOTE(review): the target URL has no scheme ('127.0.0.1:<port>/...') —
    # confirm the installed urllib3 accepts this form; it may need 'http://'.
    res = pool.request(request.method, '127.0.0.1:' + str(random.choice(ports)) + request.path,
                       fields=request.json if request.is_json else request.form)
    return make_response(res.data)


def outer_entity(boot):
    # 重写路由信息(修改为重定向路径)
Example #22
0
 def pool(self):
     """Build a PoolManager from this object's pool-count and timeout settings."""
     io_timeout = Timeout(connect=self._conn_timeout, read=self._read_timeout)
     return PoolManager(num_pools=self._num_pools, timeout=io_timeout)
Example #23
0
def rastrear(codigo):
    """Query the linketrack API for tracking code *codigo*; return the raw JSON body."""
    disable_warnings()
    manager = PoolManager()
    endpoint = 'https://linketrack.com/' + codigo + '/json'
    response = manager.request('GET', endpoint)
    return response.data
Example #24
0
    class __Settings(QtCore.QObject):
        """Application settings holder; also downloads and registers providers."""

        # Emitted with an HTML-formatted message on load/save failures.
        sendError = QtCore.pyqtSignal(str)
        # Emitted as (percent, message) while providers are being loaded.
        providerLoading = QtCore.pyqtSignal(int,str)
        
        def __init__(self):
            QtCore.QObject.__init__(self)
            
            self.__pm = PoolManager(timeout=Timeout(10),
                                    headers={'Accept-Encoding': 'gzip,deflate'})
            self.__settings = None
            # Defaults applied by __checkSettingsSanity when keys are missing.
            self.providersListDefaultValue = "https://raw.githubusercontent.com/cr0wbar/fishnet-providers/master/providers.json"
            self.downloadProvidersListAtStartupDefaultValue = True
            self.providersDefaultValue = {"remote":[],"local":[]}
            self.pagesDefaultValue = 3
        
        def __checkSettingsSanity(self):
            """Fill in any missing settings keys with their default values."""
            if not self.__settings:
                self.__settings = {}
            if not "downloadProvidersListAtStartup" in self.__settings:
                self.__settings["downloadProvidersListAtStartup"] = self.downloadProvidersListAtStartupDefaultValue
            if not "providersList" in self.__settings:
                self.__settings["providersList"] = self.providersListDefaultValue
            if not "providers" in self.__settings:
                self.__settings["providers"] = self.providersDefaultValue
            if not "pages" in self.__settings:
                self.__settings["pages"] = self.pagesDefaultValue              
            
        def getSettings(self):
            return self.__settings
        
        def loadConfiguration(self,path):
            """Load settings JSON from *path*; always sanity-fill afterwards."""
            if not isfile(path):
                self.__checkSettingsSanity()
            else:
                
                try:
                    sfile = open(path,"r")
                    self.__settings = loads(sfile.read())
                    sfile.close()
                except IOError as e:
                    self.sendError.emit("Cannot open configuration at '"+path+"' <br/><b>Reason:<b/>" + str(e))
                finally:
                    self.__checkSettingsSanity()

        
        def loadProviders(self):
            #Check if there is a url for the list of providers
            #If there is and we are supposed to download the list,
            #then proceed to download the list.
            #Then download each provider in the downloaded list
            providerManager = ProviderManager().instance
              
            providerManager.reset()
            
            if self.__settings and "downloadProvidersListAtStartup" in self.__settings and self.__settings["downloadProvidersListAtStartup"]:
                
                try:#Load remote list of providers
                    r = self.__pm.urlopen("GET", self.__settings["providersList"])
                    self.providerLoading.emit(10,"Loading list of providers")
                    providersList = loads(r.data.decode("utf-8"))
                    if "list" in providersList:
                        self.__settings["providers"]["remote"].clear()
                        for providerUrl in providersList["list"]:
                            self.__settings["providers"]["remote"].append(providerUrl)
                            
                except Exception as e:
                    self.sendError.emit("cannot retrieve the list of providers at '"+self.__settings["providersList"]+"'<br/><b>Reason:</b> "+str(e))       

            #Load stored remote providers
            
            totalNumberOfProviders = 0
            providersLoaded = 0
            
            if self.__settings and "remote" in self.__settings["providers"]:
                totalNumberOfProviders+=len(self.__settings["providers"]["remote"])
            if self.__settings and "local" in self.__settings["providers"]:
                totalNumberOfProviders+=len(self.__settings["providers"]["local"])
                    
            if self.__settings and "remote" in self.__settings["providers"]:
                for remoteProvider in self.__settings["providers"]["remote"]:
                    providersLoaded+=1
                    # Progress runs from 10% to 100% across all providers.
                    self.providerLoading.emit(int(10.+float(providersLoaded)/float(totalNumberOfProviders)*90.),
                                              "Loading provider at "+remoteProvider)
                    providerManager.loadProviderFromUrl(remoteProvider)
                         
            #Load stored local providers
            if self.__settings and "local" in self.__settings["providers"]:
                for localProvider in self.__settings["providers"]["local"]:
                    providersLoaded+=1
                    self.providerLoading.emit(int(10.+float(providersLoaded)/float(totalNumberOfProviders)*90.),
                                              "Loading provider at "+localProvider)
                    providerManager.loadProviderFromFile(localProvider)
                    
        def setDefaultProvider(self,provider):
            self.__settings["defaultProvider"] = provider
            
        def writeConfiguration(self,path):
            """Serialize the current settings to *path* as JSON."""
            try:
                sfile = open(path,"w")
                sfile.write(dumps(self.__settings))
                sfile.close()
            except IOError as e:
                self.sendError.emit("cannot open configuration at '"+Globals.configurationPath+"'<br/><b>Reason:</b> " + str(e))
Example #25
0
class NanoHandle:
    """Handle for a single BoonNano pod instance.

    Wraps the REST endpoints (instance lifecycle, configuration, data
    loading, clustering runs and result retrieval).  Methods return
    ``(success, response)`` tuples rather than raising for expected
    server-side failures.
    """

    def __init__(self,
                 license_id='default',
                 license_file="~/.BoonLogic.license",
                 timeout=120.0,
                 verify=True,
                 cert=None):
        """Primary handle for BoonNano Pod instances

        This is the primary handle to manage a nano pod instance.

        Args:
            license_id (str): license identifier label found within the .BoonLogic.license configuration file
            license_file (str): path to .BoonLogic license file
            timeout (float): read timeout for http requests
            verify:  Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use
            cert: None/False to skip client certificates, True to require them, or a string path to an ssl client cert file (.pem)

        Environment:
            BOON_LICENSE_FILE: sets license_file path
            BOON_LICENSE_ID: sets license_id
            BOON_API_KEY: overrides the api-key as found in .BoonLogic.license file
            BOON_API_TENANT: overrides the api-tenant as found in .BoonLogic.license file
            BOON_SERVER: overrides the server as found in .BoonLogic.license file
            PROXY_SERVER: overrides the proxy server as found in .BoonLogic.license file
            BOON_SSL_CERT: path to ssl client cert file (.pem)
            BOON_SSL_VERIFY: Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use

        Raises:
            BoonException: when the license file is missing, malformed, or
                lacks a required key

        Example:
            ```python
            try:
                nano = bn.NanoHandle()
            except bn.BoonException as be:
                print(be)
                sys.exit(1)
            ```

        """
        self.license_id = None
        self.api_key = None
        self.api_tenant = None
        self.instance = ''
        self.numeric_format = ''

        # environment variables take precedence over constructor arguments
        env_license_file = os.environ.get('BOON_LICENSE_FILE', None)
        env_license_id = os.environ.get('BOON_LICENSE_ID', None)
        env_api_key = os.environ.get('BOON_API_KEY', None)
        env_api_tenant = os.environ.get('BOON_API_TENANT', None)
        env_server = os.environ.get('BOON_SERVER', None)
        env_proxy_server = os.environ.get('PROXY_SERVER', None)
        env_cert = os.environ.get('BOON_SSL_CERT', None)
        env_verify = os.environ.get('BOON_SSL_VERIFY', None)

        # certificates: any client cert supplied (env var, True, or a .pem
        # path) means certificate checks are required.  The previous dict
        # lookup {None: ..., True: ...}[cert] raised KeyError for cert=False
        # or a string path, both of which the docstring allows.
        self.cert = 'CERT_REQUIRED' if (env_cert or cert) else 'CERT_NONE'
        # BOON_SSL_VERIFY may be 'true'/'false' (case-insensitive) or a
        # CA-bundle path, mirroring the 'verify' argument
        if env_verify:
            if env_verify.lower() == 'false':
                self.verify = False
            elif env_verify.lower() == 'true':
                self.verify = True
            else:
                self.verify = env_verify
        else:
            self.verify = verify

        # when license_id comes in as None, use 'default'
        if license_id is None:
            license_id = 'default'

        license_file = env_license_file if env_license_file else license_file
        self.license_id = env_license_id if env_license_id else license_id

        license_path = os.path.expanduser(license_file)
        if not os.path.exists(license_path):
            raise BoonException(
                "license file {} does not exist".format(license_path))
        try:
            with open(license_path, "r") as json_file:
                file_data = json.load(json_file)
        except json.JSONDecodeError as e:
            raise BoonException(
                "json formatting error in .BoonLogic.license file, {}, line: {}, col: {}"
                .format(e.msg, e.lineno, e.colno))
        try:
            license_data = file_data[self.license_id]
        except KeyError:
            raise BoonException(
                "license_id \"{}\" not found in license file".format(
                    self.license_id))

        try:
            self.api_key = env_api_key if env_api_key else license_data[
                'api-key']
        except KeyError:
            raise BoonException(
                "\"api-key\" is missing from the specified license in license file"
            )

        try:
            self.api_tenant = env_api_tenant if env_api_tenant else license_data[
                'api-tenant']
        except KeyError:
            raise BoonException(
                "\"api-tenant\" is missing from the specified license in license file"
            )

        try:
            self.server = env_server if env_server else license_data['server']
        except KeyError:
            raise BoonException(
                "\"server\" is missing from the specified license in license file"
            )

        self.proxy_server = env_proxy_server
        if not self.proxy_server and 'proxy-server' in license_data.keys():
            self.proxy_server = license_data['proxy-server']

        # set up base url; default to http:// when no scheme was given.
        # (the old substring test '"http" not in self.server' misfired on
        # hostnames that merely contain the text "http")
        self.url = self.server + '/expert/v3/'
        if not self.server.startswith("http"):
            self.url = "http://" + self.url

        # create pool manager: 30s connect timeout, caller-supplied read timeout
        timeout_inst = Timeout(connect=30.0, read=timeout)
        if self.proxy_server:
            # proxy pool
            self.http = ProxyManager(self.proxy_server,
                                     maxsize=10,
                                     timeout=timeout_inst,
                                     cert_reqs=self.cert)
        else:
            # non-proxy pool
            self.http = PoolManager(timeout=timeout_inst, cert_reqs=self.cert)

    def _is_configured(f):
        """Decorator: short-circuit with ``(False, error)`` when the nano pod
        instance has not been configured yet (``numeric_format`` unset)."""
        @wraps(f)
        def inner(*args, **kwargs):
            if args[0].numeric_format not in ['int16', 'uint16', 'float32']:
                return False, "nano instance is not configured"
            return f(*args, **kwargs)

        return inner

    def open_nano(self, instance_id):
        """Creates or attaches to a nano pod instance

        Args:
            instance_id (str): instance identifier to assign to new pod instance

        Returns:
            boolean: true if successful (instance is created or attached)

            str: None when result is true, error string when result=false

        """
        instance_cmd = self.url + 'nanoInstance/' + instance_id + '?api-tenant=' + self.api_tenant

        success, response = simple_post(self, instance_cmd)
        if not success:
            return False, response

        # remember the instance id only once the server accepted it
        self.instance = instance_id
        return success, response

    def close_nano(self):
        """Closes the pod instance

        Returns:
            result (boolean):  true if successful (nano pod instance was closed)
            response (str): None when result is true, error string when result=false

        """
        close_cmd = self.url + 'nanoInstance/' + self.instance + '?api-tenant=' + self.api_tenant

        # delete instance
        result, response = simple_delete(self, close_cmd)
        if not result:
            return result, response

        # drop pooled connections belonging to the closed instance
        self.http.clear()
        return result, None

    def create_config(self,
                      feature_count,
                      numeric_format,
                      cluster_mode='batch',
                      min_val=0,
                      max_val=1,
                      weight=1,
                      label=None,
                      percent_variation=0.05,
                      streaming_window=1,
                      accuracy=0.99,
                      autotune_pv=True,
                      autotune_range=True,
                      autotune_by_feature=True,
                      autotune_max_clusters=1000,
                      exclusions=None,
                      streaming_autotune=True,
                      streaming_buffer=10000,
                      learning_numerator=10,
                      learning_denominator=10000,
                      learning_max_clusters=1000,
                      learning_samples=1000000):
        """Generate a configuration template for the given parameters

        A discrete configuration is specified as a list of min, max, weights, and labels

        Args:
            feature_count (int): number of features per vector
            numeric_format (str): numeric type of data (one of "float32", "uint16", or "int16")
            cluster_mode (str): 'streaming' or 'batch' for expert run type
            min_val: the value that should be considered the minimum value for this feature. This
                can be set to a value larger than the actual min if you want to treat all value less
                than that as the same (for instance, to keep a noise spike from having undue influence
                in the clustering.  a single element list assigns all features with same min_val
            max_val: corresponding maximum value, a single element list assigns all features with same max_val
            weight: weight for this feature, a single element list assigns all features with same weight
            label (list): list of labels to assign to features
            percent_variation (float): amount of variation allowed within clusters
            streaming_window (integer): number of consecutive vectors treated as one inference (parametric parameter)
            accuracy (float): statistical accuracy of the clusters
            autotune_pv (bool): whether to autotune the percent variation
            autotune_range (bool): whether to autotune the min and max values
            autotune_by_feature (bool): whether to have individually set min and max values for each feature
            autotune_max_clusters (int): max number of clusters allowed
            exclusions (list): features to exclude while autotuning
            streaming_autotune (bool): whether to autotune while in streaming mode
            streaming_buffer (int): number of samples to autotune on
            learning_numerator (int): max number of new clusters learned
            learning_denominator (int): number of samples over which the new clusters are learned
            learning_max_clusters (int): max number of clusters before turning off learning
            learning_samples (int): max number of samples before turning off learning


        Returns:
            result (boolean): true if successful (configuration was successfully created)
            response (dict or str): configuration dictionary when result is true, error string when result is false

        """

        # a scalar min/max/weight applies to every feature
        if isinstance(min_val, int) or isinstance(min_val, float):
            min_val = [min_val] * feature_count
        if isinstance(max_val, int) or isinstance(max_val, float):
            max_val = [max_val] * feature_count
        if isinstance(weight, int):
            weight = [weight] * feature_count

        if exclusions is None:
            exclusions = []

        config = {}
        config['clusterMode'] = cluster_mode
        config['numericFormat'] = numeric_format
        config['features'] = []

        if (isinstance(min_val, list) or isinstance(min_val, np.ndarray)) and (
                isinstance(max_val, list)
                or isinstance(max_val, np.ndarray)) and (isinstance(
                    weight, list) or isinstance(weight, np.ndarray)):
            if len(min_val) != len(max_val) or len(min_val) != len(weight):
                return False, "parameters must be lists of the same length"

            # 'lo'/'hi' rather than shadowing the min/max builtins
            for lo, hi, w in zip(min_val, max_val, weight):
                config['features'].append({
                    'minVal': lo,
                    'maxVal': hi,
                    'weight': w
                })
        else:
            return False, "min_val, max_val and weight must be list or numpy array"

        if isinstance(label, list):
            if len(label) != len(min_val):
                return False, "label must be the same length as other parameters"
            for i, l in enumerate(label):
                config['features'][i]['label'] = l
        elif label:
            return False, "label must be list"

        config['percentVariation'] = percent_variation
        config['accuracy'] = accuracy
        config['streamingWindowSize'] = streaming_window

        config['autoTuning'] = {}
        config['autoTuning']['autoTuneByFeature'] = autotune_by_feature
        config['autoTuning']['autoTunePV'] = autotune_pv
        config['autoTuning']['autoTuneRange'] = autotune_range
        config['autoTuning']['maxClusters'] = autotune_max_clusters
        if isinstance(exclusions, list):
            config['autoTuning']['exclusions'] = exclusions
        elif exclusions:
            return False, 'exclusions must be a list'

        # the 'streaming' section only applies to streaming cluster mode
        if config['clusterMode'] == 'streaming':
            config['streaming'] = {}
            config['streaming']['enableAutoTuning'] = streaming_autotune
            config['streaming']['samplesToBuffer'] = streaming_buffer
            config['streaming']['learningRateNumerator'] = learning_numerator
            config['streaming'][
                'learningRateDenominator'] = learning_denominator
            config['streaming']['learningMaxClusters'] = learning_max_clusters
            config['streaming']['learningMaxSamples'] = learning_samples

        return True, config

    def configure_nano(self,
                       feature_count=1,
                       numeric_format='float32',
                       cluster_mode='batch',
                       min_val=0,
                       max_val=1,
                       weight=1,
                       label=None,
                       percent_variation=.05,
                       streaming_window=1,
                       accuracy=.99,
                       autotune_pv=True,
                       autotune_range=True,
                       autotune_by_feature=True,
                       autotune_max_clusters=1000,
                       exclusions=None,
                       streaming_autotune=True,
                       streaming_buffer=10000,
                       learning_numerator=10,
                       learning_denominator=10000,
                       learning_max_clusters=1000,
                       learning_samples=1000000,
                       config=None):
        """Returns the posted clustering configuration

         Args:
             feature_count (int): number of features per vector
             numeric_format (str): numeric type of data (one of "float32", "uint16", or "int16")
             cluster_mode (str): 'streaming' or 'batch' mode to run expert
             min_val: list of minimum values per feature, if specified as a single value, use that on all features
             max_val: list of maximum values per feature, if specified as a single value, use that on all features
             weight: influence each column has on creating a new cluster
             label (list): name of each feature (if applicable)
             percent_variation (float): amount of variation within each cluster
             streaming_window (integer): number of consecutive vectors treated as one inference (parametric parameter)
             accuracy (float): statistical accuracy of the clusters
             autotune_pv (bool): whether to autotune the percent variation
             autotune_range (bool): whether to autotune the min and max values
             autotune_by_feature (bool): whether to have individually set min and max values for each feature
             autotune_max_clusters (int): max number of clusters allowed
             exclusions (list): features to exclude while autotuning
             streaming_autotune (bool): whether to autotune while in streaming mode
             streaming_buffer (int): number of samples to autotune on
             learning_numerator (int): max number of new clusters learned
             learning_denominator (int): number of samples over which the new clusters are learned
             learning_max_clusters (int): max number of clusters before turning off learning
             learning_samples (int): max number of samples before turning off learning
             config (dict): dictionary of configuration parameters; when given,
                 it is posted as-is and the individual parameters are ignored

         Returns:
             result (boolean): true if successful (configuration was successfully loaded into nano pod instance)
             response (dict or str): configuration dictionary when result is true, error string when result is false

         """

        if config is None:
            success, config = self.create_config(
                feature_count, numeric_format, cluster_mode, min_val, max_val,
                weight, label, percent_variation, streaming_window, accuracy,
                autotune_pv, autotune_range, autotune_by_feature,
                autotune_max_clusters, exclusions, streaming_autotune,
                streaming_buffer, learning_numerator, learning_denominator,
                learning_max_clusters, learning_samples)
            if not success:
                return False, config
        body = json.dumps(config)

        config_cmd = self.url + 'clusterConfig/' + self.instance + '?api-tenant=' + self.api_tenant
        result, response = simple_post(self, config_cmd, body=body)
        if result:
            # a set numeric_format is what marks the instance as configured
            # (see _is_configured)
            self.numeric_format = config['numericFormat']

        return result, response

    def nano_list(self):
        """Returns list of nano instances allocated for a pod

        Returns:
            result (boolean):  true if successful (list was returned)
            response (str): json dictionary of pod instances when result=true, error string when result=false

        """

        # build command
        instance_cmd = self.url + 'nanoInstances' + '?api-tenant=' + self.api_tenant

        return simple_get(self, instance_cmd)

    @_is_configured
    def save_nano(self, filename):
        """serialize a nano pod instance and save to a local file

        Args:
            filename (str): path to local file where saved pod instance should be written

        Returns:
            result (boolean):  true if successful (pod instance was written)
            response (str): None when result is true, error string when result=false

        """

        # build command
        snapshot_cmd = self.url + 'snapshot/' + self.instance + '?api-tenant=' + self.api_tenant

        # serialize nano
        result, response = simple_get(self, snapshot_cmd)
        if not result:
            return result, response

        # at this point, the call succeeded, saves the result to a local file
        try:
            with open(filename, 'wb') as fp:
                fp.write(response)
        except Exception as e:
            # str(e) rather than e.strerror: only OSError carries strerror
            return False, str(e)

        return True, None

    def restore_nano(self, filename):
        """Restore a nano pod instance from local file

        Args:
            filename (str): path to local file containing saved pod instance

        Returns:
            result (boolean):  true if successful (nano pod instance was restored)
            response (str): None when result is true, error string when result=false

        """

        # verify that input file is a valid nano file (gzip'd tar with Magic Number)
        try:
            with tarfile.open(filename, 'r:gz') as tp:
                with tp.extractfile('/CommonState/MagicNumber') as magic_fp:
                    magic_num = magic_fp.read()
                    if magic_num != b'\xda\xba':
                        return False, 'file {} is not a Boon Logic nano-formatted file, bad magic number'.format(
                            filename)
        except KeyError:
            # tarfile raises KeyError when the member path is absent
            return False, 'file {} is not a Boon Logic nano-formatted file'.format(
                filename)
        except Exception:
            return False, 'corrupt file {}'.format(filename)

        with open(filename, 'rb') as fp:
            nano = fp.read()

        # build command
        snapshot_cmd = self.url + 'snapshot/' + self.instance + '?api-tenant=' + self.api_tenant

        fields = {'snapshot': (filename, nano)}

        result, response = multipart_post(self, snapshot_cmd, fields=fields)

        if not result:
            return result, response

        # restored snapshot carries the configuration with it
        self.numeric_format = response['numericFormat']

        return True, response

    @_is_configured
    def autotune_config(self):
        """Autotunes the percent variation, min and max for each feature

        Returns:
            result (boolean): true if successful (autotuning was completed)
            response (dict or str): configuration dictionary when result is true, error string when result is false

        """

        # build command
        config_cmd = self.url + 'autoTune/' + self.instance + '?api-tenant=' + self.api_tenant

        # autotune parameters
        return simple_post(self, config_cmd)

    @_is_configured
    def get_config(self):
        """Gets the configuration for this nano pod instance

        Returns:
            result (boolean): true if successful (configuration was found)
            response (dict or str): configuration dictionary when result is true, error string when result is false

        """
        config_cmd = self.url + 'clusterConfig/' + self.instance + '?api-tenant=' + self.api_tenant
        return simple_get(self, config_cmd)

    @_is_configured
    def load_file(self, file, file_type, gzip=False, append_data=False):
        """Load nano data from a file

        Args:
            file (str): local path to data file
            file_type (str): file type specifier, one of 'csv', 'csv-c', 'raw' or 'raw-n'
            gzip (boolean): true if file is gzip'd, false if not gzip'd
            append_data (boolean): true if data should be appended to previous data, false if existing
                data should be truncated

        Returns:
            result (boolean): true if successful (file was successful loaded into nano pod instance)
            response (str): None when result is true, error string when result=false

        """

        # load the data file
        try:
            with open(file, 'rb') as fp:
                file_data = fp.read()
        except FileNotFoundError as e:
            return False, e.strerror
        except Exception as e:
            # keep the response type consistent with the other error paths
            return False, str(e)

        # verify file_type is set correctly
        if file_type not in ['csv', 'csv-c', 'raw', 'raw-n']:
            return False, 'file_type must be "csv", "csv-c", "raw" or "raw-n"'

        file_name = os.path.basename(file)

        fields = {'data': (file_name, file_data)}

        # build command
        dataset_cmd = self.url + 'data/' + self.instance + '?api-tenant=' + self.api_tenant
        dataset_cmd += '&fileType=' + file_type
        dataset_cmd += '&gzip=' + str(gzip).lower()
        dataset_cmd += '&appendData=' + str(append_data).lower()

        return multipart_post(self, dataset_cmd, fields=fields)

    @_is_configured
    def load_data(self, data, append_data=False):
        """Load nano data from an existing numpy array or simple python list

        Args:
            data (np.ndarray or list): numpy array or list of data values
            append_data (boolean): true if data should be appended to previous data, false if existing
                data should be truncated

        Returns:
            result (boolean): true if successful (data was successful loaded into nano pod instance)
            response (str): None when result is true, error string when result=false

        """
        data = normalize_nano_data(data, self.numeric_format)
        file_name = 'dummy_filename.bin'
        file_type = 'raw'

        fields = {'data': (file_name, data)}

        # build command
        dataset_cmd = self.url + 'data/' + self.instance + '?api-tenant=' + self.api_tenant
        dataset_cmd += '&fileType=' + file_type
        dataset_cmd += '&appendData=' + str(append_data).lower()

        return multipart_post(self, dataset_cmd, fields=fields)

    def set_learning_status(self, status):
        """Turn cluster learning on or off for this pod instance

        Args:
            status (boolean): true or false of whether to learning is on or off

        Returns:
            result (boolean):  true if successful (list was returned)
            response (str): json dictionary of pod instances when result=true, error string when result=false

        """
        if status not in [True, False]:
            return False, 'status must be a boolean'
        # build command
        learning_cmd = self.url + 'learning/' + self.instance + '?enable=' + str(
            status).lower() + '&api-tenant=' + self.api_tenant

        return simple_post(self, learning_cmd)

    def set_root_cause_status(self, status):
        """configures whether or not to save new clusters coming in for root cause analysis

        Args:
            status (boolean): true or false of whether root cause is on or off

        Returns:
            result (boolean):  true if successful (list was returned)
            response (str): status of root cause

        """
        if status not in [True, False]:
            return False, 'status must be a boolean'
        # build command
        learning_cmd = self.url + 'rootCause/' + self.instance + '?enable=' + str(
            status).lower() + '&api-tenant=' + self.api_tenant

        return simple_post(self, learning_cmd)

    def run_nano(self, results=None):
        """Clusters the data in the nano pod buffer and returns the specified results

        Args:
            results (str): comma separated list of result specifiers

                ID = cluster ID

                SI = smoothed anomaly index

                RI = raw anomaly index

                FI = frequency index

                DI = distance index

                All = ID,SI,RI,FI,DI

        Returns:
            result (boolean): true if successful (nano was successfully run)
            response (dict or str): dictionary of results when result is true, error message when result = false

        """

        results_str = ''
        if str(results) == 'All':
            results_str = 'ID,SI,RI,FI,DI'
        elif results:
            # validate each specifier before touching the server
            for result in results.split(','):
                if result not in ['ID', 'SI', 'RI', 'FI', 'DI']:
                    return False, 'unknown result "{}" found in results parameter'.format(
                        result)
            results_str = results

        # build command
        nano_cmd = self.url + 'nanoRun/' + self.instance + '?api-tenant=' + self.api_tenant
        if results:
            nano_cmd += '&results=' + results_str

        return simple_post(self, nano_cmd)

    @_is_configured
    def run_streaming_nano(self, data, results=None):
        """Load streaming data into self-autotuning nano pod instance, run the nano and return results

        Args:
            data (np.ndarray or list): numpy array or list of data values
            results (str): comma separated list of result specifiers

                ID = cluster ID

                SI = smoothed anomaly index

                RI = raw anomaly index

                FI = frequency index

                DI = distance index

                All = ID,SI,RI,FI,DI

        Returns:
            result (boolean): true if successful (data was successful streamed to nano pod instance)
            response (dict or str): dictionary of results when result is true, error message when result = false

        """
        data = normalize_nano_data(data, self.numeric_format)
        file_name = 'dummy_filename.bin'
        file_type = 'raw'

        fields = {'data': (file_name, data)}

        results_str = ''
        if str(results) == 'All':
            results_str = 'ID,SI,RI,FI,DI'
        elif results:
            for result in results.split(','):
                if result not in ['ID', 'SI', 'RI', 'FI', 'DI']:
                    return False, 'unknown result "{}" found in results parameter'.format(
                        result)
            results_str = results

        # build command
        streaming_cmd = self.url + 'nanoRunStreaming/' + self.instance + '?api-tenant=' + self.api_tenant
        streaming_cmd += '&fileType=' + file_type
        if results:
            streaming_cmd += '&results=' + results_str

        return multipart_post(self, streaming_cmd, fields=fields)

    def get_version(self):
        """Version information for this nano pod

        Returns:
            result (boolean): true if successful (version information was retrieved)
            response (dict or str): dictionary of results when result is true, error message when result = false

        """

        # build command (strip the trailing 'v3/' — version is unversioned)
        version_cmd = self.url[:-3] + 'version' + '?api-tenant=' + self.api_tenant
        return simple_get(self, version_cmd)

    @_is_configured
    def get_buffer_status(self):
        """Results related to the bytes processed/in the buffer

        Returns:
            result (boolean): true if successful (nano was successfully run)
            response (dict or str): dictionary of results when result is true, error message when result = false

        """
        status_cmd = self.url + 'bufferStatus/' + self.instance + '?api-tenant=' + self.api_tenant
        return simple_get(self, status_cmd)

    @_is_configured
    def get_nano_results(self, results='All'):
        """Results per pattern

        Args:
            results (str): comma separated list of results

                ID = cluster ID

                SI = smoothed anomaly index

                RI = raw anomaly index

                FI = frequency index

                DI = distance index

                All = ID,SI,RI,FI,DI

        Returns:
            result (boolean): true if successful (nano was successfully run)
            response (dict or str): dictionary of results when result is true, error message when result = false

        """
        # build results command
        if str(results) == 'All':
            results_str = 'ID,SI,RI,FI,DI'
        else:
            for result in results.split(','):
                if result not in ['ID', 'SI', 'RI', 'FI', 'DI']:
                    return False, 'unknown result "{}" found in results parameter'.format(
                        result)
            results_str = results

        # build command
        results_cmd = self.url + 'nanoResults/' + self.instance + '?api-tenant=' + self.api_tenant
        results_cmd += '&results=' + results_str

        return simple_get(self, results_cmd)

    @_is_configured
    def get_nano_status(self, results='All'):
        """Results in relation to each cluster/overall stats

        Args:
            results (str): comma separated list of results

                PCA = principal components (includes 0 cluster)

                clusterGrowth = indexes of each increase in cluster (includes 0 cluster)

                clusterSizes = number of patterns in each cluster (includes 0 cluster)

                anomalyIndexes = anomaly index (includes 0 cluster)

                frequencyIndexes = frequency index (includes 0 cluster)

                distanceIndexes = distance index (includes 0 cluster)

                totalInferences = total number of patterns clustered (overall)

                averageInferenceTime = time in milliseconds to cluster per
                    pattern (not available if uploading from serialized nano) (overall)

                numClusters = total number of clusters (includes 0 cluster) (overall)

                All = PCA,clusterGrowth,clusterSizes,anomalyIndexes,frequencyIndexes,distanceIndexes,totalInferences,numClusters

        Returns:
            result (boolean): true if successful (nano was successfully run)
            response (dict or str): dictionary of results when result is true, error message when result = false

        """

        # build results command; note 'All' deliberately excludes
        # averageInferenceTime, which must be requested explicitly
        if str(results) == 'All':
            results_str = 'PCA,clusterGrowth,clusterSizes,anomalyIndexes,frequencyIndexes,' \
                          'distanceIndexes,totalInferences,numClusters'
        else:
            for result in results.split(','):
                if result not in [
                        'PCA', 'clusterGrowth', 'clusterSizes',
                        'anomalyIndexes', 'frequencyIndexes',
                        'distanceIndexes', 'totalInferences', 'numClusters',
                        'averageInferenceTime'
                ]:
                    return False, 'unknown result "{}" found in results parameter'.format(
                        result)
            results_str = results

        # build command
        results_cmd = self.url + 'nanoStatus/' + self.instance + '?api-tenant=' + self.api_tenant
        results_cmd = results_cmd + '&results=' + results_str

        return simple_get(self, results_cmd)

    def get_root_cause(self, id_list=None, pattern_list=None):
        """Get root cause

        Args:
            id_list (list): list of IDs to return the root cause for
            pattern_list (list): list of pattern vectors to calculate the root cause against the model

        Returns:
            result (boolean): true if every request succeeded
            response (dict or str): on success, a dict with keys
                'RootCauseFromID' and 'RootCauseFromPattern' (each a list of
                root-cause values); on failure, the server's error string

        Raises:
            BoonException: when neither id_list nor pattern_list is given
        """
        if id_list is None and pattern_list is None:
            raise BoonException(
                'Must specify either list of ID(s) or list of pattern(s).')

        response = {'RootCauseFromID': [], 'RootCauseFromPattern': []}
        if id_list is not None:
            id_list = [str(element) for element in id_list]
            rc_cmd = self.url + 'rootCauseFromID/' + self.instance + '?api-tenant=' + self.api_tenant
            rc_cmd = rc_cmd + '&clusterID=' + ",".join(id_list)

            success, status = simple_get(self, rc_cmd)
            if success:
                response['RootCauseFromID'] = status
            else:
                return success, status

        if pattern_list is not None:
            # a single flat pattern is promoted to a one-element list
            if len(np.array(
                    pattern_list).shape) == 1:  # only 1 pattern provided
                pattern_list = [pattern_list]
            # stringify every pattern; the previous code skipped this step in
            # the single-pattern case, so the join below raised TypeError
            pattern_strs = [
                ','.join(str(element) for element in pattern)
                for pattern in pattern_list
            ]
            rc_cmd = self.url + 'rootCauseFromPattern/' + self.instance + '?api-tenant=' + self.api_tenant
            rc_cmd = rc_cmd + '&pattern=' + '[[' + "],[".join(
                pattern_strs) + ']]'

            success, status = simple_get(self, rc_cmd)
            if success:
                response['RootCauseFromPattern'] = status
            else:
                return success, status

        return True, response
Example #26
0
 def __init__(self):
     """Initialize the Qt object, an empty provider registry, a shared
     HTTP pool manager, and the engine.
     """
     # initialise the QObject base explicitly so Qt machinery is set up
     QtCore.QObject.__init__(self)
     # registry of loaded providers; presumably keyed by provider
     # name/url -- TODO confirm against the load* callers
     self.providers = {}
     # single shared urllib3 pool: 10 s timeout, and advertise support
     # for compressed responses on every request
     self.poolManager = PoolManager(timeout=Timeout(10),
                                    headers={'Accept-Encoding': 'gzip,deflate'})
     self.engine = Engine()
Example #27
0
def scrap_lego_faces(metadata, path, resize=64, n_processes=4):
  r""" This function does not filter out bad images

  Downloads minifigure face images from bricklink.com, post-processes
  them (white background, optional split into halves, square crop,
  resize) and saves them as JPEG files under ``path``.

  Args:
    metadata: table with a "Number" column (part id) and a "Name" column;
      presumably a pandas DataFrame (uses ``.values`` / ``.shape``) — TODO confirm.
    path: output directory for the saved ``.jpg`` files.
    resize: target square size in pixels, or None to keep the crop size.
    n_processes: number of parallel workers given to ``MPI``.

  Returns:
    np.ndarray of the saved image file paths.
  """
  from tqdm import tqdm
  from PIL import Image

  def _download_image(meta, conn):
    # Fetch the catalog page for one part, locate its image URL, download
    # the image, post-process it and save one or two JPEG crops.
    part_id, desc = meta
    desc = desc.replace("Minifigure, ", "")  # NOTE(review): desc is unused after this
    return_path = []
    with warnings.catch_warnings():
      # The pool is created with cert_reqs='CERT_NONE'; silence the warning.
      warnings.filterwarnings('ignore', category=InsecureRequestWarning)
      response = conn.request(
          "GET",
          f"https://www.bricklink.com/v2/catalog/catalogitem.page?P={part_id}",
          preload_content=False)
      # Scan the page HTML for this part's catalog image URL.
      img_url = re.search(
          rf"\bimg\.bricklink\.com\/ItemImage\/[A-Z]+\/[0-9]+\/{part_id}\.png\b",
          str(response.read(), 'utf-8'),
      )
      if img_url is not None:
        img_url = img_url.group(0)
        img_response = conn.request("GET",
                                    f"https://{img_url}",
                                    preload_content=False)
        image_path = f"{path}/{part_id}"
        # convert to jpg with white background
        image = Image.open(img_response).convert("RGBA")
        background = Image.new("RGBA", image.size, (255, 255, 255))
        image = Image.alpha_composite(background, image).convert("RGB")
        del background
        width, height = image.size
        ratio = width / height
        # split the image
        # NOTE(review): "or part_id" makes this condition true for any
        # non-empty part id, so the split branch always runs — confirm intended.
        if ratio >= 1.6 or part_id:
          im = np.array(image)
          M = im.shape[0]
          N = im.shape[1] // 2
          # Cut the image into two half-width tiles; keep at most the first two.
          halves = [
              im[x:x + M, y:y + N]
              for x in range(0, im.shape[0], M)
              for y in range(0, im.shape[1], N)
          ]
          image = [Image.fromarray(half, "RGB") for half in halves[:2]]
        else:
          image = [image]
        # crop to square image
        for idx, im in enumerate(image):
          width, height = im.size
          new_len = min(width, height)
          left = (width - new_len) / 2
          top = (height - new_len) / 2
          right = (width + new_len) / 2
          bottom = (height + new_len) / 2
          im = im.crop((left, top, right, bottom))
          # resize the image
          if resize is not None:
            im = im.resize((int(resize), int(resize)))
          # save image
          out = image_path + ('.jpg' if idx == 0 else ('_%d.jpg' % idx))
          im.save(out, "JPEG", quality=90)
          return_path.append(out)
          del im
    return return_path

  # Shared connection pool for all workers.
  # NOTE(review): cert_reqs='CERT_NONE' disables TLS certificate verification.
  conn = PoolManager(
      num_pools=2,
      headers={
          "User-Agent":
              "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:69.0) Gecko/20100101 Firefox/69.0"
      },
      maxsize=100,
      cert_reqs='CERT_NONE')
  all_images = []
  for image_path in tqdm(MPI(
      jobs=list(zip(metadata["Number"].values, metadata["Name"].values)),
      func=partial(_download_image, conn=conn),
      ncpu=max(1, int(n_processes)),
      batch=1,
  ),
                         desc="Download lego faces",
                         unit="image",
                         total=metadata.shape[0]):
    all_images += image_path
  return np.array(all_images)
Example #28
0
def main():
    """CLI entry point: parse arguments, configure logging, build an HTTP
    client that honors the configured proxy, then upload each given path
    as a GitHub issue attachment. Exits non-zero on failure."""
    parser = argparse.ArgumentParser(
        description="Uploads images/documents to GitHub as issue attachments.\n"
        "See https://github.com/zmwangx/ghuc for detailed documentation.")
    parser.add_argument(
        "-r",
        "--repository-id",
        type=int,
        default=1,
        help="id of repository to upload from (defaults to 1)",
    )
    parser.add_argument("-x", "--proxy", help="HTTP or SOCKS proxy")
    parser.add_argument("-q",
                        "--quiet",
                        action="store_true",
                        help="set logging level to ERROR")
    parser.add_argument("--debug",
                        action="store_true",
                        help="set logging level to DEBUG")
    parser.add_argument(
        "--gui",
        action="store_true",
        help=
        "disable headless mode when running browser sessions through Selenium WebDriver",
    )
    parser.add_argument(
        "--container",
        action="store_true",
        help="add extra browser options to work around problems in containers",
    )
    parser.add_argument("--version", action="version", version=__version__)
    parser.add_argument("paths", type=pathlib.Path, nargs="+", metavar="PATH")
    args = parser.parse_args()

    # --debug wins over --quiet; otherwise the preconfigured level is kept.
    if args.debug:
        custom_level = logging.DEBUG
    elif args.quiet:
        custom_level = logging.ERROR
    else:
        custom_level = None
    if custom_level is not None:
        logger.setLevel(custom_level)
        logger.handlers[0].setLevel(custom_level)

    # Publish the CLI options as module-level globals read elsewhere.
    global repository_id
    global proxy
    global headless
    global container

    repository_id = args.repository_id
    proxy = args.proxy or os.getenv("https_proxy")
    # Prepend http:// when the proxy string has no recognized scheme.
    if proxy and not re.match(r"^(https?|socks(4a?|5h?))://", proxy):
        proxy = "http://%s" % proxy
    if proxy:
        logger.debug("using proxy %s", proxy)
    headless = not args.gui
    container = args.container

    # Pick the urllib3 manager class matching the proxy scheme.
    common_http_options = dict(cert_reqs="CERT_REQUIRED", timeout=3.0)
    if not proxy:
        http_client = PoolManager(**common_http_options)
    elif proxy.startswith("http"):
        http_client = ProxyManager(proxy, **common_http_options)
    elif proxy.startswith("socks"):
        # SOCKSProxyManager is falsy when urllib3's SOCKS extra is unavailable.
        if SOCKSProxyManager:
            http_client = SOCKSProxyManager(proxy, **common_http_options)
        else:
            logger.critical(
                "your urllib3 installation does not support SOCKS proxies")
            sys.exit(1)
    else:
        logger.critical("unrecognized proxy type %s", proxy)
        sys.exit(1)

    try:
        load_cookie_and_token()
        count = len(args.paths)
        num_errors = 0
        # Upload each path; count failures instead of aborting on the first.
        for path in args.paths:
            try:
                upload_asset(http_client, path)
            except UploadError:
                num_errors += 1
        if count > 1 and num_errors > 0:
            logger.warning("%d failed uploads", num_errors)
        sys.exit(0 if num_errors == 0 else 1)
    except ExtractionError:
        logger.critical("aborting due to inability to extract credentials")
        sys.exit(1)
Example #29
0
# Error log goes to <cwd>/Log/server_error.log.
base_path = os.path.abspath('.')
log_path = os.path.join(base_path, 'Log')
log_file_name = 'server_error.log'

logger = logging.getLogger(__name__)
logger.setLevel(level=logging.INFO)
# NOTE(review): FileHandler raises if the Log directory does not exist —
# confirm it is created before this module is imported.
handle = logging.FileHandler(os.path.join(log_path, log_file_name))
handle.setLevel(level=logging.INFO)
logger.addHandler(handle)

# Shared in-memory state for this node process (mutated by request handlers).
block_chain_have_sync = []
node = []
block_new = None
transaction_not_verify_cache = []
transaction_have_verify_cache = []
httpRequest = PoolManager()  # shared HTTP client for node-to-node requests
main_node = None
ChainFlag = {}
RequestNumber = {}
RequestFlag = {}
ResponseFlag = {}
ResponseNumber = {}
NodeNumber = {}
SyncNumber = []
# Both flags start False; presumably toggled by the miner / sync code — verify.
ChainFlag.setdefault('miner_flag', False)
ChainFlag.setdefault('sync_flag', False)


###
# register node
###
Example #30
0
#!/usr/bin/env python
"""Fetch the page given on the command line and download every linked
YouTube video with cclive."""
from urllib3 import PoolManager
from os import system
from sys import argv
from re import findall

pool = PoolManager()
# e.g. argv[1] = 'https://www.youtube.com/watch?v=B-oqeIEYnFc'
page = pool.request('GET', argv[1]).data.decode('utf-8')
# Video ids are exactly 11 characters after "watch?v="; dedupe with a set.
for video in set(findall(r'watch\?v=.{11}', page)):
    cmd = 'cclive https://www.youtube.com/%s' % video
    print(cmd)
    system(cmd)
Example #31
0
 def get_headers(self, url):
     """GET *url* and return the response headers (body is not fetched)."""
     manager = PoolManager()
     # preload_content=False: only the status line and headers are read here.
     resp = manager.request('GET', url, preload_content=False)
     return resp.headers
Example #32
0
 def init_poolmanager(self, connections, maxsize, block=False):
     """Create the urllib3 pool manager backing this adapter.

     PROTOCOL_SSLv23 is the legacy ssl-module alias for "negotiate the
     highest protocol both sides support".
     """
     self.poolmanager = PoolManager(
         num_pools=connections,
         maxsize=maxsize,
         block=block,
         ssl_version=ssl.PROTOCOL_SSLv23)
Example #33
0
class Client(object):
    """Interface to KISSmetrics tracking service"""

    def __init__(self, key, trk_host=KISSmetrics.TRACKING_HOSTNAME,
                 trk_scheme=KISSmetrics.TRACKING_SCHEME):
        """Initialize client for use with KISSmetrics API key.

        :param key: API key for product, found on the
                    "KISSmetrics Settings".
        :type key: str
        :param trk_host: tracking host for requests; defaults
                         production tracking service.
        :param trk_scheme: the scheme for requests; must be either
                           `'http'` or `'https'`.

        :raises ValueError: if `trk_scheme` is neither `'http'` nor
                            `'https'`.

        """
        self.key = key
        if trk_scheme not in ('http', 'https'):
            raise ValueError('trk_scheme must be one of (http, https)')
        self.http = PoolManager()
        self.trk_host = trk_host
        self.trk_scheme = trk_scheme

    def record(self, person, event, properties=None, timestamp=None,
               path=KISSmetrics.RECORD_PATH):
        """Record `event` for `person` with any `properties`.

        :param person: the individual performing the `event`
        :param event: the `event` name that was performed
        :param properties: any additional data to include
        :type properties: dict
        :param timestamp: when the `event` was performed; optional for
                          back-dating
        :param path: HTTP endpoint to use; defaults to
                    ``KISSmetrics.RECORD_PATH``

        :returns: an HTTP response for the request
        :rtype: `urllib3.response.HTTPResponse`

        """
        this_request = request.record(self.key, person, event,
                                      timestamp=timestamp,
                                      properties=properties,
                                      scheme=self.trk_scheme,
                                      host=self.trk_host, path=path)
        return self._request(this_request)

    def set(self, person, properties=None, timestamp=None,
            path=KISSmetrics.SET_PATH):
        """Set a property (or properties) for a `person`.

        :param person: individual to associate properties with
        :param properties: key-value pairs to associate with `person`
        :type properties: dict
        :param timestamp: when the `event` was performed; optional for
                          back-dating
        :param path: HTTP endpoint to use; defaults to
                    ``KISSmetrics.SET_PATH``

        :returns: an HTTP response for the request
        :rtype: `urllib3.response.HTTPResponse`

        """
        this_request = request.set(self.key, person, timestamp=timestamp,
                                   properties=properties,
                                   scheme=self.trk_scheme, host=self.trk_host,
                                   path=path)
        return self._request(this_request)

    def alias(self, person, identity, path=KISSmetrics.ALIAS_PATH):
        """Map `person` to `identity`; actions done by one resolve to other.

        :param person: consider as same individual ``identity``; the
                       source of the alias operation
        :type person: str or unicode
        :param identity: consider as an alias of ``person``; the target
                         of the alias operation
        :type identity: str or unicode
        :param path: HTTP endpoint to use; defaults to
                    ``KISSmetrics.ALIAS_PATH``

        :returns: an HTTP response for the request
        :rtype: `urllib3.response.HTTPResponse`

        Note the direction of the mapping is ``person`` to ``identity``
        (so "``person`` is also known as ``identity``" or "``person`` =>
        ``identity``" when looking at it as "``<source>`` => ``<target>``")

        When consulting the Aliasing documentation, `person` corresponds
        to ``query_string.PERSON_PARAM`` and `identity` corresponds to
        ``query_string.ALIAS_PARAM``.

        Aliasing is not a reversible operation.  When aliasing to an
        identity, take care not to use a session identifier or any other
        value that is not relatively stable (a value that will not
        change per request or per session).

        For more information see the API Specifications on `Aliasing
        <http://support.kissmetrics.com/apis/specifications.html#aliasing-users>`_.

        """
        this_request = request.alias(self.key, person, identity,
                                     scheme=self.trk_scheme,
                                     host=self.trk_host, path=path)
        return self._request(this_request)

    def _request(self, uri, method='GET'):
        """Issue the HTTP request for a prepared tracking URI."""
        return self.http.request(method, uri)
Example #34
0
    def update_db(self, year):
        """Refresh the cached NVD CVE feed for *year*.

        Downloads the feed's .meta file first and returns early when the
        cached meta matches (feed unchanged). Otherwise downloads and
        extracts the JSON feed, parses every CVE item, optionally
        bulk-indexes the items into Elasticsearch, and pickles the parsed
        list to <CACHE_PATH>/<year>.db.

        :param year: feed year as a string (substituted into the feed name)
        """
        filename = CVE_FEED_FILENAME.replace('$$$$', year) + '.json'
        file_path = path.join(CACHE_PATH, filename)
        meta_filename = CVE_FEED_FILENAME.replace('$$$$', year) + '.meta'
        # NOTE(review): the local meta file is named '<year>.meta', not
        # meta_filename — confirm the asymmetry is intended.
        meta_file_path = path.join(CACHE_PATH, year + '.meta')

        # Honor an http_proxy environment variable when present.
        if environ.get('http_proxy') is not None:
            http = ProxyManager(environ.get('http_proxy'), maxsize=10)
        else:
            http = PoolManager()
        disable_warnings(urllib3_exceptions.InsecureRequestWarning)
        r = None
        meta = None
        try:
            r = http.request('GET',
                             CVE_FEED_URL + meta_filename,
                             preload_content=False)
        except Exception as e:
            print("[!] Error obtaining CVE meta data: " + str(e))

        if path.isfile(meta_file_path):
            # Cached meta matches the remote one: feed unchanged, skip update.
            with open(meta_file_path, 'r') as myfile:
                meta = myfile.read()
            if r is not None and meta is not None and r.data.decode(
                    'utf-8').replace('\r', '') == meta:
                return

        else:
            # NOTE(review): the meta file is only written when absent; after a
            # mismatch it is never refreshed — confirm this is intended.
            if r is not None:
                with open(meta_file_path, 'wb') as out_file:
                    copyfileobj(r, out_file)

        try:
            # Stream the zipped feed to disk.
            with http.request('GET',
                              CVE_FEED_URL + filename + '.zip',
                              preload_content=False) as r, open(
                                  file_path + '.zip', 'wb') as out_file:
                copyfileobj(r, out_file)
        except Exception as e:
            print("[!] Error downloading CVE feed: " + str(e))
            return
        try:
            # NOTE(review): the ZipFile is never closed explicitly; despite the
            # variable name, the extracted member is JSON, not XML.
            archive = ZipFile(file_path + '.zip', 'r')
            xml_data = archive.extract(filename, CACHE_PATH)
        except Exception as e:
            print("[!] Error extracting the CVE archive: " + str(e))
            return

        cve_cache = []  # parsed cve_item objects, pickled at the end
        actions = []  # Elasticsearch bulk-index actions
        count = 0

        with open(file_path, encoding='utf-8') as data_file:
            data = json.loads(data_file.read())["CVE_Items"]
        for i in data:
            item = cve_item()
            item.id = i["cve"]["CVE_data_meta"]["ID"]
            for j in i['cve']['references']['reference_data']:
                item.references.append(j)
            item.summary = i['cve']['description']['description_data'][0][
                "value"]
            # Collect affected CPEs from top-level nodes and their children.
            for j in i['configurations']['nodes']:
                if 'cpe' in j:
                    for k in j['cpe']:
                        item.affected.append({
                            "vuln":
                            k['vulnerable'],
                            "cpe22":
                            k['cpe22Uri'],
                            "cpe23":
                            k['cpe23Uri'],
                            "vStartE":
                            k.get('versionStartExcluding', ''),
                            "vStartI":
                            k.get('versionStartIncluding', ''),
                            "vEndE":
                            k.get('versionEndExcluding', ''),
                            "vEndI":
                            k.get('versionEndIncluding', '')
                        })
                elif 'children' in j:
                    for t in j['children']:
                        if 'cpe' in t:
                            for k in t['cpe']:
                                item.affected.append({
                                    "vuln":
                                    k['vulnerable'],
                                    "cpe22":
                                    k['cpe22Uri'],
                                    "cpe23":
                                    k['cpe23Uri'],
                                    "vStartE":
                                    k.get('versionStartExcluding', ''),
                                    "vStartI":
                                    k.get('versionStartIncluding', ''),
                                    "vEndE":
                                    k.get('versionEndExcluding', ''),
                                    "vEndI":
                                    k.get('versionEndIncluding', '')
                                })
            # CVSS v3 and v2 scores/vectors, when present.
            if 'baseMetricV3' in i['impact']:
                item.cvss['vector_string_v3'] = i['impact']['baseMetricV3'][
                    'cvssV3']['vectorString']
                item.cvss['score_v3'] = i['impact']['baseMetricV3']['cvssV3'][
                    'baseScore']
            if 'baseMetricV2' in i['impact']:
                item.cvss['vector_string_v2'] = i['impact']['baseMetricV2'][
                    'cvssV2']['vectorString']
                item.cvss['score_v2'] = i['impact']['baseMetricV2']['cvssV2'][
                    'baseScore']
            item.published = i['publishedDate']
            item.last_modified = i['lastModifiedDate']
            cve_cache.append(item)
            if USE_ELASTIC_SEARCH:
                # cache-index links the ES document back to cve_cache position.
                actions.append({
                    "_index": "cve-" + year,
                    "_type": "vulns",
                    "_source": {
                        'cve_id': item.id,
                        'summary': item.summary,
                        'published': item.published,
                        'last_modified': item.last_modified,
                        'score_v3': item.cvss.get('score_v3', 0),
                        'score_v2': item.cvss.get('score_v2', 0),
                        'vector_string_v2':
                        item.cvss.get('vector_string_v2', 'NA'),
                        'vector_string_v3':
                        item.cvss.get('vector_string_v3', 'NA'),
                        'affected': item.affected,
                        'cache-index': count,
                    }
                })
                count = count + 1

        if USE_ELASTIC_SEARCH is True:
            # Recreate the year index from scratch, then bulk-load the actions.
            try:
                if self.es.indices.exists(index="cve-" + year):
                    self.es.indices.delete(index='cve-' + year,
                                           ignore=[400, 404],
                                           request_timeout=60)
                mappings = {
                    "mappings": {
                        "vulns": {
                            "properties": {
                                "cve_id": {
                                    "type": "keyword"
                                },
                                "score_v2": {
                                    "type": "float"
                                },
                                "score_v3": {
                                    "type": "float"
                                },
                                "affected": {
                                    "type": "nested",
                                    "properties": {
                                        "cpe22": {
                                            "type": "keyword"
                                        },
                                        "cpe23": {
                                            "type": "keyword"
                                        },
                                        "vStartE": {
                                            "type": "keyword"
                                        },
                                        "vStartI": {
                                            "type": "keyword"
                                        },
                                        "vEndE": {
                                            "type": "keyword"
                                        },
                                        "vEndI": {
                                            "type": "keyword"
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                self.es.indices.create(index="cve-" + year,
                                       ignore=400,
                                       body=mappings)
                self.helpers.bulk(self.es, actions, request_timeout=60)
            except Exception as e:
                print("[!] Elasticsearch indexing error: " + str(e))

        try:
            # Persist the parsed items and remove the downloaded artifacts.
            dump(cve_cache, open(path.join(CACHE_PATH, year + '.db'), "wb"),
                 HIGHEST_PROTOCOL)
            remove(file_path + '.zip')
            remove(file_path)
        except PickleError as e:
            print("[!] Error while caching CVE data: " + str(e))
Example #35
0
# Verify the ipaddress dependency is available before anything else runs.
# Catch only ImportError: a bare except would also swallow KeyboardInterrupt
# and unrelated failures raised during import.
try:
    import ipaddress
except ImportError:
    print(
        RED1 + BOLD +
        "\n * Package ipaddress not installed. Please install the dependencies before continue.\n"
        "" + GREEN + "   Example: \n"
        "   # pip install -r requires.txt\n" + ENDC)
    exit(0)

from urllib3 import PoolManager
from urllib3.util import Timeout

# Shared pool for all scans: 3 s to connect, 6 s to read; TLS certificates
# are deliberately not verified (cert_reqs='CERT_NONE').
timeout = Timeout(connect=3.0, read=6.0)
pool = PoolManager(timeout=timeout, cert_reqs='CERT_NONE')

# Cooperative stop flag, presumably flipped by a signal handler — verify.
global gl_interrupted
gl_interrupted = False

user_agents = [
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:38.0) Gecko/20100101 Firefox/38.0",
    "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.112 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_2) AppleWebKit/601.3.9 (KHTML, like Gecko) Version/9.0.2 Safari/601.3.9",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36",
    "Mozilla/5.0 (Windows NT 5.1; rv:40.0) Gecko/20100101 Firefox/40.0",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; .NET CLR 2.0.50727; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729)",
    "Mozilla/5.0 (compatible; MSIE 6.0; Windows NT 5.1)",
    "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 2.0.50727)",
    "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20100101 Firefox/31.0",
Example #36
0
class OrionConnector:
    """ Connects to the Orion context broker and provides easy use of its REST API.

    """
    # NGSI API version segment appended to the host URL.
    version = "v2"
    # Default headers for requests without / with a JSON payload.
    header_no_payload = {"Accept": "application/json"}
    header_payload = {
        "Accept": "application/json",
        "Content-Type": "application/json"
    }

    # Class-level pool manager: shared by every OrionConnector instance.
    _pool_manager = PoolManager()

    @property
    def service_path(self):
        """ Service path as string (comma-joined when set from a list)."""
        return self._service_path

    @service_path.setter
    def service_path(self, value):
        """ Service path to include in commands.

        Trailing slashes are stripped — Orion treats '/foo' and '/foo///'
        the same, and the normalized form is required when building the
        hierarchical-search path ('/foo/#').

        :param value: service path as string, list of strings, or None
        :raises Exception: if value is neither None, a list, nor a str
        """
        if value is None:
            self._service_path = None
        # isinstance instead of type() ==: accepts subclasses and is the
        # idiomatic type check.
        elif isinstance(value, list):
            self._service_path = ", ".join(sp.rstrip("/") for sp in value)
        elif isinstance(value, str):
            # Keep the bare root path untouched; rstrip would empty it.
            self._service_path = value if value == "/" else value.rstrip("/")
        else:
            raise Exception("service_path must be list or string")

    def __init__(self,
                 host,
                 codec="utf-8",
                 service=None,
                 service_path=None,
                 oauth_connector=None,
                 authorization_header_name="X-Auth-Token"):
        """ Initialize the connector.

        :param host: The url of the NGSI API (a single ending '/' is removed)
        :param codec: The codec used for decoding responses.
        :param service: value for the Fiware-Service header, if any.
        :param service_path: value for the Fiware-ServicePath header, if any.
        :param oauth_connector: optional OAuth connector supplying tokens.
        :param authorization_header_name: header name carrying the OAuth token.
        """
        self.host = host[:-1] if host[-1] == "/" else host
        self.base_url = self.host + "/" + self.version

        # Endpoint URLs derived from the API root.
        self.url_entities = self.base_url + "/entities"
        self.url_types = self.base_url + "/type"
        self.url_subscriptions = self.base_url + "/subscriptions"
        self.url_batch_update = self.base_url + "/op/update"
        self.batch = self.base_url

        self.codec = codec
        self.service = service
        # Assign through the property so the setter's normalization runs.
        self._service_path = None
        self.service_path = service_path

        # OAUTH
        self.oauth = oauth_connector
        self.authorization_header_name = authorization_header_name

    def _request(self, body=None, **kwargs):
        """Send a request to the Context Broker and return the raw response."""
        payload = json.dumps(body) if body else body
        # Never mutate the caller's header dict.
        headers = dict(kwargs.pop("headers", {}))
        if self.service:
            headers["Fiware-Service"] = self.service
        if self.service_path and "Fiware-ServicePath" not in headers:
            headers["Fiware-ServicePath"] = self.service_path
        if self.oauth:
            headers[self.authorization_header_name] = self.oauth.token
        logger.debug("URL %s\nHEADERS %s\nBODY %s\n", kwargs['url'], headers,
                     payload)
        return self._pool_manager.request(body=payload, headers=headers,
                                          **kwargs)

    def get(self, entity_id, entity_type=None, key_values=False):
        """ Get an entity from the context by its ID. If Orion responds not found, None is returned.

        :param entity_id: The ID of the entity that is retrieved.
        :param entity_type: The entity type that the entities must match.
        :param key_values: Whether a full NGSIv2 entity should be returned or only a keyValues model

        :return: The entity or None
        """
        get_url = self.url_entities + '/' + entity_id

        fields = {}
        if entity_type:
            fields["type"] = entity_type
        if key_values:
            fields["options"] = "keyValues"

        response = self._request(method="GET",
                                 url=get_url,
                                 headers=self.header_no_payload,
                                 fields=fields)

        # Any status outside 200-399 is an error; 404 simply means "no entity".
        if not 200 <= response.status < 400:
            if response.status == 404:
                logger.debug("Not found: %s", get_url)
                return None
            raise FiException(
                response.status,
                "Error{}: {}".format(response.status,
                                     response.data.decode(self.codec)))

        return json.loads(response.data.decode(self.codec))

    def count(self,
              entity_type=None,
              id_pattern=None,
              query=None,
              georel=None,
              geometry=None,
              coords=None,
              hierarchical_search=False):
        """ Get the  total amount of entities that match the provided entity class, id pattern and/or query.

        :param entity_type: The entity type that the entities must match .
        :param id_pattern: The entity id pattern that the entities must match.
        :param query: The query that the entities must match.
        :param geometry: Geometry form used to spacial limit the query: "point", "line", "polygon", "box"
        :param georel: Relation between the geometry an  the entities: "coveredBy", "intersects", "equals", "disjoint"
        :param coords: Semicolon separated list of coordinates(coma separated) Ex: "45.7878,3.455454;41.7878,5.455454"
        :param hierarchical_search: Search only in this servicePath or in all sub servicePaths as well

        :return: The amount of entities

        :raises FiException: on invalid geo parameters, missing service path
                             for hierarchical search, or a non-2xx/3xx,
                             non-404 response.
        """
        # limit=1 keeps the payload minimal; the count itself is read from
        # the fiware-total-count response header (requested via options=count).
        fields = {"options": "count", "limit": 1}
        if entity_type:
            fields["type"] = entity_type
        if id_pattern:
            fields["idPattern"] = id_pattern
        if query:
            fields["q"] = query

        headers = self.header_no_payload.copy()
        if hierarchical_search:
            # Append '/#' to each service path so sub-paths are searched too.
            if not self._service_path:
                raise FiException(
                    None,
                    "Hierarchical search does not work without service path.")
            elif ", " in self._service_path:
                headers["Fiware-ServicePath"] = ", ".join(
                    [sp + "/#" for sp in self.service_path.split(", ")])
            else:
                headers["Fiware-ServicePath"] = self.service_path + "/#"

        # Geo query: georel, geometry and coords must be given all together.
        if georel and geometry and coords:
            if not (georel
                    in ["coveredBy", "intersects", "equals", "disjoint"]
                    or georel.startswith("near")):
                raise FiException(
                    None,
                    f"({georel}) is not a valid spatial relationship(georel).")
            if geometry not in ["point", "line", "polygon", "box"]:
                raise FiException(None,
                                  f"({geometry}) is not a valid geometry.")
            fields["georel"] = georel
            fields["geometry"] = geometry
            fields["coords"] = coords

        elif georel or geometry or coords:
            raise FiException(
                None,
                f"Geographical Queries requires  georel, geometry and coords  attributes. \
                    {'georel not set!' if georel is None else ''} \
                    {'geometry not set!' if geometry is None else ''} \
                    {'coords not set!' if coords is None else ''}")

        logger.debug("REQUEST to %s\n %s ", self.url_entities, fields)
        response = self._request(method="GET",
                                 url=self.url_entities,
                                 headers=headers,
                                 fields=fields)
        if response.status // 200 != 1:
            # NOTE(review): returns [] (not 0) on 404, unlike the documented
            # int return — confirm callers tolerate the list value.
            if response.status == 404:
                logger.info("Not found: %s, \nfields: %s", self.url_entities,
                            fields)
                return []
            raise FiException(
                response.status,
                "Error{}: {}".format(response.status,
                                     response.data.decode(self.codec)))
        return int(response.headers["fiware-total-count"])

    def search(self,
               entity_type=None,
               id_pattern=None,
               query=None,
               georel=None,
               geometry=None,
               coords=None,
               limit=0,
               offset=0,
               key_values=False,
               hierarchical_search=False):
        """ Get the list of the entities that match the provided entity class, id pattern and/or query.

        Results are fetched in pages of at most 1000 entities (Orion's page
        cap) and stitched together by recursing with an advanced offset
        until ``limit`` (or the server-reported total count) is reached.

        :param entity_type: The entity type that the entities must match .
        :param id_pattern: The entity id pattern that the entities must match.
        :param query: The query that the entities must match.
        :param limit: The limit of returned entities. Zero, the default value, means no limit (fetch all matches).
        :param offset: The offset of returned entities, for paginated search.
        :param geometry: Geometry form used to spacial limit the query: "point", "line", "polygon", "box"
        :param georel: Relation between the geometry an  the entities: "coveredBy", "intersects", "equals", "disjoint"
        :param coords: Semicolon separated list of coordinates(coma separated) Ex: "45.7878,3.455454;41.7878,5.455454"
        :param key_values: Wether a full NGSIv2 entity should be returned or only a keyValues model
        :param hierarchical_search: Search only in this servicePath or in all sub servicePaths as well

        :return: A list of entities (empty list when the broker answers 404)

        :raises FiException: on invalid geo parameters or any non-2xx/3xx
            response other than 404.
        """
        # "count" makes the broker return the fiware-total-count header,
        # which drives the pagination logic below.
        options = "count"
        if key_values:
            options = options + ",keyValues"
        fields = {
            "options": options,
            # Page size is clamped to 1000; bigger/zero limits are satisfied
            # by the recursive call at the bottom.
            "limit": limit if limit and limit <= 1000 else 1000
        }
        if offset:
            fields["offset"] = offset
        if entity_type:
            fields["type"] = entity_type
        if id_pattern:
            fields["idPattern"] = id_pattern
        if query:
            fields["q"] = query

        # Copy so the hierarchical variant does not mutate the shared headers.
        headers = self.header_no_payload.copy()
        if hierarchical_search:
            if not self._service_path:
                raise FiException(
                    None,
                    "Hierarchical search does not work without service path.")
            elif ", " in self._service_path:
                # NOTE(review): assumes multiple service paths are separated
                # by ", " (comma + space) — confirm against how
                # _service_path is assigned elsewhere in this class.
                headers["Fiware-ServicePath"] = ", ".join(
                    [sp + "/#" for sp in self.service_path.split(", ")])
            else:
                # "/#" suffix asks the broker to include sub service paths.
                headers["Fiware-ServicePath"] = self.service_path + "/#"

        # Geo queries need all three parameters; validate before sending.
        if georel and geometry and coords:
            if not (georel
                    in ["coveredBy", "intersects", "equals", "disjoint"]
                    or georel.startswith("near")):
                raise FiException(
                    None,
                    f"({georel}) is not a valid spatial relationship(georel).")
            if geometry not in ["point", "line", "polygon", "box"]:
                raise FiException(None,
                                  f"({geometry}) is not a valid geometry.")
            fields["georel"] = georel
            fields["geometry"] = geometry
            fields["coords"] = coords

        elif georel or geometry or coords:
            raise FiException(
                None,
                f"Geographical Queries requires  georel, geometry and coords  attributes. \
                    {'georel not set!' if georel is None else ''} \
                    {'geometry not set!' if geometry is None else ''} \
                    {'coords not set!' if coords is None else ''}")

        logger.debug("REQUEST to %s\n %s ", self.url_entities, fields)
        response = self._request(method="GET",
                                 url=self.url_entities,
                                 headers=headers,
                                 fields=fields)
        # status // 200 == 1 treats any 2xx/3xx status as success.
        if response.status // 200 != 1:
            if response.status == 404:
                logger.info("Not found: %s, \nfields: %s", self.url_entities,
                            fields)
                return []
            raise FiException(
                response.status,
                "Error{}: {}".format(response.status,
                                     response.data.decode(self.codec)))
        results = json.loads(response.data.decode(self.codec))
        total_count = int(response.headers["fiware-total-count"])
        count = len(results)
        if not limit:
            # limit == 0 means "everything": keep paginating to the total.
            limit = total_count
        # More matches remain and the caller wants more than this page gave:
        # recurse with a reduced limit and advanced offset, then append.
        if total_count - offset >= limit > count:
            results.extend(
                self.search(entity_type=entity_type,
                            id_pattern=id_pattern,
                            query=query,
                            georel=georel,
                            geometry=geometry,
                            coords=coords,
                            limit=limit - count,
                            offset=offset + count,
                            key_values=key_values,
                            hierarchical_search=hierarchical_search))

        return results

    def delete(self, entity_id, silent=False, entity_type=None):
        """Delete an entity from the Context broker.

        :param entity_id: Id of the entity to erase.
        :param silent: If True, a 404 (entity not found) is ignored instead
            of raising.
        :param entity_type: Restrict the deletion to a specific type.

        :raises FiException: on any non-2xx/3xx response, except a 404 when
            ``silent`` is True.
        :return: Nothing
        """
        get_url = self.url_entities + '/' + entity_id
        if entity_type:
            get_url += "?type=" + entity_type

        response = self._request(method="DELETE",
                                 url=get_url,
                                 headers=self.header_no_payload)
        # Any 2xx/3xx status is success.
        if response.status // 200 != 1:
            if response.status == 404:
                # Log "Not found" only for an actual 404; the previous code
                # logged it for every error status, which was misleading.
                logger.debug("Not found: %s", get_url)
                if silent:
                    return
            raise FiException(
                response.status,
                "Error{}: {}".format(response.status,
                                     response.data.decode(self.codec)))

    def create(self, element_id, element_type, **attributes):
        """Create an entity, deriving each attribute's NGSI type name from
        its Python type (str -> "String", int -> "Integer",
        dict -> "StructuredValue", anything else -> capitalized type name).

        :param element_id: The ID of the entity.
        :param element_type: The type of the entity.
        :param attributes: Attribute values; NGSI types are inferred.
        """
        # Python type name -> NGSI type, for the cases where capitalize()
        # alone does not produce the NGSI spelling.
        ngsi_names = {
            "Str": "String",
            "Int": "Integer",
            "Dict": "StructuredValue",
        }
        body = {'id': element_id, "type": element_type}
        for name, value in attributes.items():
            python_name = type(value).__name__.capitalize()
            body[name] = {
                'value': value,
                "type": ngsi_names.get(python_name, python_name),
            }

        self.create_raw(element_id, element_type, **body)

    def create_raw(self, element_id, element_type, **attributes):
        """ Create a Entity in the context broker. The entities can be passed as parameters or as a dictionary with **
        or attributes.

        Examples:

            fiware_manager.create(element_id="1", element_type="fake", **{'weight': 300, 'size': "100l"})
            fiware_manager.create(element_id="1", element_type="fake", attributes= {'weight': 300, 'size': "100l"})
            fiware_manager.create(element_id="1", element_type="fake", weight=300, size="100l")

        :param element_id: The ID of the entity
        :param element_type: The Type on the entity
        :param attributes:  The attributes for the entity.

        :raises FiException: on any non-2xx/3xx response.
        :return: Nothing
        """
        # Fold id/type into the payload (only when provided).
        for key, value in (("id", element_id), ("type", element_type)):
            if value:
                attributes[key] = value

        response = self._request(method="POST",
                                 url=self.url_entities,
                                 body=attributes,
                                 headers=self.header_payload)
        if response.status // 200 == 1:
            return
        raise FiException(
            response.status,
            "Error{}: {}".format(response.status,
                                 response.data.decode(self.codec)))

    def patch(self, element_id, element_type, **attributes):
        """Partially update an entity's attributes (PATCH on /attrs).

        :param element_id: Id of the entity to patch.
        :param element_type: Type of the entity.
        :param attributes: Attribute payload to send.

        :raises FiException: on any non-2xx/3xx response.
        """
        url = f"{self.url_entities}/{element_id}/attrs?type={element_type}"

        response = self._request(method="PATCH",
                                 url=url,
                                 body=attributes,
                                 headers=self.header_payload)
        if response.status // 200 == 1:
            return
        if response.status == 404:
            logger.debug("Not found: %s", url)
        raise FiException(
            response.status,
            "Error{}: {}".format(response.status,
                                 response.data.decode(self.codec)))

    def update(self, element_id, element_type, **attributes):
        """Update an entity's attributes (POST on /attrs).

        :param element_id: Id of the entity to update.
        :param element_type: Type of the entity.
        :param attributes: Attribute payload to send.

        :raises FiException: on any non-2xx/3xx response.
        """
        url = f"{self.url_entities}/{element_id}/attrs?type={element_type}"

        response = self._request(method="POST",
                                 url=url,
                                 body=attributes,
                                 headers=self.header_payload)
        if response.status // 200 == 1:
            return
        if response.status == 404:
            logger.debug("Not found: %s", url)
        raise FiException(
            response.status,
            "Error{}: {}".format(response.status,
                                 response.data.decode(self.codec)))

    def delete_attribute(self, element_id, element_type, attribute_name):
        """Delete a single attribute from an entity.

        :param element_id: Id of the entity.
        :param element_type: Type of the entity.
        :param attribute_name: Name of the attribute to remove.

        :raises FiException: on any non-2xx/3xx response.
        """
        url = self.url_entities + "/" + element_id + "/attrs/" + attribute_name + "?type=" + element_type

        # Pass the standard no-payload headers like every other request in
        # this class; this call previously omitted them, so service/auth
        # headers they likely carry were dropped — TODO confirm _request's
        # default headers.
        response = self._request(method="DELETE",
                                 url=url,
                                 headers=self.header_no_payload)
        if response.status // 200 != 1:
            if response.status == 404:
                logger.debug("Not found: %s", url)
            raise FiException(
                response.status,
                "Error{}: {}".format(response.status,
                                     response.data.decode(self.codec)))

    def batch_update(self, action_type, entities):
        """ Create/Modify/Delete multiple entities at once in the context broker.

        Examples:

            fiware_manager.batch_update(action_type="append", entities=[{"type": "Room", "id": "Room3", "temperate": {"value": 29.9, "type": "Float"}}])

        :param action_type: Can be one of "append", "appendStrict", "update", "delete" or "replace"
        :param entities: A list of entities

        :raises FiException: on any non-2xx/3xx response.
        :return: Nothing
        """
        payload = {"actionType": action_type, "entities": entities}

        response = self._request(method="POST",
                                 url=self.url_batch_update,
                                 body=payload,
                                 headers=self.header_payload)
        if response.status // 200 == 1:
            return
        raise FiException(
            response.status,
            "Error{}: {}".format(response.status,
                                 response.data.decode(self.codec)))

    def unsubscribe(self, url=None, subscription_id=None):
        """Remove a subscription, addressed either by its URL (relative to
        the host) or by its id. Exactly one of the two must be given.

        :param url: Subscription URL, relative to the configured host.
        :param subscription_id: Id of the subscription to remove.

        :raises FiException: if both or neither locator is given, or on any
            non-2xx/3xx response.
        """
        if (url is None) == (subscription_id is None):
            raise FiException(None, "Set URL or subscription_id")

        subscription_url = ""
        if url:
            subscription_url = f"{self.host}/{url}"
        elif subscription_id:
            subscription_url = f"{self.url_subscriptions}/{subscription_id}"

        response = self._request(method="DELETE", url=subscription_url)
        if response.status // 200 == 1:
            return
        raise FiException(
            response.status,
            "Error{}: {}".format(response.status,
                                 response.data.decode(self.codec)))

    def subscribe(self,
                  description,
                  entities,
                  condition_attributes=None,
                  condition_expression=None,
                  notification_attrs=None,
                  notification_attrs_blacklist=None,
                  http=None,
                  http_custom=None,
                  attrs_format=None,
                  metadata=None,
                  expires=None,
                  throttling=None):
        """Register a subscription in the context broker.

        Builds the NGSIv2 subscription payload from the given pieces and
        POSTs it to the subscriptions endpoint. Exactly one of ``http`` /
        ``http_custom`` must be set, and at most one of
        ``notification_attrs`` / ``notification_attrs_blacklist``.

        :return: A tuple (subscription id, subscription location path).
        :raises FiException: on any non-2xx/3xx response.
        :raises Exception: when the mutually exclusive arguments clash.
        """
        subscription = {"description": description}

        # Optional general settings.
        if expires:
            subscription["expires"] = expires
        if throttling:
            subscription["throttling"] = throttling

        # Subject: which entities, and which changes trigger notifications.
        condition = {}
        if condition_attributes:
            condition["attrs"] = condition_attributes
        if condition_expression:
            condition["expression"] = condition_expression
        subscription["subject"] = {
            "entities": entities,
            "condition": condition
        }

        # Notification: what is sent, and where.
        notification = {}
        if notification_attrs and notification_attrs_blacklist:
            raise Exception(
                "One and only one of notification_attrs and notification_attrs_blacklist can be set"
            )
        if notification_attrs:
            notification["attrs"] = notification_attrs
        if notification_attrs_blacklist:
            notification["exceptAttrs"] = notification_attrs_blacklist

        # The target must be given exactly once: plain http or httpCustom.
        if (http is None) == (http_custom is None):
            raise Exception(
                "One and only one of http and http_custom can be set")
        if http:
            notification["http"] = {"url": http}
        if http_custom:
            notification["httpCustom"] = http_custom

        if attrs_format:
            notification["attrsFormat"] = attrs_format
        if metadata:
            notification["metadata"] = metadata
        subscription["notification"] = notification

        response = self._request(method="POST",
                                 url=self.url_subscriptions,
                                 body=subscription,
                                 headers=self.header_payload)

        if response.status // 200 != 1:
            raise FiException(
                response.status,
                "Error{}: {}".format(response.status,
                                     response.data.decode(self.codec)))

        # The broker reports the new subscription's path in "location";
        # its last segment is the subscription id.
        location = response.headers["location"]
        return location.split('/')[-1], location

    def subscription(self, subscription_id=None):
        """Fetch one subscription by id, or the full subscription list when
        no id is given.

        :param subscription_id: Id of the subscription to fetch (optional).

        :return: The decoded JSON payload.
        :raises FiException: on any non-2xx/3xx response.
        """
        url = self.url_subscriptions
        if subscription_id:
            url = url + '/' + subscription_id

        response = self._request(method="GET",
                                 url=url,
                                 fields={},
                                 headers=self.header_no_payload)
        if response.status // 200 != 1:
            raise FiException(
                response.status,
                "Error{}: {}".format(response.status,
                                     response.data.decode(self.codec)))
        return json.loads(response.data.decode(self.codec))

    def subscriptions(self, limit=None, offset=None, count=False):
        """List subscriptions with optional pagination.

        :param limit: Maximum number of subscriptions to return.
        :param offset: Number of subscriptions to skip, for pagination.
        :param count: If True, ask the broker to compute the total count
            (returned in the fiware-total-count response header).

        :return: The decoded JSON payload; a single-element list is
            unwrapped to that element.
        :raises FiException: on any non-2xx/3xx response.
        """
        fields = {}
        url = self.url_subscriptions
        if limit:
            fields["limit"] = limit
        if offset:
            fields["offset"] = offset
        if count:
            # NGSIv2 expects "options=count" (same token search() uses);
            # the previous value '{"count": True}' was not a valid token.
            fields["options"] = "count"

        response = self._request(method="GET",
                                 url=url,
                                 fields=fields,
                                 headers=self.header_no_payload)
        if response.status // 200 != 1:
            raise FiException(
                response.status,
                "Error{}: {}".format(response.status,
                                     response.data.decode(self.codec)))
        data = json.loads(response.data.decode(self.codec))
        if isinstance(data, list) and len(data) == 1:
            data = data[0]

        return data

    def subscription_update(self,
                            subscription_id,
                            status=None,
                            description=None,
                            entities=None,
                            condition_attributes=None,
                            condition_expression=None,
                            notification_attrs=None,
                            notification_attrs_blacklist=None,
                            http=None,
                            http_custom=None,
                            attrs_format=None,
                            metadata=None,
                            expires=None,
                            throttling=None):
        """Patch an existing subscription; only the provided pieces are sent.

        Unlike subscribe(), every section here is optional — an empty
        section is simply left out of the PATCH body.

        :raises FiException: on any non-2xx/3xx response.
        :raises Exception: when mutually exclusive arguments clash.
        """
        subscription = {}
        # Top-level optional settings.
        for key, value in (("status", status),
                           ("description", description),
                           ("expires", expires),
                           ("throttling", throttling)):
            if value:
                subscription[key] = value

        # Subject section (only included when something was given).
        condition = {}
        if condition_attributes:
            condition["attrs"] = condition_attributes
        if condition_expression:
            condition["expression"] = condition_expression

        subject = {}
        if entities:
            subject["entities"] = entities
        if condition:
            subject["condition"] = condition
        if subject:
            subscription["subject"] = subject

        # Notification section.
        notification = {}
        if notification_attrs and notification_attrs_blacklist:
            raise Exception(
                "One and only one of notification_attrs and notification_attrs_blacklist can be set"
            )
        if notification_attrs:
            notification["attrs"] = notification_attrs
        if notification_attrs_blacklist:
            notification["exceptAttrs"] = notification_attrs_blacklist

        if http and http_custom:
            raise Exception(
                "One and only one of http and http_custom must be set: http or http_custom"
            )
        if http:
            notification["http"] = {"url": http}
        if http_custom:
            notification["httpCustom"] = http_custom

        if attrs_format:
            notification["attrsFormat"] = attrs_format
        if metadata:
            notification["metadata"] = metadata
        if notification:
            subscription["notification"] = notification

        response = self._request(method="PATCH",
                                 url=self.url_subscriptions + "/" +
                                 subscription_id,
                                 body=subscription,
                                 headers=self.header_payload)
        if response.status // 200 == 1:
            return
        raise FiException(
            response.status,
            "Error{}: {}".format(response.status,
                                 response.data.decode(self.codec)))
Example #37
0
class OAuthManager:
    """Obtain, cache and refresh OAuth2 tokens for the context broker.

    Tokens are requested with the resource-owner-password grant and, when
    they approach expiration, renewed with the refresh-token grant.
    """

    def __init__(self,
                 oauth_server_url=None,
                 client_id=None,
                 client_secret=None,
                 user=None,
                 password=None,
                 codec="utf-8",
                 token=None,
                 refresh_token=None,
                 secure_lapse=10,
                 scopes=None):
        """
        :param oauth_server_url: Base URL of the OAuth2 server.
        :param client_id: OAuth2 client id.
        :param client_secret: OAuth2 client secret.
        :param user: Resource-owner user name.
        :param password: Resource-owner password.
        :param codec: Codec used to encode/decode payloads.
        :param token: Previously obtained access token, if any.
        :param refresh_token: Previously obtained refresh token, if any.
        :param secure_lapse: Seconds before real expiration at which the
            token is already treated as expired.
        :param scopes: Iterable of scope names to request.
        """
        self.codec = codec
        self.oauth_server = oauth_server_url
        self.user = user
        self.password = password
        self._client_id = client_id
        self._client_secret = client_secret
        self._encode()
        self.PM = PoolManager()
        self._bearer = None
        self._token = token
        self._refresh_token = refresh_token
        self._expiration = None
        self.secure_lapse = secure_lapse
        self.scopes = scopes

    def _encode(self):
        # HTTP Basic credential: base64("client_id:client_secret").
        self.packed_Auth = b64encode(
            bytes("{0}:{1}".format(self._client_id, self._client_secret),
                  self.codec)).decode(self.codec)

    @property
    def token(self):
        """Return a valid token, logging in or refreshing as needed."""
        if self._token:
            if self.expired:
                try:
                    self._refresh_orion()
                except Exception:
                    # Refresh failed; fall back to a fresh login.
                    self._login()
        else:
            self._login()
        if self._token is None:
            # _login() swallows connection errors and leaves no token;
            # previously this fell through and crashed on _bearer.lower().
            return None
        if self._bearer.lower() == "bearer":
            return f"Bearer {self._token}"
        return self._token

    @property
    def expired(self):
        """True when the token is within secure_lapse seconds of expiring."""
        if self._expiration is not None:
            return time() >= self._expiration - self.secure_lapse
        return False

    @property
    def client_id(self):
        return self._client_id

    @client_id.setter
    def client_id(self, value):
        self._client_id = value
        self._encode()

    @property
    def client_secret(self):
        return self._client_secret

    @client_secret.setter
    def client_secret(self, value):
        # Fix: assign the backing field; assigning self.client_secret here
        # re-entered this setter and recursed forever.
        self._client_secret = value
        self._encode()

    def _store_token_response(self, r):
        """Parse a successful token response and cache its fields."""
        response = loads(r.data.decode(self.codec))
        logger.debug(response)
        self._bearer = response["token_type"]
        self._token = response["access_token"]
        self._refresh_token = response["refresh_token"]
        self._expiration = time() + response["expires_in"]

    def _login(self):
        """Password-grant login; logs and returns None on connection errors."""
        url = self.oauth_server + "/token"
        headers = {
            "Authorization": "BASIC " + self.packed_Auth,
            "Content-Type": "application/x-www-form-urlencoded"
        }
        # NOTE(review): the credentials in this body were redacted
        # ("******") in the original source; reconstructed from the
        # password-grant convention — confirm field names against the
        # OAuth2 server.  The body is deliberately not logged.
        body = ("grant_type=password&username=" + self.user +
                "&password=" + self.password +
                "&scope=" + " ".join(self.scopes))

        logger.debug("URL %s\nHEADERS %s\n", url, headers)
        try:
            r = self.PM.request(method='POST',
                                url=url,
                                headers=headers,
                                body=body)
        except Exception as ex:
            logger.warning("Unable to get Auth token: %s", ex)
            return None
        if r.status // 100 != 2:
            raise Exception("Orion Failed:%s ", r.status)

        self._store_token_response(r)

    def _refresh_orion(self):
        """Refresh-token grant; raises if the server rejects it."""
        url = self.oauth_server + "/token"
        headers = {
            "Authorization": "BASIC " + self.packed_Auth,
            "Content-Type": "application/x-www-form-urlencoded"
        }
        body = "grant_type=refresh_token&refresh_token=" + self._refresh_token

        logger.debug("URL %s\nHEADERS %s\nBODY %s\n", url, headers, body)

        r = self.PM.request(method='POST', url=url, headers=headers, body=body)
        if r.status // 100 != 2:
            raise Exception("Orion Failed:%s ", r.status)

        self._store_token_response(r)
Example #38
0
from urllib3 import PoolManager
import json
if __name__ == '__main__':
    # Request recipe: spoof a mobile Safari user agent and POST two form
    # fields to httpbin's echo endpoint.
    request_spec = {
        'header': {
            'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B137 Safari/601.1'
        },
        'url': 'https://httpbin.org/anything',
        'data': {
            'msg': 'welcomeuser',
            'isadmin': 1
        }
    }

    pool = PoolManager()
    response = pool.request(
        'POST',
        request_spec['url'],
        fields=request_spec['data'],
        headers=request_spec['header']
    )

    # httpbin echoes the request back; show the headers it received.
    print(json.loads(response.data.decode('utf-8'))['headers'])
Example #39
0
import json
from google.appengine.ext.webapp import template
from google.appengine.ext import vendor
# Add any libraries install in the "lib" folder.
# NOTE(review): relies on `os` being imported earlier in the file.
vendor.add(os.path.join(os.path.dirname(os.path.realpath(__file__)),'lib'))

# third party libraries
from urllib3 import PoolManager
from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox

# Pick the urllib3 manager that works in the current runtime.
if is_appengine_sandbox():
    # AppEngineManager uses AppEngine's URLFetch API behind the scenes
    http = AppEngineManager()
else:
    # PoolManager uses a socket-level API behind the scenes
    http = PoolManager()

import requests_toolbelt.adapters.appengine
from watson_developer_cloud import PersonalityInsightsV3
from watson_developer_cloud import WatsonApiException

# Patch `requests` so it uses URLFetch inside the App Engine sandbox.
requests_toolbelt.adapters.appengine.monkeypatch()

# Route table: page name -> URL path.
PAGES = {
    'home':'/',
    'results':'/results',
}

class MainPage(webapp2.RequestHandler):
    def get(self):
        path = os.path.join(os.path.dirname(__file__), 'template')
class SkuGen(object):
	"""Enrich scraped SKU records: look each SKU up via an external API
	and merge the result into the output database.

	Python 2 code (print statements); indentation uses tabs.
	"""
	def __init__(self):
		self.tableManager = Tables()
		# Output and input databases (names come from databaseManager).
		self.outdb = databaseManager('sku','skudatabase')
		self.indb = databaseManager('matching','la1018')
		# REQUEST_AMOUNT concurrent connections, pinned to the API host.
		self.pool = PoolManager(REQUEST_AMOUNT)
		self.request = self.pool.connection_from_url(URL)

	def api_request(self, key):
		# GET URL + key + "?key=<API_KEY>" and decode the JSON body;
		# returns None on any parse failure.
		api = '?key='+API_KEY
		url = URL + key + api
		data = self.request.urlopen('GET', url)
		value = data.data
		data.close()
		print value
		try:
			out = json.loads(value)
			return out
		except Exception:
			pass

	def googleRequest(self, key):
		# Query the (legacy) Google search JSON API for the key.
		callback = 'response_dict'
		context = 'bar'
		query = 'q='+key 
		# NOTE(review): `callback` and `cont` are built but never appended
		# to the request URL below — confirm whether that is intended.
		callback = '&callback=' + callback
		cont = '&context='+context

		url = GOOGLE + query
		response = urlopen(url)
		html = response.read()
		out = json.loads(html)
		out = out['responseData']
			
		# NOTE(review): bare `getTop` is undefined at module level here —
		# this likely should be `self.getTop`.
		return out['results'], getTop(out['results'])

	def getTop(self, arr):
		# First search result's title and url.
		top = arr[0]
		title = top['title']
		url = top['url']
		return title, url
	
	def api_process(self, api_dict):
		# Extract the product name from the API's "basic" section, if any.
		api_dict = api_dict['basic']
		if 'name' in api_dict:
			name = api_dict['name']
			return name
	
	def siteObject(self, item):
		# Project the per-site fields of an input record into a new dict.
		site = dict()
		site['url'] = item['url']
		site['price'] = item['price']
		site['name'] = item['name']
		site['description'] = item['description']
		site['image'] = item['image']
		site['price_str'] = item['price_str']
		site['volume'] = item['volume']
		return site
	def newItem(self, sku, name, item):
		# Skeleton output record; per-site entries are appended later.
		new_item = dict()
		new_item['sku'] = sku
		new_item['name'] = name
		new_item['brand'] = item['brand']
		new_item['category'] =item['category']
		new_item['sites'] = []
		return new_item		


	def querySku(self, integer, site):
		# Walk up to `integer` input records for `site`, resolve each SKU
		# through the API, and upsert enriched records into outdb.
		collection = self.indb.getCollection()
		count = 0
		for item in collection.find({'site': site}).limit(integer):
			sku = item['sku']
			_sku = str(utils.extractSku(sku))
			print 'querying sku : %s' % sku
			out = self.api_request(_sku)
			try:
				name = self.api_process(out)
				print name 
				count = count + 1
				print count
				if name:
					
					new_item = self.newItem(sku,name,item)	
					site = self.siteObject(item)
					site['sku'] = sku
					new_item['sites'].append(site)
					self.outdb.updateViaSku(new_item)
			
			# NOTE(review): this swallows every failure silently, including
			# api_request returning None.
			except Exception:
				pass		
Example #41
0
# HTTP statuses considered transient and worth retrying.
status_list = (500, 502, 503, 504)

# Retry policy: up to 5 attempts with exponential backoff (0.2s factor).
urllib3_retry = {
    'total': 5,
    'backoff_factor': 0.2,
    'status_forcelist': status_list
}

# Pool settings: require and verify TLS against the local Minio cert.
urllib3_poolmanager = {
    'timeout': Timeout.DEFAULT_TIMEOUT,
    'cert_reqs': 'CERT_REQUIRED',
    'ca_certs': '/tmp/minio.crt',
    'retries': Retry(**urllib3_retry)
}

http_client = PoolManager(**urllib3_poolmanager)

# NOTE(review): credentials are hard-coded here — consider loading them
# from the environment or a config file instead.
params_conn = {
    'endpoint': '192.168.56.2:9000',
    'access_key': '5D0EN4B8SF7EYUJH1V8I',
    'secret_key': 'BjtgRNuLHBSEUU4e2Yzz/lBVjoFVnIvCr6rGFRXo',
    'secure': True,
    'http_client': http_client
}

conn = Minio(**params_conn)

try:
    conn.make_bucket('foo')

except BucketAlreadyOwnedByYou as err:
Example #42
0
    def __init__(self,
                 license_id='default',
                 license_file="~/.BoonLogic.license",
                 timeout=120.0,
                 verify=True,
                 cert=None):
        """Primary handle for BoonNano Pod instances

        The is the primary handle to manage a nano pod instance

        Args:
            license_id (str): license identifier label found within the .BoonLogic.license configuration file
            license_file (str): path to .BoonLogic license file
            timeout (float): read timeout for http requests
            verify:  Either a boolean, in which case it controls whether we verify the server’s TLS certificate, or a string, in which case it must be a path to a CA bundle to use
            cert (bool): if String, path to ssl client cert file (.pem). If Tuple, (‘cert’, ‘key’) pair.
        

        Environment:
            BOON_LICENSE_FILE: sets license_file path
            BOON_LICENSE_ID: sets license_id
            BOON_API_KEY: overrides the api-key as found in .BoonLogic.license file
            BOON_API_TENANT: overrides the api-tenant as found in .BoonLogic.license file
            BOON_SERVER: overrides the server as found in .BoonLogic.license file
            PROXY_SERVER: overrides the proxy server as found in .BoonLogic.license file
            BOON_SSL_CERT: path to ssl client cert file (.pem)
            BOON_SSL_VERIFY: Either a boolean, in which case it controls whether we verify the server’s TLS certificate, or a string, in which case it must be a path to a CA bundle to use


        Example:
            ```python
            try:
                nano = bn.NanoHandle()
            except bn.BoonException as be:
                print(be)
                sys.exit(1)
            ```

        """
        self.license_id = None
        self.api_key = None
        self.api_tenant = None
        self.instance = ''
        self.numeric_format = ''

        # Environment variables take precedence over constructor arguments.
        env_license_file = os.environ.get('BOON_LICENSE_FILE', None)
        env_license_id = os.environ.get('BOON_LICENSE_ID', None)
        env_api_key = os.environ.get('BOON_API_KEY', None)
        env_api_tenant = os.environ.get('BOON_API_TENANT', None)
        env_server = os.environ.get('BOON_SERVER', None)
        env_proxy_server = os.environ.get('PROXY_SERVER', None)
        env_cert = os.environ.get('BOON_SSL_CERT', None)
        env_verify = os.environ.get('BOON_SSL_VERIFY', None)

        # certificates
        # NOTE(review): when BOON_SSL_CERT is unset this lookup only covers
        # cert in {None, True}; cert=False or a path string (as the
        # docstring allows) raises KeyError — confirm intended.
        self.cert = 'CERT_REQUIRED' if env_cert else {
            None: 'CERT_NONE',
            True: 'CERT_REQUIRED'
        }[cert]
        # BOON_SSL_VERIFY accepts "true"/"false" or a CA-bundle path.
        if env_verify:
            if env_verify.lower() == 'false':
                self.verify = False
            elif env_verify.lower() == 'true':
                self.verify = True
            else:
                self.verify = env_verify
        else:
            self.verify = verify

        # when license_id comes in as None, use 'default'
        if license_id is None:
            license_id = 'default'

        license_file = env_license_file if env_license_file else license_file
        self.license_id = env_license_id if env_license_id else license_id

        # Load and validate the license file; every missing piece raises a
        # BoonException with a specific message.
        license_path = os.path.expanduser(license_file)
        if not os.path.exists(license_path):
            raise BoonException(
                "license file {} does not exist".format(license_path))
        try:
            with open(license_path, "r") as json_file:
                file_data = json.load(json_file)
        except json.JSONDecodeError as e:
            raise BoonException(
                "json formatting error in .BoonLogic.license file, {}, line: {}, col: {}"
                .format(e.msg, e.lineno, e.colno))
        try:
            license_data = file_data[self.license_id]
        except KeyError:
            raise BoonException(
                "license_id \"{}\" not found in license file".format(
                    self.license_id))

        try:
            self.api_key = env_api_key if env_api_key else license_data[
                'api-key']
        except KeyError:
            raise BoonException(
                "\"api-key\" is missing from the specified license in license file"
            )

        try:
            self.api_tenant = env_api_tenant if env_api_tenant else license_data[
                'api-tenant']
        except KeyError:
            raise BoonException(
                "\"api-tenant\" is missing from the specified license in license file"
            )

        try:
            self.server = env_server if env_server else license_data['server']
        except KeyError:
            raise BoonException(
                "\"server\" is missing from the specified license in license file"
            )

        self.proxy_server = env_proxy_server
        if not self.proxy_server and 'proxy-server' in license_data.keys():
            self.proxy_server = license_data['proxy-server']

        # set up base url
        # NOTE(review): the substring test also matches "https" and any URL
        # merely containing "http" — a startswith check may be intended.
        self.url = self.server + '/expert/v3/'
        if "http" not in self.server:
            self.url = "http://" + self.url

        # create pool manager
        # 30s connect timeout; read timeout comes from the caller.
        timeout_inst = Timeout(connect=30.0, read=timeout)
        if self.proxy_server:
            # proxy pool
            self.http = ProxyManager(self.proxy_server,
                                     maxsize=10,
                                     timeout=timeout_inst,
                                     cert_reqs=self.cert)
        else:
            # non-proxy pool
            self.http = PoolManager(timeout=timeout_inst, cert_reqs=self.cert)
from termcolor import colored
import ConfigParser
# Python 2 script (bare `print`, ConfigParser module name).
config = ConfigParser.ConfigParser()
config.read('config.ini')

# NOTE(review): `init()` is presumably colorama's — confirm import earlier
# in the file.
init()
print
print(colored(' => START <=', 'yellow', 'on_blue'))
print

if __name__ == '__main__':
    import sys
# NOTE(review): this runs outside the __main__ guard above, so it executes
# on import too — likely unintended.
model = sys.argv[1]

url = 'https://chaturbate.com/{}/'.format(model)
manager = PoolManager(10)
r = manager.request('GET', url)
enc = (r.data)
dec = urllib.unquote(enc)
if 'HTTP 404' not in dec:
    try:
        pwd0 = dec.split('broadcaster_username')[1]
        pwd = pwd0.split(':')[0]
    except:
        print(colored(' => Wrong model name or banned <=', 'yellow', 'on_red'))
        print
        print(colored(' => END <=', 'yellow', 'on_blue'))
        sys.exit()

    if 'u0022offline' not in dec:
 def __init__(self, error_dict):
     """Keep the shared error-proxy mapping and the handles used to serve it."""
     # The mapping is supplied by the caller; service handles are per-instance.
     self.error_proxy_dict = error_dict
     self.redis_service = RedisService()
     self.connection_pool = PoolManager()
	def __init__(self):
		"""Wire up the table manager, both database handles and the HTTP pool."""
		self.tableManager = Tables()
		# Output side writes to 'skudatabase'; input side reads from 'la1018'.
		self.outdb = databaseManager('sku','skudatabase')
		self.indb = databaseManager('matching','la1018')
		# One pool sized by REQUEST_AMOUNT; keep a dedicated connection for URL.
		pool = PoolManager(REQUEST_AMOUNT)
		self.pool = pool
		self.request = pool.connection_from_url(URL)
Example #46
0
    def __init__(self):
        """Create the HTTP connection pool and quiet urllib3's pool logging."""
        self.pool = PoolManager(10)
        # Connection-pool chatter is noisy below WARNING; raise the threshold.
        logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)
Example #47
0
 def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
     """Build this adapter's PoolManager, forcing TLS 1.2.

     Bug fix: the original accepted **pool_kwargs but silently dropped it;
     requests' HTTPAdapter contract is that extra keyword arguments are
     forwarded to the underlying PoolManager, so they are now passed through.

     :param connections: number of urllib3 connection pools to cache
     :param maxsize: maximum number of connections per pool
     :param block: whether connections block when the pool is exhausted
     :param pool_kwargs: extra keyword arguments for PoolManager
     """
     # Set the protocol last so callers cannot accidentally override it.
     pool_kwargs['ssl_version'] = ssl.PROTOCOL_TLSv1_2
     self.poolmanager = PoolManager(num_pools=connections,
                                    maxsize=maxsize,
                                    block=block,
                                    **pool_kwargs)
# Lint-rule fixtures: both snippets create HTTP machinery without configuring
# retries, which is what the 'missed-retries-on-access' rule below flags.
# NOTE(review): `x` and the bare `requests` name are not defined in this
# fragment -- these lines exist only to exercise the rule, not to run.
from requests.adapters import HTTPAdapter
s = requests.Session()
# rule-id: missed-retries-on-access
s.mount('https://bronevichok.ru/', requests.adapters.HTTPAdapter(x))

from urllib3 import PoolManager, Retry
# PoolManager built without retries=Retry(...) -- default retry behaviour.
# rule-id: missed-retries-on-access
http = PoolManager()
response = http.request('GET', 'https://bronevichok.ru/')
Example #49
0
def GetWeatherData(WU) :
    """Poll the Weather Underground station list and fill *WU* with new data.

    Tries each personal weather station in the global WU_PWS list (closest
    first).  On the first station whose observation is newer than the global
    WU_Last_Observation_Epoch, populates current conditions, a 4-day simple
    forecast, 8 text forecasts, sun/moon phase data and alerts into the WU
    dict and returns True.  Returns False when no station has newer data or
    every request fails.

    Improvements over the original: the JSON payload is parsed once instead
    of five times, the bare `except:` is narrowed to `except Exception:` so
    Ctrl-C still interrupts, and the icon-rename `elif` chain and the
    field-by-field copies are table driven.  Behaviour is unchanged.
    """
    global WU_Last_Observation_Epoch
    global WU_Num_Calls_Made_Today
    global WU_Today

    # Current-condition fields copied verbatim from the API response.
    co_fields = ('weather', 'temp_f', 'temp_c', 'relative_humidity',
                 'wind_dir', 'wind_mph', 'wind_gust_mph', 'wind_kph',
                 'wind_gust_kph', 'pressure_mb', 'pressure_in',
                 'feelslike_f', 'feelslike_c', 'visibility_mi',
                 'visibility_km', 'precip_today_in', 'precip_today_metric')
    # API icon names rewritten to friendlier display text.  Both
    # 'mostlysunny' and 'partlysunny' intentionally map to 'mostly sunny',
    # matching the original behaviour.
    icon_names = {'chancerain': 'poss rain',
                  'partlycloudy': 'partly cloudy',
                  'mostlycloudy': 'mostly cloudy',
                  'chanceflurries': 'poss flurries',
                  'chancesleet': 'poss sleet',
                  'chancesnow': 'poss snow',
                  'chancetstorms': 'poss T-storms',
                  'mostlysunny': 'mostly sunny',
                  'partlysunny': 'mostly sunny',
                  'tstorms': 'T-storms'}

    i = 1
    for PWS in WU_PWS :
        try:
            status = 0  # progress marker, reported in the error message below
            pm = PoolManager()
            r = pm.request('GET', 'http://api.wunderground.com/api/' + WU_API_KEY +
                            '/alerts/astronomy/conditions/forecast/q/' + WU_STATE_CODE +
                            '/pws:' + PWS + '.json')
            # if it's tomorrow, zero out the number of calls made today
            if WU_Today != datetime.date.today() :
                WU_Today = datetime.date.today()
                WU_Num_Calls_Made_Today = 0
            # add 1 to the number of calls made today
            WU_Num_Calls_Made_Today = WU_Num_Calls_Made_Today + 1
            status = 1
            # Parse the body once; the original re-parsed the same payload
            # for every top-level section it read.
            payload = json.loads(r.data.decode('utf-8'))
            co = payload['current_observation']
            if co['temp_f'] is None :
                WGErrorPrint("GetWeatherData", PWS +
                                " temp_f is None.  Calls Today = " +
                                str(WU_Num_Calls_Made_Today))
                continue
            if WU_Last_Observation_Epoch < int(co['observation_epoch']) :
                # we got new data
                WU_Last_Observation_Epoch = int(co['observation_epoch'])
                WU['pws'] = i
                WU['observation_time'] = str(datetime.datetime.fromtimestamp(WU_Last_Observation_Epoch).strftime('Upd: %Y-%m-%d %H:%M:%S (')) + PWS + ')'
                for field in co_fields :
                    WU[field] = co[field]
                status = 2
                fc = payload['forecast']
                # Up to four days of simple forecast; unfilled slots stay {}.
                WU['fc'] = [dict(), dict(), dict(), dict()]
                for i, day in enumerate(fc['simpleforecast']['forecastday']) :
                    if i > 3 :
                        break
                    icon = day['icon']
                    WU['fc'][i] = {
                        'name': day['date']['weekday'],
                        'high_f': day['high']['fahrenheit'] + "°F",
                        'high_c': day['high']['celsius'] + "°C",
                        'low_f': day['low']['fahrenheit'] + "°F",
                        'low_c': day['low']['celsius'] + "°C",
                        'icon': icon_names.get(icon, icon),
                        'icon_url': day['icon_url'],
                    }
                # Up to eight text forecast periods.
                WU['fctxt'] = [dict() for _ in range(8)]
                for i, day in enumerate(fc['txt_forecast']['forecastday']) :
                    if i > 7 :
                        break
                    WU['fctxt'][i]['fcttext'] = day['fcttext']
                    WU['fctxt'][i]['fcttext_metric'] = day['fcttext_metric']
                status = 3
                sp = payload['sun_phase']
                WU['sunrise'] = '%s:%s AM' % (sp['sunrise']['hour'], sp['sunrise']['minute'])
                # NOTE(review): assumes sunset is always after noon (hour 12
                # would render as "0:MM PM"); preserved as-is.
                WU['sunset'] = '%s:%s PM' % (str(int(sp['sunset']['hour']) - 12), sp['sunset']['minute'])
                status = 4
                mp = payload['moon_phase']
                WU['ageOfMoon'] = int(mp['ageOfMoon'])
                # Sometimes the moonrise comes across as null.  Handle it gracefully
                if mp['moonrise']['hour'] == "" :
                    WU['moonrise'] = "N/A"
                else :
                    x = int(mp['moonrise']['hour'])
                    ampm = "PM" if x >= 12 else "AM"
                    if x > 12 : # after noon
                        x = x - 12
                    elif x == 0 : # midnight
                        x = 12
                    WU['moonrise'] = '%s:%s %s' % (x, mp['moonrise']['minute'], ampm)
                # Sometimes the moonset comes across as null.  Handle it gracefully
                if mp['moonset']['hour'] == "" :
                    WU['moonset'] = "N/A"
                else :
                    # NOTE(review): unlike moonrise, hour 12 renders as AM
                    # and hour 0 as "0:MM AM" here; preserved as-is.
                    x = int(mp['moonset']['hour'])
                    if x > 12 :
                        x = x - 12
                        ampm = "PM"
                    else :
                        ampm = "AM"
                    WU['moonset'] = '%s:%s %s' % (x, mp['moonset']['minute'], ampm)
                status = 5
                WU['alerts'] = [alert['message'] for alert in payload['alerts']]
                return True
            else :
                i = i + 1
                WGErrorPrint("GetWeatherData", PWS +
                                " data not newer than last weather data.  Calls Today = " +
                                str(WU_Num_Calls_Made_Today))
        except Exception :
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate; any request/parse failure tries the next PWS.
            i = i + 1
            WGErrorPrint("GetWeatherData", "Weather Collection Error #1 (PWS = " + PWS +
                            ", status = " + str(status) + ", Calls today = " +
                            str(WU_Num_Calls_Made_Today) + ")")
            continue # Try the next closest
    return False # didn't find any newer PWS
Example #50
0
    def __init__(self):
        """Open an HTTP pool whose every request carries a browser User-Agent."""
        # Some servers reject clients without a browser-like User-Agent.
        self.http = PoolManager(headers={
            'user-agent': 'Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0',
        })
Example #51
0
class WeatherGetter:
    """A simple WeatherGetter class to fetch the weather for a given URL

    Fetches an Environment Canada city RSS feed and splits its entries into
    warnings, current conditions and forecasts.

    TODO: Refactor this into a generic WeatherGetter class.
    """

    # Environment Canada city feed (Moncton, NB).
    moncton = 'https://weather.gc.ca/rss/city/nb-36_e.xml'

    def __init__(self):
        self.http = PoolManager()
        self.location = self.moncton  # TODO generic
        self.weather = ''             # BeautifulSoup document after get_page()
        self.warnings = ''
        self.condition = ''
        # Bug fix: lastUpdated was assigned twice in the original __init__.
        self.lastUpdated = ''
        self.forecasts = []

    def get_page(self):
        """Fetch self.location and parse it as XML on HTTP 200.

        Returns True on success, False on any non-200 status.
        """
        self.response = self.http.request('GET', self.location)
        if self.response.status == 200:
            self.weather = BeautifulSoup(self.response.data, "lxml-xml")
            return True

        return False

    def get_warning(self):
        """Return the text of the current weather warnings, otherwise empty string"""

        return self.warnings

    def get_condition(self):
        """Return the text of the current weather condition, otherwise empty string"""

        return self.condition

    def get_last_updated(self):
        """Return the feed's last-updated timestamp text ('' before parsing)."""
        return self.lastUpdated

    def parse_weather(self):
        """private method, used to parse weather page and extract current conditions and alerts"""

        if not self.weather:
            return False

        self.lastUpdated = self.weather.find("updated").get_text()

        # Reset state so repeated parses do not accumulate stale data.
        self.warnings = ''
        self.condition = ''
        self.forecasts = []

        for entry in self.weather.find_all("entry"):
            category = entry.find("category").get("term")
            if category == 'Warnings and Watches':
                self.warnings = entry.find("title").get_text()
            elif category == 'Current Conditions':
                self.condition = entry.find("summary").get_text()
            elif category == 'Weather Forecasts':
                self.forecasts.append(entry)
            else:
                print(f"Unknown entry: {category}")

        return True
Example #52
0
class Api(object):
    """HTTP client core for the website API.

    Owns a urllib3 (Proxy)PoolManager built from a Configuration object,
    a shared header dict, and a lazily-created thread pool, and provides
    the request plumbing (`_call_api`) used by the generated API methods.
    """

    # Lazily-created worker pool; see the `pool` property.
    __pool = None

    def __init__(self, configuration: Configuration, pools_size: int = 4):
        """Build the connection pool described by *configuration*.

        :param configuration: project Configuration (host, TLS, proxy, ...)
        :param pools_size: number of urllib3 connection pools to cache
        """
        self.configuration = configuration

        self.header_params: dict = {}
        self.user_agent = 'felix-scholz/website-python-client/1.0.1/python'

        addition_pool_args = {}
        if configuration.assert_hostname is not None:
            addition_pool_args[
                'assert_hostname'] = configuration.assert_hostname

        if configuration.retries is not None:
            addition_pool_args['retries'] = configuration.retries

        # Bug fix: the original wrote `maxsize=... if not None else 4`.
        # `not None` is always True, so the default of 4 was unreachable and
        # a missing connection_pool_maxsize was passed through as None.
        maxsize = (configuration.connection_pool_maxsize
                   if configuration.connection_pool_maxsize is not None
                   else 4)
        cert_reqs = (ssl.CERT_REQUIRED
                     if configuration.verify_ssl else ssl.CERT_NONE)
        ca_certs = (configuration.ssl_ca_cert
                    if configuration.ssl_ca_cert is not None
                    else certifi.where())

        if configuration.proxy:
            self.pool_manager = ProxyManager(
                num_pools=pools_size,
                maxsize=maxsize,
                cert_reqs=cert_reqs,
                ca_certs=ca_certs,
                cert_file=configuration.cert_file,
                key_file=configuration.key_file,
                proxy_url=configuration.proxy,
                proxy_headers=configuration.proxy_headers,
                **addition_pool_args)
        else:
            self.pool_manager = PoolManager(
                num_pools=pools_size,
                maxsize=maxsize,
                cert_reqs=cert_reqs,
                ca_certs=ca_certs,
                cert_file=configuration.cert_file,
                key_file=configuration.key_file,
                **addition_pool_args)

    def __del__(self):
        # Shut the thread pool down if it was ever created.
        if self.__pool:
            self.__pool.close()
            self.__pool.join()
            self.__pool = None

    @property
    def pool(self) -> ThreadPool:
        """Thread pool for asynchronous requests, created on first use."""
        if self.__pool is None:
            self.__pool = ThreadPool(self.configuration.pool_threads)

        return self.__pool

    @property
    def user_agent(self) -> str:
        """The User-Agent header sent with every request."""
        # Annotation fix: this returns a str, not a dict.
        return self.header_params['User-Agent']

    @user_agent.setter
    def user_agent(self, value: str) -> NoReturn:
        self.header_params['User-Agent'] = value

    @property
    def x_debug(self) -> Optional[str]:
        """The XDEBUG_SESSION cookie value, or None when not set."""
        if 'Cookie' in self.header_params:
            matches = re.match(r'XDEBUG_SESSION=(?P<name>\w+)',
                               self.header_params['Cookie'])
            if matches is not None:
                return matches.group('name')

        return None

    @x_debug.setter
    def x_debug(self, session: str) -> NoReturn:
        """Attach an XDEBUG_SESSION cookie unless one is already present."""
        if 'Cookie' not in self.header_params:
            self.header_params['Cookie'] = 'XDEBUG_SESSION=' + session
        elif self.x_debug is None:
            cookies = self.header_params['Cookie'].split(';')
            self.header_params['Cookie'] = ';'.join(
                cookies + ['XDEBUG_SESSION=' + session])

    @x_debug.deleter
    def x_debug(self) -> NoReturn:
        """Remove the XDEBUG_SESSION cookie if present."""
        if 'Cookie' in self.header_params and self.x_debug is not None:
            # Bug fix: str.strip() returns a new string; the original threw
            # the stripped result away, leaving dangling '; ' separators.
            cookie = re.sub(r'XDEBUG_SESSION=(?P<name>\w+)', '',
                            self.header_params['Cookie'])
            self.header_params['Cookie'] = cookie.strip('; ')

    @property
    def accept(self) -> str:
        """The Accept header value."""
        return self.header_params['Accept']

    @accept.setter
    def accept(self, accepts: List[str]) -> NoReturn:
        """Set Accept from a list of media types; JSON wins when offered."""
        if not accepts:
            return

        accepts = [x.lower() for x in accepts]

        if 'application/json' in accepts:
            self.header_params['Accept'] = 'application/json'
        else:
            self.header_params['Accept'] = ', '.join(accepts)

    @property
    def content_type(self) -> str:
        """The Content-Type header value."""
        return self.header_params['Content-Type']

    @content_type.setter
    def content_type(self, content_types: List[str]) -> NoReturn:
        """Set Content-Type from a list of media types; JSON preferred."""
        if not content_types:
            self.header_params['Content-Type'] = 'application/json'
            # Bug fix: the original fell through after this assignment and
            # raised IndexError on `content_types[0]` for an empty list.
            return

        content_types = [x.lower() for x in content_types]

        if 'application/json' in content_types or '*/*' in content_types:
            self.header_params['Content-Type'] = 'application/json'
        else:
            self.header_params['Content-Type'] = content_types[0]

    def auth(self, authentications: List[str]) -> List[Tuple[str, str]]:
        """Apply the named auth settings; return query-string credentials.

        Header credentials are written into self.header_params; query
        credentials are returned as (key, value) pairs.  Configured settings
        not listed in *authentications* are removed from the headers.
        """
        queries: List[Tuple[str, str]] = []
        auth_settings = self.configuration.auth_settings()
        for auth_name in auth_settings.keys():
            if auth_name in authentications:
                if not auth_settings[auth_name]['value']:
                    continue
                elif auth_settings[auth_name]['in'] == 'header':
                    self.header_params[auth_settings[auth_name][
                        'key']] = auth_settings[auth_name]['value']
                elif auth_settings[auth_name]['in'] == 'query':
                    queries.append((auth_settings[auth_name]['key'],
                                    auth_settings[auth_name]['value']))
            elif auth_settings[auth_name]['key'] in self.header_params:
                del self.header_params[auth_settings[auth_name]['key']]

        return queries

    def _call_api(
            self,
            resource_path: str,
            method: RequestMethod,
            path_params: Optional[List[Tuple[str, Union[int, str]]]],
            query_params: Optional[List[Tuple[Any, Any]]],
            body: Optional[Any],
            post_params: List[Tuple[str, Any]],
            files: List[Tuple[str, Tuple[str, bytes, str]]],
            _preload_content: Optional[Union[Model, type]] = None,
            _request_timeout: Optional[Union[Tuple[int, int], int]] = 300) \
            -> Tuple[Union[Model, List[Model], dict], int, HTTPHeaderDict]:
        """Substitute path params, perform the request and deserialize.

        When _preload_content is a model *instance*, deserialize a single
        object; when it is a model *class*, deserialize a list; when None,
        return the JSON-decoded raw response.
        """
        if path_params is not None:
            for k, v in path_params:
                resource_path = resource_path.replace(
                    '{%s}' % k,
                    quote(str(v),
                          safe=self.configuration.safe_chars_for_path_param))

        post_params.extend(files)

        response_data = self.__request(
            method,
            self.configuration.host + resource_path,
            query_params=query_params,
            headers=self.header_params,
            post_params=post_params,
            body=body,
            _preload_content=True if _preload_content is not None else False,
            _request_timeout=_request_timeout)

        if _preload_content is not None:
            if type(_preload_content) in [Post, Category, File, Tag]:
                return _preload_content.deserialize(
                    response_data
                ), response_data.status, response_data.getheaders()
            elif _preload_content in [Post, Category, File, Tag]:
                return _preload_content.deserialize_all(
                    response_data
                ), response_data.status, response_data.getheaders()
            # NOTE(review): an unrecognised _preload_content silently falls
            # through and returns None -- preserved as-is.
        else:
            return json.loads(response_data)

    def __request(
        self,
        method: RequestMethod,
        url: str,
        query_params=None,
        headers=None,
        body: Optional[Union[str, bytes, dict]] = None,
        post_params: Optional[Any] = None,
        _preload_content: bool = True,
        _request_timeout: Optional[Union[Tuple[int, int], int]] = None
    ) -> RESTResponse:
        """Perform requests.

        :param method: http request method
        :param url: http request url
        :param query_params: query parameters in the url
        :param headers: http request headers
        :param body: request json body, for `application/json`
        :param post_params: request post parameters,
                            `application/x-www-form-urlencoded`
                            and `multipart/form-data`
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        """
        if post_params is not None and body is not None:
            raise ApiValueError(
                "body parameter cannot be used with post_params parameter.")

        post_params = post_params or {}
        headers = headers or {}

        timeout = None
        if _request_timeout:
            if isinstance(_request_timeout, int):
                timeout = Timeout(total=_request_timeout)
            elif (isinstance(_request_timeout, tuple)
                  and len(_request_timeout) == 2):
                timeout = Timeout(connect=_request_timeout[0],
                                  read=_request_timeout[1])

        if 'Content-Type' not in headers:
            headers['Content-Type'] = 'application/json'

        try:
            if method.value in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
                if query_params:
                    url += '?' + urlencode(query_params)
                if re.search('json', headers['Content-Type'], re.IGNORECASE):
                    request_body = None
                    if body is not None:
                        request_body = json.dumps(body)
                    r = self.pool_manager.request(
                        method.value,
                        url,
                        body=request_body,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                elif headers[
                        'Content-Type'] == 'application/x-www-form-urlencoded':  # noqa: E501
                    r = self.pool_manager.request(
                        method.value,
                        url,
                        fields=post_params,
                        encode_multipart=False,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                elif headers['Content-Type'] == 'multipart/form-data':
                    # must del headers['Content-Type'], or the correct
                    # Content-Type which generated by urllib3 will be
                    # overwritten.
                    del headers['Content-Type']
                    r = self.pool_manager.request(
                        method.value,
                        url,
                        fields=post_params,
                        encode_multipart=True,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                # Pass a `string` parameter directly in the body to support
                # other content types than Json when `body` argument is
                # provided in serialized form
                elif isinstance(body, str) or isinstance(body, bytes):
                    request_body = body
                    r = self.pool_manager.request(
                        method.value,
                        url,
                        body=request_body,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                else:
                    # Cannot generate the request from given parameters
                    msg = """Cannot prepare a request message for provided
                             arguments. Please check that your arguments match
                             declared content type."""
                    raise ApiException(status=0, reason=msg)
            # For `GET`, `HEAD`
            else:
                r = self.pool_manager.request(method.value,
                                              url,
                                              fields=query_params,
                                              preload_content=_preload_content,
                                              timeout=timeout,
                                              headers=headers)
        except SSLError as e:
            msg = "{0}\n{1}".format(type(e).__name__, str(e))
            raise ApiException(status=0, reason=msg)

        if _preload_content:
            r = RESTResponse(r)

        if r.status == 404:
            raise ApiNotFoundException(http_resp=r)

        if not 200 <= r.status <= 299:
            raise ApiException(http_resp=r)

        return r
class HttpRpcProvider(HTTPProvider):
    """HTTP(S) RPC provider.

    Sends RPC payloads over a urllib3 connection pool, a plain
    http.client connection, or a Unix domain socket, depending on the
    target URL and the module-level `use_connection_pool` flag.
    """
    def __init__(self, ssl_args, url, disable_conn_pool=False):
        """
        http rpc provider init

        :type  ssl_args: :class:`dict`
        :param ssl_args: ssl arguments
        :type  url: :class:`str`
        :param url: url to connected to
        :type  disable_conn_pool: :class:`bool`
        :param disable_conn_pool: disable connection pooling
        """
        HTTPProvider.__init__(self)
        self.ssl_enabled = False
        self.ssl_args = ssl_args

        scheme, host, port, user, password, path, _ = parse_addr_url(url)
        assert (scheme in ['http', 'https'])
        if scheme == 'https':
            self.ssl_enabled = True
        assert (user is None and password is None)  # NYI
        if host.startswith('!'):
            # Unix domain socket: hostname is '!' followed by
            # the URL-encoded socket path
            self.host = None
            self.uds = urllib.parse.unquote(host[1:])
            # SSL currently not supported for Unix domain sockets
            if self.ssl_enabled:
                raise Exception('SSL not supported on Unix domain sockets')
        else:
            self.host = host
            self.port = port
            self.uds = None
        self.path = path
        self.cookie = ''
        self.accept_compress_response = True

        global use_connection_pool
        if disable_conn_pool:
            use_connection_pool = False

        # Bug fix: always define self.manager so that disconnect() --
        # reached via __del__ -- cannot raise AttributeError when the pool
        # was never created (Unix-socket mode, or pooling disabled).
        self.manager = None
        if self.uds is None and use_connection_pool:
            self.manager = PoolManager(num_pools=NUM_OF_POOL,
                                       maxsize=POOL_SIZE,
                                       timeout=CONNECTION_POOL_TIMEOUT,
                                       **self.ssl_args)

    def __del__(self):
        """ http rpc provider on delete """
        self.disconnect()

    def connect(self):
        """
        connect

        :rtype: :class:`vmware.vapi.protocol.client.rpc.provider.RpcProvider`
        :return: http rpc provider
        """
        return self

    def disconnect(self):
        """ disconnect: drop all pooled connections, if a pool exists """
        if use_connection_pool and self.manager is not None:
            self.manager.clear()

    def _get_connection(self):
        """
        get connection from pool

        :rtype: :class:`PoolManager` (or)
            :class:`UnixSocketConnection`
        :return: http(s) connection or unix socket connection
        """
        conn = None

        if self.uds:
            conn = UnixSocketConnection(self.uds)
        elif use_connection_pool:
            http_scheme = 'http'
            if self.ssl_enabled:
                http_scheme = 'https'
            conn = self.manager.connection_from_host(host=self.host,
                                                     port=self.port,
                                                     scheme=http_scheme)
        else:
            if self.ssl_enabled:
                conn = http_client.HTTPSConnection(host=self.host,
                                                   port=self.port,
                                                   **self.ssl_args)
            else:
                conn = http_client.HTTPConnection(host=self.host,
                                                  port=self.port)

        return conn

    def do_request(self, http_request):
        """
        Send an HTTP request

        :type  http_request: :class:`vmware.vapi.protocol.client.http_lib.HTTPRequest`    # pylint: disable=line-too-long
        :param http_request: The http request to be sent
        :rtype: :class:`vmware.vapi.protocol.client.http_lib.HTTPResponse`
        :return: The http response received
        """
        # pylint can't detect request, getresponse and close methods from
        # Http(s)Connection/UnixSocketConnection
        # pylint: disable=E1103
        request_ctx = http_request.headers
        request = http_request.body
        content_type = request_ctx.get('Content-Type')
        if not content_type:
            # For http, content-type must be set
            raise Exception('do_request: request_ctx content-type not set')

        response_ctx, response = {'Content-Type': content_type}, None
        # Bug fix: `status` was unbound when the request had no body, making
        # the final return raise UnboundLocalError; default it explicitly.
        status = None
        if request:
            request_length = len(request)
            # Send request
            headers = {'Cookie': self.cookie, 'Content-Type': content_type}
            if self.accept_compress_response:
                headers['Accept-Encoding'] = 'gzip, deflate'

            try:
                conn = self._get_connection()
                logger.debug('do_request: request_len %d', request_length)

                if use_connection_pool:
                    resp = conn.request(method=http_request.method,
                                        url=self.path,
                                        body=request,
                                        headers=headers,
                                        preload_content=False)
                else:
                    conn.request(method=http_request.method,
                                 url=self.path,
                                 body=request,
                                 headers=headers)
                    resp = conn.getresponse()
            except:
                logger.exception('do_request() failed')
                raise

            # Debug
            # logger.debug('do_request: response headers', resp.getheaders())

            cookie = resp.getheader('Set-Cookie')
            if cookie:
                self.cookie = cookie

            status = resp.status
            if status in [200, 500]:
                try:
                    encoding = resp.getheader('Content-Encoding', 'identity').lower()  # pylint: disable=line-too-long
                    if encoding in ['gzip', 'deflate']:
                        response = resp.read(decode_content=True)
                    else:
                        response = resp.read()

                    logger.debug('do_request: response len %d', len(response))
                except:
                    conn.close()
                    raise
                else:
                    # Drain any remaining body so the connection can be reused.
                    if resp:
                        resp.read()

                content_type = resp.getheader('Content-Type')
                if content_type:
                    response_ctx['Content-Type'] = content_type
            else:
                raise http_client.HTTPException('%d %s' % (resp.status, resp.reason))  # pylint: disable=line-too-long

            if self.cookie:
                response_ctx['Cookie'] = self.cookie
        return HTTPResponse(status=status, headers=response_ctx, body=response)
Example #54
0

# Module-level logger for the grolt tool.
log = getLogger("grolt")


# TeamCity build that publishes the Neo4j 4.0 Docker snapshot artifacts.
snapshot_host = "live.neo4j-build.io"
snapshot_build_config_id = "Neo4j40_Docker"
snapshot_build_url = ("https://{}/repository/download/{}/"
                      "lastSuccessful".format(snapshot_host,
                                              snapshot_build_config_id))


# Shared HTTPS pool for TeamCity downloads: certificates are verified against
# certifi's CA bundle; basic-auth credentials come from the environment
# (empty strings when TEAMCITY_USER / TEAMCITY_PASSWORD are unset).
teamcity_http = PoolManager(
    cert_reqs="CERT_REQUIRED",
    ca_certs=certifi.where(),
    headers=make_headers(basic_auth="{}:{}".format(
        getenv("TEAMCITY_USER", ""),
        getenv("TEAMCITY_PASSWORD", ""),
    )),
)


def resolve_image(image):
    """ Resolve an informal image tag into a full Docker image tag. Any tag
    available on Docker Hub for Neo4j can be used, and if no 'neo4j:' prefix
    exists, this will be added automatically. The default edition is
    Community, unless a cluster is being created in which case Enterprise
    edition is selected instead. Explicit selection of Enterprise edition can
    be made by adding an '-enterprise' suffix to the image tag.

    The pseudo-tag 'snapshot' will download the latest bleeding edge image
    from TeamCity. Note that this is a large download (600-700MB) and requires
Example #55
0
def check_vul(url):
    """
    Test if a GET to a URL is successful
    :param url: The URL to test
    :return: A dict with the exploit type as the keys, and the HTTP status code as the value
    """
    # Bulk-scan modes use tighter timeouts and a single retry so one dead
    # host cannot stall the whole scan.
    if gl_args.mode == 'auto-scan' or gl_args.mode == 'file-scan':
        timeout = Timeout(connect=1.0, read=3.0)
        pool = PoolManager(timeout=timeout, retries=1, cert_reqs='CERT_NONE')
    else:
        timeout = Timeout(connect=3.0, read=6.0)
        pool = PoolManager(timeout=timeout, cert_reqs='CERT_NONE')

    # If the target port looks like 443 but the scheme is not https, rebuild
    # the URL with an explicit https scheme.
    url_check = parse_url(url)
    if '443' in str(url_check.port) and url_check.scheme != 'https':
        url = "https://" + str(url_check.host) + ":" + str(url_check.port)

    print(GREEN + "\n ** Checking Host: %s **\n" % url)

    headers = {
        "Accept":
        "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Connection": "keep-alive",
        "User-Agent": user_agents[randint(0,
                                          len(user_agents) - 1)]
    }

    # Exploit name -> probe path. Values are overwritten in the loop with the
    # HTTP status; 505 is used as an "error / not an HTTP server" sentinel.
    paths = {
        "jmx-console":
        "/jmx-console/HtmlAdaptor?action=inspectMBean&name=jboss.system:type=ServerInfo",
        "web-console": "/web-console/ServerInfo.jsp",
        "JMXInvokerServlet": "/invoker/JMXInvokerServlet",
        "admin-console": "/admin-console/"
    }

    # Iterate over a snapshot of the keys: the false-positive branch rebinds
    # `paths` wholly mid-loop, which is unsafe against a live keys() view.
    for i in list(paths):
        if gl_interrupted:
            break
        try:
            # NOTE: trailing comma is a deliberate py2 leftover (suppresses
            # the newline under Python 2); kept to preserve output formatting.
            print(GREEN + " * Checking %s: \t" % i + ENDC),
            r = pool.request('HEAD',
                             url + str(paths[i]),
                             redirect=False,
                             headers=headers)
            paths[i] = r.status

            # check if it's false positive
            if len(r.getheaders()) == 0:
                print(RED +
                      "[ ERROR ]\n * The server %s is not an HTTP server.\n" %
                      url + ENDC)
                paths = {
                    "jmx-console": 505,
                    "web-console": 505,
                    "JMXInvokerServlet": 505,
                    "admin-console": 505
                }
                break

            if paths[i] in (301, 302, 303, 307, 308):
                url_redirect = r.get_redirect_location()
                print(GREEN +
                      "[ REDIRECT ]\n * The server sent a redirect to: %s\n" %
                      url_redirect)
            elif paths[i] == 200 or paths[i] == 500:
                if i == "admin-console":
                    print(RED + "[ EXPOSED ]" + ENDC)
                else:
                    print(RED + "[ VULNERABLE ]" + ENDC)
            else:
                print(GREEN + "[ OK ]")
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C / SystemExit still
            # abort the scan instead of being recorded as a probe failure.
            print(RED +
                  "\n * An error occurred while connecting to the host %s\n" %
                  url + ENDC)
            paths[i] = 505

    return paths
Example #56
0
 def __get_weather_resp(self, url):
     """Fetch *url* over a certificate-verified connection and return the raw urllib3 response."""
     pool = PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
     return pool.request("GET", url)
Example #57
0
def send_request(endpoint, data, pool_manager=None):
    """POST *data* as JSON to *endpoint*.

    A custom *pool_manager* (anything with urllib3's ``urlopen`` interface)
    may be injected; a fresh PoolManager is created when none is given.
    Returns the raw urllib3 response.
    """
    manager = PoolManager() if pool_manager is None else pool_manager
    payload = json.dumps(data)
    return manager.urlopen('POST', endpoint, headers={'Content-Type': 'application/json'}, body=payload)
Example #58
0
    import certifi
    import gnupg
    import dateutil.parser
    import colorama
    from colorama import Fore, Style
    from github_release import (get_releases, gh_asset_download,
                                gh_asset_upload, gh_asset_delete)
    from urllib3 import PoolManager
except ImportError as e:
    print('Import error:', e)
    print('To run script install required packages with the next command:\n\n'
          'pip install githubrelease python-gnupg pyOpenSSL cryptography idna'
          ' certifi python-dateutil click colorama requests LinkHeader')
    sys.exit(1)

# Shared certificate-verified HTTPS pool used for all remote requests.
HTTP = PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())

# Sink for subprocess output we want to discard; deliberately left open for
# the lifetime of the process.
FNULL = open(os.devnull, 'w')

HOME_DIR = os.path.expanduser('~')
CONFIG_NAME = '.sign-releases'  # per-user config file name under HOME_DIR
SEARCH_COUNT = 1
SHA_FNAME = 'SHA256SUMS.txt'  # filename used for the published checksum list

# make_ppa related definitions
# Ubuntu series codename -> distribution version used when building PPA packages.
PPA_SERIES = {
    'trusty': '14.04.1',
    'xenial': '16.04.1',
    'bionic': '18.04.1',
    'cosmic': '18.10.1',
}
class ProxyService(object):
    """Scrape public proxy-list sites, validate the proxies, and keep the
    working ones in per-protocol Redis sets, evicting proxies that fail
    repeatedly.

    NOTE: Python 2 code (print statements, ``dict.items()[0]`` indexing).
    """

    def __init__(self, error_dict):
        # error_dict maps proxy URL -> consecutive error count; it is owned by
        # the caller and shared across requests.
        self.redis_service = RedisService()
        self.error_proxy_dict = error_dict
        self.connection_pool = PoolManager()

    def get_proxy(self, protocol):
        # Return {protocol: proxy_url} drawn from Redis, or None when the set
        # for this protocol is empty.
        proxy = self.redis_service.read_set(PROXY_URL_KEY.format(protocol=protocol))
        if not proxy:
            return None
        return {protocol: proxy}

    def get_valid_size(self, protocol):
        # Number of currently-known proxies for *protocol*.
        return self.redis_service.get_set_size(PROXY_URL_KEY.format(protocol=protocol))

    def process(self):
        # Scrape, parse, liveness-check and persist a fresh batch of HTTPS
        # proxies. (The HTTP pipeline is disabled below.)
        logger.info('Start load proxy.')
        #content = self._scrape_http_proxy()
        #parser_proxy_url_set = self._parser_http_proxy(content)
        #self._save('http', self._check('http', parser_proxy_url_set))

        content = self._scrape_https_proxy()
        parser_proxy_url_set = self._parser_https_proxy(content)
        self._save('https', self._check('https', parser_proxy_url_set))

    def manage(self, proxy, error):
        # Record the outcome of using *proxy* ({protocol: url} or falsy).
        # On repeated failures (> DEFAULT_ERROR_TIMES) the proxy is evicted
        # from Redis; a success decays its error count back toward zero.
        if not proxy:
            return
        protocol, proxy_url = proxy.items()[0]
        if error:
            if proxy_url in self.error_proxy_dict:
                self.error_proxy_dict[proxy_url] += 1
                if self.error_proxy_dict[proxy_url] > DEFAULT_ERROR_TIMES:
                    self.redis_service.remove_set(PROXY_URL_KEY.format(protocol=protocol), proxy_url)
                    self.error_proxy_dict.pop(proxy_url)
                    logger.info('Invalid proxy: {}'.format(proxy_url))
                    print 'Invalid proxy'
            else:
                self.error_proxy_dict[proxy_url] = 1
        else:
            if proxy_url in self.error_proxy_dict:
                self.error_proxy_dict[proxy_url] -= 1
                if self.error_proxy_dict[proxy_url] < 1:
                    self.error_proxy_dict.pop(proxy_url)
        logger.info(self.error_proxy_dict)

    @retry(2)
    def _scrape_http_proxy(self):
        # Download the xicidaili listing page; any failure is re-raised as a
        # generic Exception so the @retry decorator can retry it.
        scrape_url = 'http://www.xicidaili.com/nn'
        header = {'content-type': 'text/html',
                  'User-Agent': user_agents[random.randint(0, len(user_agents)-1)]}
        try:
            # NOTE(review): self.request is never set in __init__ -- presumably
            # a requests-like session injected elsewhere; confirm before
            # re-enabling the HTTP pipeline in process().
            response = self.request.get(scrape_url, headers=header, proxies=None)
            return response.content
        except:
            raise Exception('Failed scrape proxies.')

    @retry(2)
    def _scrape_https_proxy(self):
        # Download the proxy.moo.jp HTTPS proxy listing and return raw bytes.
        #scrape_url = 'http://www.nianshao.me/?stype=2'
        scrape_url = 'http://proxy.moo.jp/zh/?c=&pt=&pr=HTTPS&a%5B%5D=0&a%5B%5D=1&a%5B%5D=2&u=60'
        header = {'content-type': 'text/html',
                  'Accept-Language': 'zh-CN,zh;q=0.8',
                  'User-Agent': user_agents[random.randint(0, len(user_agents)-1)]}
        response = self.connection_pool.request('GET', scrape_url, timeout=60, headers=header)
        return response.data

    def _parser_http_proxy(self, content):
        # Parse the xicidaili HTML table; rows 1..20 (row 0 is the header)
        # yield ip/port pairs formatted through the PROXY_URL template.
        soup = BeautifulSoup(content, 'html.parser')
        proxy_tag = soup.find(id='ip_list').select('tr')
        parser_proxy_url_set = set()
        for i in range(1, 21):
            proxy_url = PROXY_URL.format(protocol='http',
                                         ip=proxy_tag[i].find_all('td')[1].string,
                                         port=proxy_tag[i].find_all('td')[2].string)
            parser_proxy_url_set.add(proxy_url)
        return parser_proxy_url_set

    def _parser_https_proxy(self, content):
        # Parse the proxy.moo.jp HTML: the IP cell holds a percent-encoded
        # token, so extract it with a regex and unquote it before formatting.
        soup = BeautifulSoup(content, 'html.parser')
        proxy_tag = soup.find_all('tr', {'class': 'Odd'})
        res = re.compile('%(%|\w)+')
        parser_proxy_url_set = set()
        for i in range(0, 25):
            tds = proxy_tag[i].find_all('td')
            if not tds[0].string:
                continue
            ip_res = res.search(tds[0].string)
            if ip_res:
                ip = urllib.unquote(ip_res.group(0))
                port = tds[1].string
                proxy_url = PROXY_URL.format(protocol='https', ip=ip, port=port)
                parser_proxy_url_set.add(proxy_url)
        return parser_proxy_url_set

    def _check(self, protocol, proxy_url_set):
        # Return the subset of *proxy_url_set* that answers CHECK_URL[protocol]
        # with HTTP 200 when routed through the candidate proxy.
        valid_proxy_url_set = set()
        for url in proxy_url_set:
            header = {'content-type': 'text/html',
                      'User-Agent': user_agents[random.randint(0, len(user_agents)-1)]}
            # NOTE(review): `proxy` is assigned but never used below.
            proxy = {protocol: url}
            conection_pool = ProxyManager(url)
            try:
                response = conection_pool.request('GET', CHECK_URL[protocol], timeout=60, headers=header)
                if response.status == 200:
                    valid_proxy_url_set.add(url)
                    print 'Valid proxy url', url
                else:
                    print 'Invalid ', url
            except Exception as ex:
                print ex
                print 'Invalid ', url

        return valid_proxy_url_set

    def _save(self, protocol, parser_proxy_url_set):
        # Persist validated proxy URLs into the per-protocol Redis set.
        for url in parser_proxy_url_set:
            self.redis_service.add_set(PROXY_URL_KEY.format(protocol=protocol), url)
Example #60
0
def push_opentsdb(cluster):
	"""Collect host CPU, memory and load-average metrics for this host and
	POST them as one JSON batch to the OpenTSDB ``/api/put`` endpoint.

	:param cluster: cluster name stamped into every data point's tags
	"""
	pm = PoolManager()
	# Whole-second UNIX timestamp applied to every data point in this batch.
	timestamp = int(time.time())
	port = get_port()
	hostname = get_hostname()
	#hostname = "haoziyu-worker-dev003-bjdxt9.qiyi.virtual"
	ma = MesosJunkman(cluster, hostname, port)

	# The three metric sources all need identical timestamp/tag stamping,
	# so collect them once and stamp in a single loop (was three copies).
	metric_batches = [
		ma.host_cpu("/page/cpu/usage"),
		ma.host_memory("/page/memory"),
		ma.host_loadavg("/page/system/loadavg"),
	]
	# NOTE: disk df/io collection was disabled in the original; reinstate via
	# ma.host_disk_df("/page/df") / ma.host_disk_io("/page/diskio") if needed
	# (those points carry extra mount/fstype/device tags parsed from "tags").
	data = []
	for batch in metric_batches:
		for point in batch:
			point["timestamp"] = timestamp
			# Fresh dict per point so points never share a tags object.
			point["tags"] = {
				"hostname": hostname,
				"cluster": cluster
			}
			data.append(point)
	logger.info(data)
	pm.urlopen("POST", "http://10.15.230.1:4242/api/put", headers={"Content-Type": "application/json"}, body=json.dumps(data))