Ejemplo n.º 1
0
    def __init__(self, client_id=None, secret=None):
        """
        Zhihu client; the entry point for obtaining every other class.

        :param str|unicode client_id: client ID.
        :param str|unicode secret: SECRET KEY matching the client ID.
        :rtype: :class:`.ZhihuClient`
        """
        session = requests.session()

        # Turn off certificate verification and mute the resulting
        # InsecureRequestWarning noise.
        session.verify = False
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

        # Install the auto-retry adapter for both URL schemes.
        for scheme in ('http://', 'https://'):
            session.mount(scheme, ADAPTER_WITH_RETRY)

        self._session = session

        # client_id and secret shouldn't have default value
        # after zhihu open api
        self._client_id = client_id or CLIENT_ID
        self._secret = secret or APP_SECRET

        self._login_auth = BeforeLoginAuth(self._client_id)
        self._token = None
Ejemplo n.º 2
0
def leancloud_init():
    """Initialize the leancloud SDK (warnings silenced first).

    :return: None
    """
    urllib3.disable_warnings()
    # NOTE(review): app credentials are hard-coded here; consider moving
    # them to configuration or environment variables.
    leancloud.init('lXyQBue2G2I80NX9OIFY7TRk', 'NkLOGPRHeVrFdJOQiDIGVGJ7')
Ejemplo n.º 3
0
def configure_logging(log_config=None, debug=None, forward_stdout=None,
                      hide_ssl_warnings=None):
    """Configure the logging

    Loads the logging configuration file defined in the general
    configuration file and configures the logging system.

    :param log_config: path to a YAML logging config; defaults to
        ``CONF.migrate.log_config``
    :param debug: when True, lower the 'console' handler to DEBUG;
        defaults to ``CONF.migrate.debug``
    :param forward_stdout: when True, wrap ``sys.stdout`` in StdoutLogger
    :param hide_ssl_warnings: when True, silence urllib3 SSL warnings
    """
    if log_config is None:
        log_config = CONF.migrate.log_config
    if debug is None:
        debug = CONF.migrate.debug
    if forward_stdout is None:
        forward_stdout = CONF.migrate.forward_stdout
    if hide_ssl_warnings is None:
        hide_ssl_warnings = CONF.migrate.hide_ssl_warnings

    with open(log_config, 'r') as f:
        # safe_load: the config is plain data; yaml.load without an explicit
        # Loader can construct arbitrary objects and is deprecated.
        config.dictConfig(yaml.safe_load(f))
    if debug:
        logger = logging.getLogger('cloudferry')
        for handler in logger.handlers:
            if handler.name == 'console':
                handler.setLevel(logging.DEBUG)
    if forward_stdout:
        sys.stdout = StdoutLogger()
    if hide_ssl_warnings:
        urllib3.disable_warnings()
Ejemplo n.º 4
0
    def __init__(self, config):
        """
        Fetch a service instance object for the configured vCenter.

        pyvmomi enables certificate checks by default from Python 2.7.9
        onward; they are disabled here because no CA bundle is configured.
        """

        # Maps each discovered VM's instance UUID to its managed object
        # reference id. Going ahead it would also hold the other managed
        # objects of vCenter that are monitored.
        self.mors = {}

        self.params = config

        global metrics
        global counters

        metrics = util.parse_metrics()
        counters = util.parse_counters()

        self.needed_metrics = {}
        self.configured_metrics = {}
        self.refresh_rates = {}
        self.service_instance = ""

        for k, v in metrics.items():
            self.configured_metrics.update({util.get_counter(k): v})

        if sys.version_info > (2, 7, 9) and sys.version_info < (3, 0, 0):
            # https://www.python.org/dev/peps/pep-0476/
            # 'Opting out' section: disable certificate verification globally.
            ssl._create_default_https_context = ssl._create_unverified_context

        # Silence the security warning emitted because verification is off.
        urllib3.disable_warnings()

        try:

            service_instance = connect.SmartConnectNoSSL(host=self.params['host'],
                                                    user=self.params['username'],
                                                    pwd=self.params['password'],
                                                    port=int(self.params['port']))
            util.sendEvent("Plugin vmware", "Sucessfully connected to vCenter: [" + self.params['host'] + "]", "info")
            atexit.register(connect.Disconnect, service_instance)
            self.service_instance = service_instance
            self._cache_metrics_metadata(self.params['host'])

        except KeyError as ke:
            util.sendEvent("Plugin vmware: Key Error", "Improper param.json, key missing: [" + str(ke) + "]", "error")
            # sys.exit(-1)
        except vim.fault.InvalidLogin as il:
            # BUG FIX: this handler used to be listed AFTER the generic
            # Exception handler, which made it unreachable. Specific
            # exceptions must precede broader ones.
            util.sendEvent("Plugin vmware: Error logging into vCenter",
                           "Could not login to the specified vCenter host: [" + str(il) + "]", "critical")
        except ConnectionError as ce:
            util.sendEvent("Plugin vmware: Error connecting to vCenter",
                           "Could not connect to the specified vCenter host: [" + str(ce) + "]", "critical")
        except Exception as se:
            util.sendEvent("Plugin vmware: Unknown Error", "[" + str(se) + "]", "critical")
            # sys.exit(-1)
Ejemplo n.º 5
0
def main(filename):
    """Download subtitle files for *filename*, writing them alongside it.

    Duplicate subtitle bodies are skipped; alternates get a ``-alt.N``
    suffix. Exits with status 1 when *filename* does not exist.
    """
    disable_warnings()
    if not os.path.isfile(os.path.realpath(filename)):
        sys.stderr.write("File %s not found.\n" % filename)
        sys.exit(1)
    basename = os.path.splitext(filename)[0]

    response = get_subtitleinfo(filename)
    sys.stdout.write("Requesting subtitle file...\n")
    subtitles = set()
    for count in xrange(len(response.json())):
        if count != 0:
            _basename = "%s-alt.%s" % (basename, count)
        else:
            _basename = "%s.%s" % (basename, count)

        for fileinfo in response.json()[count]['Files']:
            url = fileinfo['Link']
            ext = fileinfo['Ext']
            _response = requests.get(url, verify=False)
            filename = "%s.%s" % (_basename, ext)

            # Skip duplicates: identical subtitle text already written.
            if _response.ok and _response.text not in subtitles:
                subtitles.add(_response.text)
                # Context manager guarantees the handle is closed even when
                # the write raises (the original leaked the handle on error).
                with open(filename, 'w') as fobj:
                    fobj.write(_response.text.encode("UTF8"))
Ejemplo n.º 6
0
 def __init__(self, api_key):
     """Prepare HTTP Basic-auth headers for the monitoring API client."""
     # base64.encodestring inserts newlines every 76 chars; strip them.
     self.api_key = base64.encodestring('%s:' % api_key).replace('\n', '')
     self.user_agent = "SalesMonitoringAgent"
     self.headers = {
         'Authorization': "Basic %s" % self.api_key,
         'Content-type': 'application/json',
         'Accept': 'application/json',
     }
     urllib3.disable_warnings()
 def __init__(self):
     """Open a Zhihu browsing session with TLS verification disabled."""
     urllib3.disable_warnings()
     self.header = {
         'HOST': 'www.zhihu.com',
         'Referer': 'https://www.zhihu.com/people/ipreacher/answers',
         'USER-AGENT': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/55.0.2883.95 Safari/537.36',
     }
     self.session = requests.session()
     # Warm-up request: collects the cookies later calls rely on.
     self.session.get('https://www.zhihu.com/', headers=self.header, verify=False)
Ejemplo n.º 8
0
    def validate(self):
        """Run validation using HTTP requests against validation host

        Using rules provided by spec, perform requests against validation host
        for each rule. Request response is verified to match the spec response
        rule.  This will yield either a :py:cls:`ValidationPass` or
        :py:cls:`ValidationFail` response.
        """
        session = Session()
        # Suppress urllib3's InsecureRequestWarning when verification is off
        # (hasattr guards against very old urllib3 releases lacking the API).
        if not self.verify and hasattr(urllib3, 'disable_warnings'):
            urllib3.disable_warnings()
        for rule in self.spec.get_rules():
            req = rule.get_request(self.host, self.port)
            if self.debug:
                pprint.pprint(req.__dict__)
            try:
                # Redirects are not followed, so each rule inspects the raw
                # first response.
                resp = session.send(req.prepare(), allow_redirects=False,
                                    verify=self.verify)
                if self.debug:
                    pprint.pprint(resp.__dict__)
                # NOTE(review): when the rule neither matches nor raises,
                # nothing is yielded for it — confirm this is intentional.
                if rule.matches(resp):
                    yield ValidationPass(rule=rule, request=req, response=resp)
            except (ConnectionError, SSLError) as exc:
                # No response yet
                yield ValidationFail(rule=rule, request=req, response=None,
                                     error=exc)
            except ValidationError as exc:
                # Response received, validation error
                yield ValidationFail(rule=rule, request=req, response=resp,
                                     error=exc)
Ejemplo n.º 9
0
def main():
    """CLI entry point: assemble plugins and run the Telegram bot."""
    from requests.packages import urllib3
    urllib3.disable_warnings()

    parser = build_parser()
    args = parser.parse_args()

    plugins = []

    try:
        for plugin_name in args.plugins:
            cl = import_class(plugin_name)
            plugins.append(cl())

        nocmd = None
        if args.nocmd is not None:
            cl = import_class(args.nocmd)
            nocmd = cl()
    except Exception as e:
        # str(e) is portable; Exception.message does not exist on Python 3.
        parser.error(str(e))

    # NOTE(review): the bot is constructed before --token is validated below;
    # confirm TGBot tolerates token=None (e.g. for the --list path).
    tg = TGBot(args.token, plugins=plugins, no_command=nocmd, db_url=args.db_url)

    if args.list:
        tg.print_commands()
        return

    if args.token is None:
        parser.error('--token is required')

    if args.webhook is None:
        tg.run(polling_time=args.polling)
    else:
        tg.run_web(args.webhook[0], host='0.0.0.0', port=int(args.webhook[1]))
def patch_urllib3():
    """Silence urllib3 warnings (e.g. the AppenginePlatformWarning) if the
    package is importable; do nothing otherwise."""
    try:
        # disable the annoying AppenginePlatformWarning's
        import urllib3
    except ImportError:
        return
    urllib3.disable_warnings()
Ejemplo n.º 11
0
def configure_logging(debug=None, verbose=None):
    """Configure the logging system

    Load the logging configuration file and configure the
    logging system.

    :param debug: allows to enable/disable the console
    debug mode
    :param verbose: allows to verbose the debug messages
    """

    with open(CONF.basic.log_config, 'r') as f:
        # safe_load: the config is plain data; yaml.load without an explicit
        # Loader can construct arbitrary objects and is deprecated.
        logging.config.dictConfig(yaml.safe_load(f))

    logger = logging.getLogger()
    if not debug:
        # Keep the console handler at INFO with default formatting.
        for handler in logger.handlers:
            if handler.name == 'console':
                handler.setLevel(logging.INFO)
                handler.setFormatter(logging.Formatter())
                break
    if verbose:
        logger.setLevel(logging.DEBUG)

    if CONF.basic.hide_ssl_warnings:
        urllib3.disable_warnings()

    CONF.log_opt_values(logging, logging.DEBUG)
Ejemplo n.º 12
0
    def __init__(self, address, **kwargs):
        """
        Build a Docker API client for *address*.

        :param address: docker daemon URI (scheme decides plain vs TLS)
        :param kwargs: optional ``ssl_cert_path`` (directory holding
            ca.pem / cert.pem / key.pem) and ``verify`` (bool) for HTTPS.
        """
        utils.validate_uri(address)
        # TODO: update me with a config file entry need to update to get the version from docker and use that.
        self.API_VERSION = DOCKER_API_VERSION
        self.url         = urlparse(address)

        if self.url.scheme == 'https':
            # TODO: Need to allow for ca to be passed if not disable warnings.
            urllib3.disable_warnings()

            # Resolve SSL_CA_PATH / SSL_CERT_PATH / SSL_KEY_PATH from the
            # ssl_cert_path directory, or None when none was supplied.
            for cert_name_type in ('ca', 'cert', 'key'):
                cert_path = utils.validate_path(os.path.join(kwargs['ssl_cert_path'], "{0}.pem".format(cert_name_type))) \
                    if 'ssl_cert_path' in kwargs and kwargs['ssl_cert_path'] else None
                setattr(self, 'SSL_{0}_PATH'.format(cert_name_type.upper()), cert_path)

            # Verification defaults to True unless an explicit bool was given.
            self.SSL_VERIFY = kwargs['verify'] if 'verify' in kwargs and isinstance(kwargs['verify'], bool) else True

            if not self.SSL_VERIFY:
                self.SSL_CA_PATH = None

            # Client certs only make sense as a complete (cert, key) pair.
            client_certs = (self.SSL_CERT_PATH, self.SSL_KEY_PATH) if self.SSL_KEY_PATH and self.SSL_CERT_PATH else None
            tls_config   = docker.tls.TLSConfig(client_cert=client_certs, ca_cert=self.SSL_CA_PATH, verify=self.SSL_VERIFY)

            self._client_session = docker.Client(self.url.geturl(), tls=tls_config, timeout=DOCKER_DEFAULT_TIMEOUT, version=self.API_VERSION)
        else:
            self._client_session = docker.Client(self.url.geturl(), timeout=DOCKER_DEFAULT_TIMEOUT, version=self.API_VERSION)

        # Probe the daemon once; also serves as a connectivity check.
        self._docker_info = self._client_session.version()
        self._injector = None
Ejemplo n.º 13
0
	def disable_warnings(cls, debug):
		"""Best-effort: silence urllib3 warnings via either import path.

		Tries the copy vendored inside ``requests`` first, then the
		standalone ``urllib3`` package; reports an error only when both
		attempts fail.
		"""
		failures = 0
		exmsg = ''
		try:
			import requests.packages.urllib3 as ul3
			if debug:
				pdbg("Using requests.packages.urllib3 to disable warnings")
			#ul3.disable_warnings(ul3.exceptions.InsecureRequestWarning)
			#ul3.disable_warnings(ul3.exceptions.InsecurePlatformWarning)
			ul3.disable_warnings()
		except Exception as ex:
			failures += 1
			exmsg += formatex(ex) + '-' * 64 + '\n'

		# i don't know why under Ubuntu, 'pip install requests'
		# doesn't install the requests.packages.* packages
		try:
			import urllib3 as ul3
			if debug:
				pdbg("Using urllib3 to disable warnings")
			ul3.disable_warnings()
		except Exception as ex:
			failures += 1
			exmsg += formatex(ex)

		# Both routes failed: most likely an outdated requests install.
		if failures >= 2:
			perr("Failed to disable warnings for Urllib3.\n"
				"Possibly the requests library is out of date?\n"
				"You can upgrade it by running '{}'.\nExceptions:\n{}".format(
					const.PipUpgradeCommand, exmsg))
Ejemplo n.º 14
0
    def sender(self):
        """POST the job to SABnzbd and return queue-status parameters.

        :returns: dict with 'status' (bool); on success it also carries the
            nzo_id and the parameters for a follow-up queue query.
        """
        try:
            from requests.packages.urllib3 import disable_warnings
            disable_warnings()
        except Exception:
            # Narrowed from a bare except: keep the best-effort behaviour
            # without swallowing SystemExit / KeyboardInterrupt.
            logger.info('Unable to disable https warnings. Expect some spam if using https nzb providers.')

        try:
            logger.info('parameters set to %s' % self.params)
            logger.info('sending now to %s' % self.sab_url)
            sendit = requests.post(self.sab_url, data=self.params, verify=False)
        except Exception:
            # Narrowed from a bare except (same reasoning as above).
            logger.info('Failed to send to client.')
            return {'status': False}
        else:
            sendresponse = sendit.json()
            logger.info(sendresponse)
            if sendresponse['status'] is True:
                queue_params = {'status': True,
                                'nzo_id': ''.join(sendresponse['nzo_ids']),
                                'queue':  {'mode':   'queue',
                                           'search':  ''.join(sendresponse['nzo_ids']),
                                           'output':  'json',
                                           'apikey':  mylar.CONFIG.SAB_APIKEY}}

            else:
                queue_params = {'status': False}

            return queue_params
Ejemplo n.º 15
0
def _get_pool_manager(verify, cert_file, key_file):
    """Return a urllib3 pool manager configured for the given TLS options.

    The default configuration (no client cert, no custom CA) is built once
    and cached in the module-global ``_pool_manager``; custom configurations
    are rebuilt on every call.
    """
    global _pool_manager
    default_pool_args = dict(maxsize=32,
                             cert_reqs=ssl.CERT_REQUIRED,
                             ca_certs=_default_certs,
                             headers=_default_headers,
                             timeout=_default_timeout)
    if cert_file is None and verify is None and 'DX_CA_CERT' not in os.environ:
        # Common case: share one cached manager, created under a lock.
        with _pool_mutex:
            if _pool_manager is None:
                if 'HTTPS_PROXY' in os.environ:
                    proxy_params = _get_proxy_info(os.environ['HTTPS_PROXY'])
                    default_pool_args.update(proxy_params)
                    _pool_manager = urllib3.ProxyManager(**default_pool_args)
                else:
                    _pool_manager = urllib3.PoolManager(**default_pool_args)
            return _pool_manager
    else:
        # This is the uncommon case, normally, we want to cache the pool
        # manager.
        pool_args = dict(default_pool_args,
                         cert_file=cert_file,
                         key_file=key_file,
                         ca_certs=verify or os.environ.get('DX_CA_CERT') or requests.certs.where())
        if verify is False or os.environ.get('DX_CA_CERT') == 'NOVERIFY':
            # Verification explicitly disabled: drop cert requirements and
            # silence the resulting warnings.
            pool_args.update(cert_reqs=ssl.CERT_NONE, ca_certs=None)
            urllib3.disable_warnings()
        if 'HTTPS_PROXY' in os.environ:
            proxy_params = _get_proxy_info(os.environ['HTTPS_PROXY'])
            pool_args.update(proxy_params)
            return urllib3.ProxyManager(**pool_args)
        else:
            return urllib3.PoolManager(**pool_args)
Ejemplo n.º 16
0
    def download(self):
        """
        download the NXDL definitions described by ``ref``

        Retries the GitHub request up to ``GITHUB_RETRY_COUNT`` times on
        connection errors before giving up.

        :returns: the ``requests`` response holding the zip archive
        :raises IOError: when every attempt fails with a connection error
        """
        # disabling warnings about GitHub self-signed https certificates
        disable_warnings(InsecureRequestWarning)

        creds = get_BasicAuth_credentials()
        content = None
        for _retry in range(GITHUB_RETRY_COUNT):        # noqa
            try:
                if creds is None:
                    content = requests.get(self.zip_url, verify=False)
                else:
                    content = requests.get(self.zip_url,
                                     auth=(creds['user'], creds['password']),
                                     verify=False,
                                     )
            except requests.exceptions.ConnectionError as _exc:
                # BUG FIX: the original raised on the FIRST failure, making
                # the retry loop useless; now only the final attempt raises.
                if _retry == GITHUB_RETRY_COUNT - 1:
                    _msg = 'ConnectionError from ' + self.zip_url
                    _msg += '\n' + str(_exc)
                    raise IOError(_msg)
            else:
                break

        return content
Ejemplo n.º 17
0
        def requests_get_mock(*args, **kwargs):
            """Stand-in for requests.get: check the timeout and return {}."""
            # assertEqual: assertEquals is a deprecated alias (removed in
            # Python 3.12).
            self.assertEqual(kwargs.get('timeout'), 5)

            with requests.sessions.Session() as session:
                from requests.packages import urllib3
                urllib3.disable_warnings()
                return requests_mock.create_response(
                    session.request('GET', args[0]), json={})
Ejemplo n.º 18
0
 def _suppress_urllib3_error(self):
     try:
         # Try to suppress user warnings
         from requests.packages.urllib3.exceptions import InsecureRequestWarning
         from requests.packages.urllib3 import disable_warnings
         disable_warnings(InsecureRequestWarning)
     except:
         pass
Ejemplo n.º 19
0
 def init(self, url=None, username=None, password=None):
     """Store endpoint credentials and prepare default XML headers."""
     self.url, self.username, self.password = url, username, password
     self.auth = HTTPBasicAuth(username, password)
     # The service speaks XML on both request and response.
     self.content_type = {'Content-Type': 'application/xml'}
     self.accept_header = {'Accept': 'application/xml'}
     disable_warnings()
Ejemplo n.º 20
0
def main():
    urllib3.disable_warnings()
    path, url = sys.argv[1:]
    with open(os.path.expanduser(path)) as auth_source:
        username, password = auth_source.read().strip().split(':')
    response = requests.get(url, auth=HTTPBasicAuth(username, password))
    if response.status_code != 200 or not response.text.startswith('<SUCCESS CODE="20'):
        print response.text
Ejemplo n.º 21
0
 def __init__(self):
     """Connect to the local f1000 database and prepare a crawl session."""
     urllib3.disable_warnings()
     self.conn = mysql.connector.connect(user='******', password='', database='f1000')
     self.cursor = self.conn.cursor(buffered=True)
     self.header = {
         'HOST': 'f1000.com',
         'Origin': 'https://f1000.com',
         'Referer': 'https://f1000.com/prime/recommendations',
         'USER-AGENT': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/55.0.2883.95 Safari/537.36',
     }
     self.session = requests.session()
def main():
    """Clone a VM from a template in a given datacenter/cluster (Python 2).

    Connects to vCenter without certificate verification, locates the
    datacenter, cluster, optional host and template by name, snapshots the
    template and clones it.
    """
    args = get_args()

    urllib3.disable_warnings()
    si = None
    context = None
    if hasattr(ssl, 'SSLContext'):
        # Python >= 2.7.9: build an unverified SSL context explicitly.
        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        context.verify_mode = ssl.CERT_NONE
    if context:
        # Python >= 2.7.9
        si = SmartConnect(host=args.host,
                          port=int(args.port),
                          user=args.user,
                          pwd=args.password,
                          sslContext=context)
    else:
        # Python >= 2.7.7
        si = SmartConnect(host=args.host,
                          port=int(args.port),
                          user=args.user,
                          pwd=args.password)
    atexit.register(Disconnect, si)
    print "Connected to vCenter Server"

    content = si.RetrieveContent()

    datacenter = get_obj(content, [vim.Datacenter], args.datacenter_name)
    if not datacenter:
        raise Exception("Couldn't find the Datacenter with the provided name "
                        "'{}'".format(args.datacenter_name))

    cluster = get_obj(content, [vim.ClusterComputeResource], args.cluster_name,
                      datacenter.hostFolder)

    if not cluster:
        raise Exception("Couldn't find the Cluster with the provided name "
                        "'{}'".format(args.cluster_name))

    # Optional: pin the clone to a specific host inside the cluster
    # (host_obj stays None when no host matches args.host_name).
    host_obj = None
    for host in cluster.host:
        if host.name == args.host_name:
            host_obj = host
            break

    vm_folder = datacenter.vmFolder

    template = get_obj(content, [vim.VirtualMachine], args.template_name,
                       vm_folder)

    if not template:
        raise Exception("Couldn't find the template with the provided name "
                        "'{}'".format(args.template_name))

    location = _get_relocation_spec(host_obj, cluster.resourcePool)
    # Snapshot first so the clone is taken from a consistent point.
    _take_template_snapshot(si, template)
    _clone_vm(si, template, args.vm_name, vm_folder, location)
Ejemplo n.º 23
0
 def __init__(self, config=None):
     """:type _config: :class:`Config`

     :param config: optional :class:`Config`; a fresh one is created when
         omitted. (The previous ``config=Config()`` default was evaluated
         once at definition time and shared across all instances — the
         classic mutable-default-argument pitfall.)
     """
     self._config = config if config is not None else Config()
     self.session_id = None
     self.user = None
     self.country_code = 'US'   # Enable Trial Mode
     self.client_unique_key = None
     try:
         urllib3.disable_warnings() # Disable OpenSSL Warnings in URLLIB3
     except Exception:
         # Narrowed from a bare except; still best-effort.
         pass
Ejemplo n.º 24
0
 def inner(insecure, **kwargs):
     """Call *f* with an AppCheck client; optionally disable TLS checks."""
     if insecure:
         # If user chose to use insecure explicitly, ignore warnings...
         try:
             import requests.packages.urllib3 as urllib3
         except ImportError:
             pass  # If requests moves urllib3 around
         else:
             urllib3.disable_warnings()
             click.echo("Warning: Not verifying TLS certificates.")
     f(get_appcheck(insecure=insecure), **kwargs)
Ejemplo n.º 25
0
def main():
    urllib3.disable_warnings()
    parser = argparse.ArgumentParser(description='Process user input for piazza queries')
    parser.add_argument('-q', '--query', nargs="+")
    parser.add_argument('-t', '--tag', nargs=1)
    #parser.add_argument('-r', '--range', nargs=2)
    parser.add_argument('-i', '--instructor-only', action='store_true')
    parser.add_argument('-p', '--pinned', action='store_true')
    parser.add_argument('-f', '--following', action='store_true')
    parser.add_argument('-l', '--force-login', action='store_true')
    args = parser.parse_args()

    queryObj = QueryObj()
    queryObj.add_query(args.query)
    queryObj.add_tag(args.tag)
    #queryObj.add_time_range(args.range)
    queryObj.bool_inst_notes(args.instructor_only)
    queryObj.bool_pinned(args.pinned)
    queryObj.bool_following(args.following)

    loginfile = os.path.expanduser("~") + "/.pizza"

    if not args.force_login:
        try:
            pkl = pickle.load(open(loginfile,"rb"))
            data = {'email': pkl['email'], 'password': pkl['password'].decode('rot13')}
        except IOError:
            email = raw_input('Piazza Email: ')
            password = getpass.getpass()
            data = {'email': email, 'password': password}
            pkl = {'email': email, 'password': password.encode('rot13')}
            pickle.dump(pkl, open(loginfile, "wb"))

    piazza = Piazza()
    piazza.user_login(data['email'], data['password'])
    user_status = piazza.get_user_status()

    classes = user_status['networks']
    classes = sorted(classes, key=lambda k: k['status'])
    # list classes
    print("Choose a Class")
    counter = 1
    for c in classes:
        info = c['name']
        if c['status'] == 'inactive':
            info = '(inactive) ' + info
        print '{0:2d}: {1:s}'.format(counter, info)
        counter = counter + 1


    index = raw_input('Class Number: ')
    network = piazza.network(classes[int(index) - 1]['id'])
    feed_processor = FeedProcessor(network, queryObj)
    curses.wrapper(summary_viewer.view_summaries, feed_processor, network)
Ejemplo n.º 26
0
    def __init__(self):
        """Set up HTTP logging; warn once when TLS verification is off."""
        super(HttpMixin, self).__init__()
        self._http_log = logger

        if not self.verify:
            # Route the insecure-mode notice through the logger when it has
            # handlers configured, otherwise fall back to plain stdout.
            if self._http_log.handlers:
                self._http_log.warn(HTTP_INSECURE_MESSAGE)
            else:
                print(HTTP_INSECURE_MESSAGE)

            # Silence urllib3's per-request InsecureRequestWarning.
            from requests.packages.urllib3 import disable_warnings
            disable_warnings()
Ejemplo n.º 27
0
def configure_urllib3():
    """Quiet urllib3 noise and trust the DNAnexus S3 upload tunnel."""
    # Disable verbose urllib3 warnings and log messages
    urllib3.disable_warnings(category=urllib3.exceptions.InsecurePlatformWarning)
    logging.getLogger('dxpy.packages.requests.packages.urllib3.connectionpool').setLevel(logging.ERROR)

    def _match_hostname(cert, hostname):
        # ul.cn.dnanexus.com fronts s3.amazonaws.com, so validate the
        # certificate against the real backend host instead.
        if hostname == "ul.cn.dnanexus.com":
            hostname = "s3.amazonaws.com"
        match_hostname(cert, hostname)

    # Trust DNAnexus S3 upload tunnel
    urllib3.connection.match_hostname = _match_hostname
Ejemplo n.º 28
0
def disable_ssl_warnings():
    """Silence urllib3 warnings from the vendored and/or standalone copy.

    requests historically vendored urllib3 under ``requests.packages``;
    newer environments ship it standalone — try both, ignore whichever
    is missing.
    """
    try:
        from requests.packages import urllib3 as vendored_urllib3
        vendored_urllib3.disable_warnings()
    except ImportError:
        pass

    try:
        import urllib3
        urllib3.disable_warnings()
    except ImportError:
        pass
Ejemplo n.º 29
0
 def __init__(self):
     """
     Create a twitter endpoint that can send authenticated requests.
     """
     # suppress urllib3 InsecurePlatformWarning
     urllib3.disable_warnings()
     self.auth = requests_oauthlib.OAuth1(
         CLIENT_KEY,
         client_secret=CLIENT_SECRET,
         resource_owner_key=RESOURCE_OWNER_KEY,
         resource_owner_secret=RESOURCE_OWNER_SECRET)
     # Last response / parsed body, populated by later request calls.
     self.response = None
     self.json = None
Ejemplo n.º 30
0
 def on_task_start(self, task, config):
     """When the plugin is configured with False, turn off TLS verification."""
     if config is not False:
         return
     task.requests.verify = False
     # Disabling verification makes urllib3 emit an InsecureRequestWarning
     # for every HTTPS request (see
     # https://urllib3.readthedocs.io/en/latest/security.html). The user
     # explicitly opted out of verification, so the warning adds nothing.
     # The suppression is process-wide rather than task-scoped, but with
     # verification enabled there are no warnings to suppress anyway.
     urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
Ejemplo n.º 31
0
    def __init__(self,
                 fileconf=None,
                 client_token=None,
                 cluster_url=None,
                 cluster_unsafe=False,
                 cluster_timeout=None,
                 storage_url=None,
                 storage_unsafe=False,
                 retry_count=5,
                 retry_wait=1.0,
                 cluster_custom_certificate=None,
                 storage_custom_certificate=None,
                 sanitize_bucket_paths=True,
                 show_bucket_warnings=True):
        """Create a connection to a cluster with given config file, options or environment variables.
        Available environment variable are
        `QARNOT_CLUSTER_URL`, `QARNOT_CLUSTER_UNSAFE`, `QARNOT_CLUSTER_TIMEOUT` and `QARNOT_CLIENT_TOKEN`.

        :param fileconf: path to a qarnot configuration file or a corresponding dict
        :type fileconf: str or dict
        :param str client_token: API Token
        :param str cluster_url: (optional) Cluster url.
        :param bool cluster_unsafe: (optional) Disable certificate check
        :param int cluster_timeout: (optional) Timeout value for every request
        :param str storage_url: (optional) Storage service url.
        :param bool storage_unsafe: (optional) Disable certificate check
        :param int retry_count: (optional) ConnectionError retry count. Default to 5.
        :param float retry_wait: (optional) Retry on error wait time, progressive. (wait * (retry_count - retry_num). Default to 1s
        :param bool sanitize_bucket_paths: (optional) Flag to automatically sanitize bucket paths (remove extra slashes). Default to true
        :param bool show_bucket_warnings: (optional) Flag to show warnings of bucket paths sanitization. Default to true
        :raises QarnotGenericException: when no API token can be resolved

        Configuration sample:

        .. code-block:: ini

           [cluster]
           # url of the REST API
           url=https://localhost
           # No SSL verification ?
           unsafe=False
           [client]
           # auth string of the client
           token=login
           [storage]
           url=https://storage
           unsafe=False

        """
        self._version = "qarnot-sdk-python/" + __version__
        self._http = requests.session()
        self._retry_count = retry_count
        self._retry_wait = retry_wait
        self._sanitize_bucket_paths = sanitize_bucket_paths
        self._show_bucket_warnings = show_bucket_warnings
        # Config precedence: file/dict settings win over constructor
        # arguments; selected environment variables can still override below.
        if fileconf is not None:
            self.storage = None
            if isinstance(fileconf, dict):
                warnings.warn(
                    "Dict config should be replaced by constructor explicit arguments."
                )
                self.cluster = None
                if fileconf.get('cluster_url'):
                    self.cluster = fileconf.get('cluster_url')
                if fileconf.get('storage_url'):
                    self.storage = fileconf.get('storage_url')
                auth = fileconf.get('client_auth')
                # NOTE(review): int(None) raises TypeError when
                # 'cluster_timeout' is absent from the dict — confirm dict
                # configs always provide it.
                self.timeout: int = int(fileconf.get('cluster_timeout'))
                if fileconf.get('cluster_unsafe'):
                    self._http.verify = False
                elif fileconf.get('cluster_custom_certificate'):
                    self._http.verify = fileconf.get(
                        'cluster_custom_certificate')
            else:
                # fileconf is a path to an INI-style configuration file.
                cfg = config.ConfigParser()
                with open(fileconf) as cfg_file:
                    cfg.read_string(cfg_file.read())

                    self.cluster = None
                    if cfg.has_option('cluster', 'url'):
                        self.cluster = cfg.get('cluster', 'url')
                    if cfg.has_option('storage', 'url'):
                        self.storage = cfg.get('storage', 'url')
                    if cfg.has_option('client', 'token'):
                        auth = cfg.get('client', 'token')
                    elif cfg.has_option('client', 'auth'):
                        warnings.warn('auth is deprecated, use token instead.')
                        auth = cfg.get('client', 'auth')
                    else:
                        auth = None
                    self.timeout = None
                    if cfg.has_option('cluster', 'timeout'):
                        self.timeout = cfg.getint('cluster', 'timeout')
                    if cfg.has_option('cluster', 'unsafe') \
                       and cfg.getboolean('cluster', 'unsafe'):
                        self._http.verify = False
                    elif cfg.has_option('cluster', 'custom_certificate'):
                        self._http.verify = cfg.get('cluster',
                                                    'custom_certificate')
                    if cfg.has_option('storage', 'unsafe') \
                       and cfg.getboolean('storage', 'unsafe'):
                        storage_unsafe = True
                    if cfg.has_option('storage', 'custom_certificate'):
                        storage_custom_certificate = cfg.get(
                            'storage', 'custom_certificate')
        else:
            # No file config: take everything from the constructor arguments.
            self.cluster = cluster_url
            self.timeout = cluster_timeout
            self._http.verify = not cluster_unsafe
            if not cluster_unsafe and cluster_custom_certificate:
                self._http.verify = cluster_custom_certificate
            self.storage = storage_url
            auth = client_token

        # Verification is off: silence urllib3's per-request warnings.
        if not self._http.verify:
            urllib3.disable_warnings()

        # Environment variables fill any still-unset values.
        if self.cluster is None:
            self.cluster = os.getenv("QARNOT_CLUSTER_URL")

        if self.storage is None:
            self.storage = os.getenv("QARNOT_STORAGE_URL")

        if auth is None:
            auth = os.getenv("QARNOT_CLIENT_TOKEN")

        if os.getenv("QARNOT_CLUSTER_UNSAFE") is not None:
            self._http.verify = not os.getenv("QARNOT_CLUSTER_UNSAFE") in [
                "true", "True", "1"
            ]

        if os.getenv("QARNOT_CLUSTER_TIMEOUT") is not None:
            self.timeout = int(os.getenv("QARNOT_CLUSTER_TIMEOUT"))

        if auth is None:
            raise QarnotGenericException("Token is mandatory.")
        self._http.headers.update({"Authorization": auth})

        self._http.headers.update({"User-Agent": self._version})

        if self.cluster is None:
            self.cluster = "https://api.qarnot.com"

        # Ask the API where storage lives when it was not configured.
        api_settings = self._get(get_url("settings")).json()

        if self.storage is None:
            self.storage = api_settings.get("storage",
                                            "https://storage.qarnot.com")

            if self.storage is None:  # api_settings["storage"] is None
                self._s3client = None
                self._s3resource = None
                return

        user = self.user_info
        session = boto3.session.Session()
        conf = botocore.config.Config(user_agent=self._version)

        # boto3's `verify` accepts True/False or a CA bundle path.
        should_verify_or_certificate_path = True
        if storage_unsafe:
            should_verify_or_certificate_path = not storage_unsafe
        elif storage_custom_certificate is not None:
            should_verify_or_certificate_path = storage_custom_certificate

        self._s3client = session.client(
            service_name='s3',
            aws_access_key_id=user.email,
            aws_secret_access_key=auth,
            verify=should_verify_or_certificate_path,
            endpoint_url=self.storage,
            config=conf)
        self._s3resource = session.resource(
            service_name='s3',
            aws_access_key_id=user.email,
            aws_secret_access_key=auth,
            verify=should_verify_or_certificate_path,
            endpoint_url=self.storage,
            config=conf)
def main():
    """Entry point: crawl the Finland facts/stats/info category page."""
    # Silence urllib3's SSL warnings before issuing any request.
    urllib3.disable_warnings()
    target_url = 'https://finland.fi/category/facts-stats-and-info/'
    fun_call(target_url)
Ejemplo n.º 33
0
    def request(self, host, handler, request_body, verbose=0):
        """Replace the xmlrpc request function.

        Process xmlrpc request via requests library.

        Args:
            host: Target host
            handler: Target RPC handler.
            request_body: XML-RPC request body.
            verbose: Debugging flag.

        Returns:
            Parsed response.

        Raises:
            xmlrpc_client.ProtocolError: If the HTTP request fails for
                any reason (wraps the underlying RequestException).
        """
        if verbose:
            self._debug()

        # Silence urllib3's InsecureRequestWarning when certificate
        # verification has been deliberately disabled.
        if not self._check_ssl_cert:
            disable_warnings()

        headers = {'User-Agent': self.user_agent, 'Content-Type': 'text/xml', }

        # Need to be done because the schema(http or https) is lost in
        # xmlrpc.Transport's init.
        scheme = "https" if self._use_https else "http"
        url = "{scheme}://{host}/{handler}".format(
            scheme=scheme, host=host, handler=handler)

        # Build the auth object once instead of duplicating the
        # requests.post call per auth type (resolves the old TODO).
        if self._authtype == "basic":
            auth = HTTPBasicAuth(self._username, self._password)
        elif self._authtype == "digest":
            auth = HTTPDigestAuth(self._username, self._password)
        else:
            auth = None

        # Pre-bind so the except handler can safely inspect it even when
        # requests.post itself raised before assigning a response.
        response = None
        try:
            response = requests.post(
                url,
                data=request_body,
                headers=headers,
                verify=self._check_ssl_cert,
                auth=auth,
                proxies=self._proxies)
            response.raise_for_status()
        except RequestException as error:
            # Bug fix: `error.message` does not exist on Python 3
            # exceptions, and `response.headers` raised UnboundLocalError
            # when the POST failed before a response was received.
            response_headers = response.headers if response is not None else {}
            raise xmlrpc_client.ProtocolError(url,
                                              str(error),
                                              traceback.format_exc(),
                                              response_headers)

        return self.parse_response(response)
Ejemplo n.º 34
0
def runCouchPotato(options,
                   base_path,
                   args,
                   data_dir=None,
                   log_dir=None,
                   Env=None,
                   desktop=None):
    """Bootstrap and run the CouchPotato server.

    Configures the process encoding, migrates/backs up the database,
    creates cache directories, registers environment settings, sets up
    logging, builds the Tornado application with all request handlers,
    loads plugins, and finally starts the HTTP server loop (retrying a
    few times if the port is already in use).

    :param options: Parsed command-line options (debug, quiet, daemon,
        console_log, ...).
    :param base_path: Application root directory.
    :param args: Raw command-line arguments (stored in the environment).
    :param data_dir: Directory holding the database, caches and backups.
    :param log_dir: Directory for the rotating log files.
    :param Env: Global environment/settings object.
    :param desktop: Desktop-integration object, or None when headless.
    """

    try:
        locale.setlocale(locale.LC_ALL, "")
        encoding = locale.getpreferredencoding()
    except (locale.Error, IOError):
        encoding = None

    # for OSes that are poorly configured I'll just force UTF-8
    if not encoding or encoding in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        encoding = 'UTF-8'

    Env.set('encoding', encoding)

    # Do db stuff
    db_path = sp(os.path.join(data_dir, 'database'))
    old_db_path = os.path.join(data_dir, 'couchpotato.db')

    # Remove database folder if both exists
    if os.path.isdir(db_path) and os.path.isfile(old_db_path):
        db = SuperThreadSafeDatabase(db_path)
        db.open()
        db.destroy()

    # Check if database exists
    db = SuperThreadSafeDatabase(db_path)
    db_exists = db.exists()
    if db_exists:

        # Backup before start and cleanup old backups
        backup_path = sp(os.path.join(data_dir, 'db_backup'))
        backup_count = 5
        existing_backups = []
        if not os.path.isdir(backup_path): os.makedirs(backup_path)

        for root, dirs, files in os.walk(backup_path):
            # Only consider files being a direct child of the backup_path
            if root == backup_path:
                for backup_file in sorted(files):
                    ints = re.findall('\d+', backup_file)

                    # Delete non zip files
                    if len(ints) != 1:
                        try:
                            os.remove(os.path.join(root, backup_file))
                        except:
                            pass
                    else:
                        existing_backups.append((int(ints[0]), backup_file))
            else:
                # Delete stray directories.
                shutil.rmtree(root)

        # Remove all but the last 5
        for eb in existing_backups[:-backup_count]:
            os.remove(os.path.join(backup_path, eb[1]))

        # Create new backup (timestamp-named tar.gz of the database dir)
        new_backup = sp(
            os.path.join(backup_path, '%s.tar.gz' % int(time.time())))
        zipf = tarfile.open(new_backup, 'w:gz')
        for root, dirs, files in os.walk(db_path):
            for zfilename in files:
                zipf.add(os.path.join(root, zfilename),
                         arcname='database/%s' %
                         os.path.join(root[len(db_path) + 1:], zfilename))
        zipf.close()

        # Open last
        db.open()

    else:
        db.create()

    # Force creation of cachedir
    log_dir = sp(log_dir)
    cache_dir = sp(os.path.join(data_dir, 'cache'))
    python_cache = sp(os.path.join(cache_dir, 'python'))

    if not os.path.exists(cache_dir):
        os.mkdir(cache_dir)
    if not os.path.exists(python_cache):
        os.mkdir(python_cache)

    # Shared HTTP session used app-wide as the 'http_opener'
    session = requests.Session()
    session.max_redirects = 5

    # Register environment settings
    Env.set('app_dir', sp(base_path))
    Env.set('data_dir', sp(data_dir))
    Env.set('log_path', sp(os.path.join(log_dir, 'CouchPotato.log')))
    Env.set('db', db)
    Env.set('http_opener', session)
    Env.set('cache_dir', cache_dir)
    Env.set('cache', FileSystemCache(python_cache))
    Env.set('console_log', options.console_log)
    Env.set('quiet', options.quiet)
    Env.set('desktop', desktop)
    Env.set('daemonized', options.daemon)
    Env.set('args', args)
    Env.set('options', options)

    # Determine debug
    debug = options.debug or Env.setting('debug', default=False, type='bool')
    Env.set('debug', debug)

    # Development
    development = Env.setting('development', default=False, type='bool')
    Env.set('dev', development)

    # Disable logging for some modules
    for logger_name in [
            'enzyme', 'guessit', 'subliminal', 'apscheduler', 'tornado',
            'requests'
    ]:
        logging.getLogger(logger_name).setLevel(logging.ERROR)

    for logger_name in ['gntp']:
        logging.getLogger(logger_name).setLevel(logging.WARNING)

    # Disable SSL warning
    disable_warnings()

    # Use reloader only in debug+development, non-desktop, non-daemon runs
    reloader = debug is True and development and not Env.get(
        'desktop') and not options.daemon

    # Logger
    logger = logging.getLogger()
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s',
                                  '%m-%d %H:%M:%S')
    level = logging.DEBUG if debug else logging.INFO
    logger.setLevel(level)
    # Register level 19 (just below logging.INFO=20) under the 'INFO' label
    logging.addLevelName(19, 'INFO')

    # To screen
    if (debug or
            options.console_log) and not options.quiet and not options.daemon:
        hdlr = logging.StreamHandler(sys.stderr)
        hdlr.setFormatter(formatter)
        logger.addHandler(hdlr)

    # To file (rotating: 10 files of ~500KB each)
    hdlr2 = handlers.RotatingFileHandler(Env.get('log_path'),
                                         'a',
                                         500000,
                                         10,
                                         encoding=Env.get('encoding'))
    hdlr2.setFormatter(formatter)
    logger.addHandler(hdlr2)

    # Start logging & enable colors
    # noinspection PyUnresolvedReferences
    import color_logs
    from couchpotato.core.logger import CPLog
    log = CPLog(__name__)
    log.debug('Started with options %s', options)

    # Check available space
    try:
        total_space, available_space = getFreeSpace(data_dir)
        if available_space < 100:
            log.error(
                'Shutting down as CP needs some space to work. You\'ll get corrupted data otherwise. Only %sMB left',
                available_space)
            return
    except:
        log.error('Failed getting diskspace: %s', traceback.format_exc())

    # Route Python warnings through the application log
    def customwarn(message, category, filename, lineno, file=None, line=None):
        log.warning('%s %s %s line:%s', (category, message, filename, lineno))

    warnings.showwarning = customwarn

    # Create app
    from couchpotato import WebHandler
    web_base = ('/' + Env.setting('url_base').lstrip('/') +
                '/') if Env.setting('url_base') else '/'
    Env.set('web_base', web_base)

    api_key = Env.setting('api_key')
    if not api_key:
        api_key = uuid4().hex
        Env.setting('api_key', value=api_key)

    api_base = r'%sapi/%s/' % (web_base, api_key)
    Env.set('api_base', api_base)

    # Basic config
    host = Env.setting('host', default='0.0.0.0')
    host6 = Env.setting('host6', default='::')
    # app.debug = development
    config = {
        'use_reloader': reloader,
        'port': tryInt(Env.setting('port', default=5050)),
        'host': host if host and len(host) > 0 else '0.0.0.0',
        'host6': host6 if host6 and len(host6) > 0 else '::',
        'ssl_cert': Env.setting('ssl_cert', default=None),
        'ssl_key': Env.setting('ssl_key', default=None),
    }

    # Load the app
    application = Application(
        [],
        log_function=lambda x: None,
        debug=config['use_reloader'],
        gzip=True,
        cookie_secret=api_key,
        login_url='%slogin/' % web_base,
    )
    Env.set('app', application)

    # Request handlers
    application.add_handlers(
        ".*$",
        [
            (r'%snonblock/(.*)(/?)' % api_base, NonBlockHandler),

            # API handlers
            (r'%s(.*)(/?)' % api_base, ApiHandler),  # Main API handler
            (r'%sgetkey(/?)' % web_base, KeyHandler),  # Get API key
            (r'%s' % api_base, RedirectHandler, {
                "url": web_base + 'docs/'
            }),  # API docs

            # Login handlers
            (r'%slogin(/?)' % web_base, LoginHandler),
            (r'%slogout(/?)' % web_base, LogoutHandler),

            # Catch all webhandlers
            (r'%s(.*)(/?)' % web_base, WebHandler),
            (r'(.*)', WebHandler),
        ])

    # Static paths
    static_path = '%sstatic/' % web_base
    for dir_name in ['fonts', 'images', 'scripts', 'style']:
        application.add_handlers(
            ".*$",
            [('%s%s/(.*)' % (static_path, dir_name), StaticFileHandler, {
                'path':
                sp(os.path.join(base_path, 'couchpotato', 'static', dir_name))
            })])
    Env.set('static_path', static_path)

    # Load configs & plugins
    loader = Env.get('loader')
    loader.preload(root=sp(base_path))
    loader.run()

    # Fill database with needed stuff
    fireEvent('database.setup')
    if not db_exists:
        fireEvent('app.initialize', in_order=True)
    fireEvent('app.migrate')

    # Go go go!
    from tornado.ioloop import IOLoop
    from tornado.autoreload import add_reload_hook
    loop = IOLoop.current()

    # Reload hook: fire a clean shutdown before the autoreloader restarts
    def reload_hook():
        fireEvent('app.shutdown')

    add_reload_hook(reload_hook)

    # Some logging and fire load event
    try:
        log.info('Starting server on port %(port)s', config)
    except:
        pass
    fireEventAsync('app.load')

    ssl_options = None
    if config['ssl_cert'] and config['ssl_key']:
        ssl_options = {
            'certfile': config['ssl_cert'],
            'keyfile': config['ssl_key'],
        }

    server = HTTPServer(application,
                        no_keep_alive=True,
                        ssl_options=ssl_options)

    try_restart = True
    restart_tries = 5

    # Server loop: retry a few times when the port is busy (errno 48)
    while try_restart:
        try:
            server.listen(config['port'], config['host'])
            server.listen(config['port'], config['host6'])
            loop.start()
            server.close_all_connections()
            server.stop()
            loop.close(all_fds=True)
        except Exception as e:
            log.error('Failed starting: %s', traceback.format_exc())
            try:
                # NOTE(review): tuple-unpacking an exception is Python 2
                # behaviour; under Python 3 this raises TypeError, which
                # falls through to the bare `except: pass` below.
                nr, msg = e
                if nr == 48:
                    log.info(
                        'Port (%s) needed for CouchPotato is already in use, try %s more time after few seconds',
                        (config.get('port'), restart_tries))
                    time.sleep(1)
                    restart_tries -= 1

                    if restart_tries > 0:
                        continue
                    else:
                        return
            except ValueError:
                return
            except:
                pass

            raise

        try_restart = False
Ejemplo n.º 35
0
def get_lastest_history_passage(query):
    """Crawl the most recent article list of a WeChat official account.

    Logs into mp.weixin.qq.com with previously saved cookies, extracts
    the session token, resolves the account's ``fakeid`` via the account
    search API, then pages through the article list and stores each
    article's title, link and timestamps in MySQL (via ``db_core``) and
    MongoDB (via ``docs_db_core``).

    :param query: name of the official account to crawl
    """
    # Official-accounts platform home page (used to obtain the token).
    url = 'https://mp.weixin.qq.com'
    # Request headers.
    header = {
        "HOST": "mp.weixin.qq.com",
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3239.132"
    }
    from requests.packages import urllib3
    urllib3.disable_warnings()  # suppress SSL warnings

    # Load the cookies that were saved after a successful login.
    with open(COOKIES_PATH, 'r', encoding='utf-8') as f:
        cookie = f.read()
    cookies = json.loads(cookie)
    print("Cookies loading successfully.")
    # Use a single session; disable keep-alive between requests.
    session = requests.Session()
    session.keep_alive = False
    # Increase the connection retry count.
    # NOTE(review): this assigns an attribute on the adapters mapping and
    # likely does not change requests' retry behaviour — verify.
    session.adapters.DEFAULT_RETRIES = 511
    try:
        # After login the home URL becomes
        # https://mp.weixin.qq.com/cgi-bin/home?t=home/index&lang=zh_CN&token=1849751598
        # — the token is extracted from that redirected URL.
        response = session.get(url=url, cookies=cookies, verify=False)
        token = re.findall(r'token=(\d+)', str(response.url))[0]
        time.sleep(15)
        # Endpoint for searching official accounts.
        search_url = 'https://mp.weixin.qq.com/cgi-bin/searchbiz?'
        # Search parameters; the variable parts are the session token, a
        # random number, and the account name being searched.
        query_id = {
            'action': 'search_biz',
            'token': token,
            'lang': 'zh_CN',
            'f': 'json',
            'ajax': '1',
            'random': random.random(),
            'query': query,
            'begin': '0',
            'count': '5'
        }
        # Call the account-search endpoint with cookies/params/headers.
        search_response = session.get(
            search_url,
            cookies=cookies,
            headers=header,
            params=query_id)
        # Take the account list from the search result.
        lists = search_response.json().get('list')
        logging.info("查询到公众号信息列表:" + str(lists))
    except Exception as e:
        print(str(e))
        logger.error(str(e))
    try:
        # Get the account's fakeid (required by the article-list API).
        fakeid = lists[0].get('fakeid')
        # Endpoint listing an account's articles.
        appmsg_url = 'https://mp.weixin.qq.com/cgi-bin/appmsg?'
        # Article-list parameters: the login token, the target account's
        # fakeid, and a random number.
        query_id_data = {
            'token': token,
            'lang': 'zh_CN',
            'f': 'json',
            'ajax': '1',
            'random': random.random(),
            'action': 'list_ex',
            'begin': '0',  # paging offset: increases by 5 per page
            'count': '5',
            'query': '',
            'fakeid': fakeid,
            'type': '9'
        }
        # Fetch the first article-list page.
        appmsg_response = session.get(
            appmsg_url,
            cookies=cookies,
            headers=header,
            params=query_id_data,
        )
        # Total article count (unused: only the latest pages are crawled)
        # max_num = appmsg_response.json().get('app_msg_cnt')
        # Only crawl the latest articles
        # max_num = 10
        # At least 5 per page; total page count for a full paged crawl
        # num = int(int(max_num) / 5)
        num = 2
        # Starting `begin` offset; advances by 5 per page.
        begin = 0

        # Get the account's ID in the MySQL database.
        official_account_id = db_core.search_wechat_account_is_existed(query)
        # Start crawling.
        print("开始爬取!")
        while num + 1 > 0:
            query_id_data = {
                'token': token,
                'lang': 'zh_CN',
                'f': 'json',
                'ajax': '1',
                'random': random.random(),
                'action': 'list_ex',
                'begin': '{}'.format(str(begin)),
                'count': '5',
                'query': '',
                'fakeid': fakeid,
                'type': '9'
            }
            # print('Paging: --------------', begin)
            time.sleep(15)

            # Fetch each page's article titles/links and persist them.
            query_fakeid_response = requests.get(
                appmsg_url,
                cookies=cookies,
                headers=header,
                params=query_id_data,
                timeout=3)
            fakeid_list = query_fakeid_response.json().get('app_msg_list')
            if fakeid_list:
                for item in fakeid_list:
                    content_link = item.get('link')                # article link
                    content_title = item.get('title')              # article title
                    content_create_time = item.get('create_time')  # publish time
                    content_update_time = item.get('update_time')  # update time
                    # Assemble the MongoDB upsert document.
                    docs = {"passage_title": content_title,
                            "official_account_id": official_account_id,
                            "passage_link":content_link,
                            "passage_create_time":time_format(content_create_time),
                            "passage_update_time":time_format(content_update_time)}
                    docs_mongo = {"$set":docs}

                    db_core.insert_account_passage_link(content_title, content_link, official_account_id)      # MySQL update
                    docs_db_core.insert_docs(docs_mongo, passage_link=content_link)                            # MongoDB update

            num -= 1
            begin = int(begin)
            begin += 5
        print("完成爬取.")
    except Exception as e:
        print(str(e))
        logger.error(str(e))
    finally:
        print("爬虫进程结束.")
Ejemplo n.º 36
0
def DXHTTPRequest(resource,
                  data,
                  method='POST',
                  headers=None,
                  auth=True,
                  timeout=DEFAULT_TIMEOUT,
                  use_compression=None,
                  jsonify_data=True,
                  want_full_response=False,
                  decode_response_body=True,
                  prepend_srv=True,
                  session_handler=None,
                  max_retries=DEFAULT_RETRIES,
                  always_retry=False,
                  **kwargs):
    '''
    :param resource: API server route, e.g. "/record/new". If *prepend_srv* is False, a fully qualified URL is expected. If this argument is a callable, it will be called just before each request attempt, and expected to return a tuple (URL, headers). Headers returned by the callback are updated with *headers* (including headers set by this method).
    :type resource: string
    :param data: Content of the request body
    :type data: list or dict, if *jsonify_data* is True; or string or file-like object, otherwise
    :param headers: Names and values of HTTP headers to submit with the request (in addition to those needed for authentication, compression, or other options specified with the call).
    :type headers: dict
    :param auth: Overrides the *auth* value to pass through to :meth:`requests.request`. By default a token is obtained from the ``DX_SECURITY_CONTEXT``.
    :type auth: tuple, object, True (default), or None
    :param timeout: HTTP request timeout, in seconds
    :type timeout: float
    :param config: *config* value to pass through to :meth:`requests.request`
    :type config: dict
    :param use_compression: "snappy" to use Snappy compression, or None
    :type use_compression: string or None
    :param jsonify_data: If True, *data* is converted from a Python list or dict to a JSON string
    :type jsonify_data: boolean
    :param want_full_response: If True, the full :class:`requests.Response` object is returned (otherwise, only the content of the response body is returned)
    :type want_full_response: boolean
    :param decode_response_body: If True (and *want_full_response* is False), the response body is decoded and, if it is a JSON string, deserialized. Otherwise, the response body is uncompressed if transport compression is on, and returned raw.
    :type decode_response_body: boolean
    :param prepend_srv: If True, prepends the API server location to the URL
    :type prepend_srv: boolean
    :param max_retries: Maximum number of retries to perform for a request. A "failed" request is retried if any of the following is true:

                        - A response is received from the server, and the content length received does not match the "Content-Length" header.
                        - A response is received from the server, and the response has an HTTP status code in 5xx range.
                        - A response is received from the server, the "Content-Length" header is not set, and the response JSON cannot be parsed.
                        - No response is received from the server, and either *always_retry* is True or the request *method* is "GET".

    :type max_retries: int
    :param always_retry: If True, indicates that it is safe to retry a request on failure

                        - Note: It is not guaranteed that the request will *always* be retried on failure; rather, this is an indication to the function that it would be safe to do so.

    :type always_retry: boolean
    :returns: Response from API server in the format indicated by *want_full_response* and *decode_response_body*.
    :raises: :exc:`exceptions.DXAPIError` or a subclass if the server returned a non-200 status code; :exc:`requests.exceptions.HTTPError` if an invalid response was received from the server; or :exc:`requests.exceptions.ConnectionError` if a connection cannot be established.

    Wrapper around :meth:`requests.request()` that makes an HTTP
    request, inserting authentication headers and (by default)
    converting *data* to JSON.

    .. note:: Bindings methods that make API calls make the underlying
       HTTP request(s) using :func:`DXHTTPRequest`, and most of them
       will pass any unrecognized keyword arguments you have supplied
       through to :func:`DXHTTPRequest`.

    '''
    # One session handler per process, so forked children get their own pool
    if session_handler is None:
        session_handler = SESSION_HANDLERS[os.getpid()]
    if headers is None:
        headers = {}

    global _UPGRADE_NOTIFY

    url = APISERVER + resource if prepend_srv else resource
    method = method.upper(
    )  # Convert method name to uppercase, to ease string comparisons later
    # Debug tracing of the outgoing request, verbosity scaled by _DEBUG
    if _DEBUG >= 3:
        print(method,
              url,
              "=>\n" + json.dumps(data, indent=2),
              file=sys.stderr)
    elif _DEBUG == 2:
        print(method, url, "=>", json.dumps(data), file=sys.stderr)
    elif _DEBUG > 0:
        # NOTE(review): the `repr` module is Python 2-only (reprlib in 3)
        from repr import Repr
        print(method, url, "=>", Repr().repr(data), file=sys.stderr)

    if auth is True:
        auth = AUTH_HELPER

    # Honor a custom CA bundle; 'NOVERIFY' disables verification entirely
    if 'verify' not in kwargs and 'DX_CA_CERT' in os.environ:
        kwargs['verify'] = os.environ['DX_CA_CERT']
        if os.environ['DX_CA_CERT'] == 'NOVERIFY':
            kwargs['verify'] = False
            from requests.packages import urllib3
            urllib3.disable_warnings()

    if jsonify_data:
        data = json.dumps(data)
        if 'Content-Type' not in headers and method == 'POST':
            headers['Content-Type'] = 'application/json'

    headers['DNAnexus-API'] = API_VERSION
    headers['User-Agent'] = USER_AGENT

    if use_compression == 'snappy':
        if not snappy_available:
            raise exceptions.DXError(
                "Snappy compression requested, but the snappy module is unavailable"
            )
        headers['accept-encoding'] = 'snappy'

    # If the input is a buffer, its data gets consumed by
    # requests.request (moving the read position). Record the initial
    # buffer position so that we can return to it if the request fails
    # and needs to be retried.
    rewind_input_buffer_offset = None
    if hasattr(data, 'seek') and hasattr(data, 'tell'):
        rewind_input_buffer_offset = data.tell()

    # Retry loop: one initial attempt plus up to max_retries retries.
    # 503 responses carrying Retry-After do not count against the limit.
    try_index = 0
    while True:
        success, streaming_response_truncated = True, False
        response = None
        try:
            _method, _url, _headers = _process_method_url_headers(
                method, url, headers)
            response = session_handler.request(_method,
                                               _url,
                                               headers=_headers,
                                               data=data,
                                               timeout=timeout,
                                               auth=auth,
                                               **kwargs)

            # NOTE(review): os.environ.has_key and the file() builtin are
            # Python 2-only; this branch would raise under Python 3.
            if _UPGRADE_NOTIFY and response.headers.get(
                    'x-upgrade-info', '').startswith(
                        'A recommended update is available'
                    ) and not os.environ.has_key('_ARGCOMPLETE'):
                logger.info(response.headers['x-upgrade-info'])
                try:
                    with file(_UPGRADE_NOTIFY, 'a'):
                        os.utime(_UPGRADE_NOTIFY, None)
                except:
                    pass
                _UPGRADE_NOTIFY = False

            # If an HTTP code that is not in the 200 series is received and the content is JSON, parse it and throw the
            # appropriate error.  Otherwise, raise the usual exception.
            if response.status_code // 100 != 2:
                # response.headers key lookup is case-insensitive
                if response.headers.get('content-type',
                                        '').startswith('application/json'):
                    content = json.loads(response.content.decode('utf-8'))
                    error_class = getattr(exceptions, content["error"]["type"],
                                          exceptions.DXAPIError)
                    raise error_class(content, response.status_code)
                response.raise_for_status()

            if want_full_response:
                return response
            else:
                # Detect truncated bodies by comparing to Content-Length
                if 'content-length' in response.headers:
                    if int(response.headers['content-length']) != len(
                            response.content):
                        range_str = (
                            ' (%s)' %
                            (headers['Range'], )) if 'Range' in headers else ''
                        raise exceptions.ContentLengthError(
                            "Received response with content-length header set to %s but content length is %d%s"
                            % (response.headers['content-length'],
                               len(response.content), range_str))

                if use_compression and response.headers.get(
                        'content-encoding', '') == 'snappy':
                    # TODO: check if snappy raises any exceptions on truncated response content
                    content = snappy.uncompress(response.content)
                else:
                    content = response.content

                if decode_response_body:
                    content = content.decode('utf-8')
                    if response.headers.get('content-type',
                                            '').startswith('application/json'):
                        try:
                            content = json.loads(content)
                            # Debug tracing of the response, with latency
                            t = int(response.elapsed.total_seconds() * 1000)
                            if _DEBUG >= 3:
                                print(method,
                                      url,
                                      "<=",
                                      response.status_code,
                                      "(%dms)" % t,
                                      "\n" + json.dumps(content, indent=2),
                                      file=sys.stderr)
                            elif _DEBUG == 2:
                                print(method,
                                      url,
                                      "<=",
                                      response.status_code,
                                      "(%dms)" % t,
                                      json.dumps(content),
                                      file=sys.stderr)
                            elif _DEBUG > 0:
                                print(method,
                                      url,
                                      "<=",
                                      response.status_code,
                                      "(%dms)" % t,
                                      Repr().repr(content),
                                      file=sys.stderr)
                            return content
                        except ValueError:
                            # If a streaming API call (no content-length
                            # set) encounters an error it may just halt the
                            # response because it has no other way to
                            # indicate an error. Under these circumstances
                            # the client sees unparseable JSON, and we
                            # should be able to recover.
                            streaming_response_truncated = 'content-length' not in response.headers
                            raise HTTPError(
                                "Invalid JSON received from server")
                return content
            raise AssertionError(
                'Should never reach this line: expected a result to have been returned by now'
            )
        except Exception as e:
            success = False
            exception_msg = _extract_msg_from_last_exception()
            if isinstance(e, _expected_exceptions):
                if response is not None and response.status_code == 503:
                    seconds_to_wait = _extract_retry_after_timeout(response)
                    logger.warn(
                        "%s %s: %s. Waiting %d seconds due to server unavailability...",
                        method, url, exception_msg, seconds_to_wait)
                    time.sleep(seconds_to_wait)
                    # Note, we escape the "except" block here without
                    # incrementing try_index because 503 responses with
                    # Retry-After should not count against the number of
                    # permitted retries.
                    continue

                # Total number of allowed tries is the initial try + up to
                # (max_retries) subsequent retries.
                total_allowed_tries = max_retries + 1
                ok_to_retry = False
                # Because try_index is not incremented until we escape this
                # iteration of the loop, try_index is equal to the number of
                # tries that have failed so far, minus one. Test whether we
                # have exhausted all retries.
                if try_index + 1 < total_allowed_tries:
                    if response is None or isinstance(e, exceptions.ContentLengthError) or \
                       streaming_response_truncated:
                        ok_to_retry = always_retry or (
                            method == 'GET') or _is_retryable_exception(e)
                    else:
                        ok_to_retry = 500 <= response.status_code < 600

                if ok_to_retry:
                    # Rewind a partially-consumed input buffer, then wait
                    # with exponential backoff capped at DEFAULT_TIMEOUT.
                    if rewind_input_buffer_offset is not None:
                        data.seek(rewind_input_buffer_offset)
                    delay = min(2**try_index, DEFAULT_TIMEOUT)
                    logger.warn(
                        "%s %s: %s. Waiting %d seconds before retry %d of %d...",
                        method, url, exception_msg, delay, try_index + 1,
                        max_retries)
                    time.sleep(delay)
                    try_index += 1
                    continue

            # All retries have been exhausted OR the error is deemed not
            # retryable. Print the latest error and propagate it back to the caller.
            if not isinstance(e, exceptions.DXAPIError):
                logger.error("%s %s: %s", method, url, exception_msg)
            raise
        finally:
            if success and try_index > 0:
                logger.info("%s %s: Recovered after %d retries", method, url,
                            try_index)

        raise AssertionError(
            'Should never reach this line: should have attempted a retry or reraised by now'
        )
    raise AssertionError(
        'Should never reach this line: should never break out of loop')
Ejemplo n.º 37
0
from requests import request, Session
from requests.packages import urllib3
from requests.exceptions import RequestException

from .Exceptions import HTTPError, SiadError

# Disable pyopenssl: it breaks the urllib3 SSL connection pool when a
# connection is closed unexpectedly by the server, and SNI is not needed
# for this API anyway.
try:
    from requests.packages.urllib3.contrib import pyopenssl
    pyopenssl.extract_from_urllib3()
except ImportError:
    # pyopenssl support is optional; nothing to undo if it is absent.
    pass

# Silence SNI-related warnings. The API does not rely on SNI.
urllib3.disable_warnings(urllib3.exceptions.SNIMissingWarning)
urllib3.disable_warnings(urllib3.exceptions.SecurityWarning)


#: Default timeout (seconds) for each request.
TIMEOUT = 5

class Client(object):
    """
    """

    def __init__(self, endpoint="127.0.0.1:9980", sia_agent="Sia-Agent", force_discover=False, auth_key=""):
        """Create a Sia daemon API client bound to *endpoint*.

        NOTE(review): the body appears truncated in this source dump --
        ``sia_agent``, ``force_discover`` and ``auth_key`` are never used in
        the visible lines; confirm against the full original source.
        """
        # Use Requests.Session to avoid any problem during the communication
        self._session = Session()

        # host:port of the Sia daemon REST API
        self._endpoint = endpoint
Ejemplo n.º 38
0
#-*- coding:utf8 -*-
# Demo: HTTPS requests against a host with certificate problems (12306.cn),
# with and without TLS verification, and with an explicit client certificate.
# Fixed: module metadata must use the dunder name ``__author__``.
__author__ = "huia"

import requests

# Plain GET with default certificate verification enabled.
response = requests.get('https://www.12306.cn')
print(response.status_code)

import requests
from requests.packages import urllib3

urllib3.disable_warnings()  # suppress the "unverified HTTPS request" warning
response = requests.get('https://www.12306.cn', verify=False)  # verify: whether to check the TLS certificate
print(response.status_code)

# Supply an explicit client certificate and private key.
import requests

response = requests.get('https://www.12306.cn',
                        cert=('/path/server.crt', '/path/key'))
print(response.status_code)
Ejemplo n.º 39
0
def getTweets():
    time_per_100 = []
    fetch_count = 0
    avg_time_per_fetch = 0
    tweet_count = 0
    modified_at = None

    # disable all HTTPS/TLS warnings
    urllib3.disable_warnings()

    # load api keys from api_keys.json
    keys_file_path = os.path.join(project_root, 'Key', 'api_keys.json')

    with open(keys_file_path) as api_keys:
        keys = json.load(api_keys)

    # obtain multiple instances of Twitter API to circumvent rate limit
    authList = []
    apiList = []
    for i in range(len(keys)):
        authList[i] = tweepy.OAuthHandler(keys[i]['consumer_key'],
                                          keys[i]['consumer_secret'])
        authList[i].set_access_token(keys[i]['access_token'],
                                     keys[i]['access_token_secret'])
        apiList[i] = tweepy.API(authList[i])

    # db_path = os.path.join(os.path.dirname(__file__), os.pardir, 'Data/tweet_ids')

    # connect to DB
    db = connect('Shit_db')

    # drop the database once to ensure whenever the thread is run, we create the db afresh
    db.drop_database('Shit_db')
    t0 = time.time()
    no_of_tweets = 0
    total_no_of_tweets = 0

    # news channel IDs
    # user_id = [34908698,362051343,361501426,7905122,180306960,30857481,28370738,110458336,2883841,28172926,30261067,20562637,113050195,28140646,621523,35773039,15164565,15861355,44316192,44078873,15861220,1642135962,28137012,38400130,32355144,122097236,19230601,713993413,7302282,16877611,2557521,26257166,15110357,4898091,34713362,18949452,32359921,16334857,59736898,214007688,129834917,15108702,39817941,375721095,2424208849,506504366,242689065,116559622,23484039,18424289,64643056,115754870,134758540,6509832,267158021,29958928,15954704,19897138,37034483,36327407,20751449,3123883238,240649814,31632905,177829660,256495314,39743812,245687754,38647512,355989081,98362607,17710740,39240673,17469289,16973333,87818409,18071358,9763482,87416722,4970411,7587032,788524,14173315,612473,28785486,2467791,15012486,5988062,1367531,759251,428333,6017542,3108351,51241574,1652541,14293310,807095,742143,5402612]

    # non news channel IDs
    user_id = [
        79708561, 281766200, 785493949, 250205792, 180463340, 3060210854,
        2305049443, 273181052, 2463499796, 71876190, 26642006, 92367751,
        259379883, 399428964, 26565946, 24494557, 166739404, 52551600,
        25365536, 15485441, 15846407, 14234323, 125481462, 27042513, 133880286,
        243284052, 44588485, 51376979, 27260086, 17919972, 18625669, 16409683,
        21447363, 58135085, 23375688, 92724677, 30973, 50374439, 48410093,
        57928790, 87170183, 102957248, 108391251, 120998613, 115622213,
        113419517, 6463042, 94775494, 131975194, 97865628, 79915337, 332188446,
        41067945, 197150180, 78022296, 31348594, 902534288, 108253263,
        63390627, 145125358, 78242874, 468479147, 36057824, 34464376,
        111871312, 152251488, 121677709, 38403110, 21787625, 494747331,
        94163409, 44849431, 18872373, 105710210, 148248527, 38479920,
        508932270, 183230911, 186388502, 101311381, 70652594, 2719753171,
        23976386, 23002923, 33868638, 16548023, 40453512, 18681139, 279449435,
        144755081, 132385468, 54829997, 266714730, 108252113, 3138637447,
        1111706414, 61755650, 14120922, 216447259, 129786468
    ]
    print "No of users={0}".format(len(user_id))

    last_id = [None for i in range(len(user_id))]
    number = 0
    rate_limit = 180
    no_of_requests = 0
    tweet_list = []
    k = 0  # current_api_index
    api_wait_end_time = time.time(
    )  # stores timestamp till when the first API might have to wait

    while (total_no_of_tweets < 3200 * len(user_id)):
        try:
            status_obj = apiList[k].user_timeline(user_id=user_id[number],
                                                  count=200,
                                                  max_id=last_id[number])
            # print "fetched {0} tweets".format(len(status_obj))
            no_of_requests += 1
            for status in status_obj:
                tweet = Tweet()
                tweet.tweet_id = status.id_str
                tweet.text = status.text
                tweet.created_at = status.created_at
                tweet.in_reply_to_status_id = status.in_reply_to_status_id_str
                tweet.user_id = status.user.id_str
                tweet.user_name = status.user.name
                tweet.user_followers = status.user.followers_count
                tweet.user_location = status.user.location
                tweet.favourites_count = status.user.favourites_count
                if status.coordinates is not None:
                    tweet.coordinates = status.coordinates['coordinates']
                tweet.language = status.lang
                # tweet.place_coordinates   = status['']
                tweet.retweet_count = status.retweet_count
                tweet.retweeted = status.retweeted
                # tweet.inserted_at
                tweet.is_news = True
                # tweet.save()
                tweet_list.append(tweet)
                no_of_tweets = no_of_tweets + 1
                total_no_of_tweets = total_no_of_tweets + 1
                last_id[number] = tweet.tweet_id
                #print(tweet.user_name)
            # print "last id={0}".format(last_id[number])
            # print "total no of tweets {0}".format(no_of_tweets)
            if no_of_requests % 100 == 0:
                print "{0} tweets fetched".format(total_no_of_tweets)

            if (no_of_tweets >= 3200):
                print "Saving Tweets to DB"
                # save tweets to db
                Tweet.objects.insert(tweet_list)
                tweet_list = []
                number += 1

                # if we have fetched tweets for every user, just return
                if number > len(user_id):
                    return
                number = number % len(user_id)
                print "moved to {0} user".format(number)
                no_of_tweets = 0

        except tweepy.RateLimitError:
            print "Saving Tweets to DB"
            # save tweets to db
            Tweet.objects.insert(tweet_list)
            tweet_list = []
            if k == len(apiList) - 1:
                if api_wait_end_time < time.time():
                    # we dont need to wait, so pass
                    pass
                else:
                    sleep_time = api_wait_end_time - time.time()
                    print "create_db: sleeping for {0} seconds".format(
                        sleep_time)
                    time.sleep(sleep_time)
            k = (k + 1) % len(apiList)
            if k == 0:
                # update api_wait_end_time
                api_wait_end_time = time.time() + 15 * 60

            # print("Going to Sleep")
            # print no_of_requests
            # t0 = time.time() - t0
            # if t0 > 16*60:
            #     print "sleeping for {0} sec".format(15*60)
            #     time.sleep(15*60)
            # else:
            #     print "sleeping for {0} sec".format(16*60 - t0)
            #     time.sleep(16*60 - t0)
            # t0 = time.time()

        except Exception as e:
            print("exception came")
            print(str(e))
            time.sleep(15 * 60)
    t0 = time.time() - t0
    t0 = t0 / 60
    print(t0)
    return
Ejemplo n.º 40
0
class BaseClient(object):
    """
    Basic API client
    - Supports Authorization with tokens
    - Caches tokens
    """
    # The API endpoints typically use self-signed certificates; silence the
    # related urllib3 warnings once, at class-definition time.
    disable_warnings(InsecurePlatformWarning)
    disable_warnings(InsecureRequestWarning)
    disable_warnings(SNIMissingWarning)

    _logger = Logger('api')

    def __init__(self,
                 ip,
                 port,
                 credentials=None,
                 verify=False,
                 version='*',
                 raw_response=False,
                 cache_store=None):
        """
        Initializes the object with credentials and connection information
        :param ip: IP to which to connect
        :type ip: str
        :param port: Port on which to connect
        :type port: int
        :param credentials: Credentials to connect
        :type credentials: tuple
        :param verify: Additional verification
        :type verify: bool
        :param version: API version
        :type version: object
        :param raw_response: Retrieve the raw response value
        :type raw_response: bool
        :param cache_store: Store in which to keep the generated token for the client instance
        :type cache_store: any
        :return: None
        :rtype: NoneType
        """
        if credentials is not None and len(credentials) != 2:
            raise RuntimeError(
                'Credentials should be None (no authentication) or a tuple containing client_id and client_secret (authenticated)'
            )
        self.ip = ip
        self.port = port
        self.client_id = credentials[0] if credentials is not None else None
        self.client_secret = credentials[1] if credentials is not None else None
        self._url = 'https://{0}:{1}/api'.format(ip, port)
        # Cache key for the token store: unique per (host, port, credentials).
        self._key = hashlib.sha256('{0}{1}{2}{3}'.format(
            self.ip, self.port, self.client_id,
            self.client_secret)).hexdigest()
        self._token = None
        self._verify = verify
        self._version = version
        self._raw_response = raw_response
        self._volatile_client = cache_store

    def _connect(self):
        """
        Authenticates to the api
        """
        headers = {
            'Accept':
            'application/json',
            'Authorization':
            'Basic {0}'.format(
                base64.b64encode('{0}:{1}'.format(self.client_id,
                                                  self.client_secret)).strip())
        }
        raw_response = requests.post(url='{0}/oauth2/token/'.format(self._url),
                                     data={'grant_type': 'client_credentials'},
                                     headers=headers,
                                     verify=self._verify)

        try:
            response = self._process(response=raw_response, overrule_raw=True)
        except RuntimeError:
            if self._raw_response is True:
                return raw_response
            raise
        if len(response.keys()) in [1, 2] and 'error' in response:
            error = RuntimeError(response['error'])
            error.status_code = raw_response.status_code
            raise error
        self._token = response['access_token']

    def _build_headers(self):
        """
        Builds the request headers
        :return: The request headers
        :rtype: dict
        """
        headers = {
            'Accept': 'application/json; version={0}'.format(self._version),
            'Content-Type': 'application/json'
        }
        if self._token is not None:
            headers['Authorization'] = 'Bearer {0}'.format(self._token)
        return headers

    @classmethod
    def _build_url_params(cls, params=None):
        """
        Build the URL params
        :param params: URL parameters
        :type params: str
        :return: The url params
        :rtype: string
        """
        url_params = ''
        if params:
            url_params = '?{0}'.format(urllib.urlencode(params))
        return url_params

    def _cache_token(self):
        """
        Caches the JWT
        :return: None
        :rtype: NoneType
        """
        if self._volatile_client is not None:
            # 300s TTL: tokens are short-lived, so expire the cache entry too
            self._volatile_client.set(self._key, self._token, 300)

    def _prepare(self, **kwargs):
        """
        Prepares the call:
        * Authentication, if required
        * Preparing headers, returning them
        """
        if self.client_id is not None and self._token is None:
            self._connect()

        headers = self._build_headers()
        params = self._build_url_params(kwargs.get('params'))
        url = '{0}{{0}}{1}'.format(self._url, params)
        self._cache_token(
        )  # Volatile cache might have expired or the key is gone

        return headers, url

    def _process(self, response, overrule_raw=False):
        """
        Processes a call result
        """
        if self._raw_response is True and overrule_raw is False:
            return response

        status_code = response.status_code
        try:
            parsed_output = response.json()
        except ValueError:
            # BUGFIX: was a bare `except: pass`, which also swallowed
            # KeyboardInterrupt and hid real bugs. response.json() raises
            # ValueError when the body is not JSON (e.g. empty or HTML page).
            parsed_output = None

        if 200 <= status_code < 300:
            return parsed_output
        else:
            message = None
            if parsed_output is not None:
                if 'error_description' in parsed_output:
                    message = parsed_output['error_description']
                if 'error' in parsed_output:
                    if message is None:
                        message = parsed_output['error']
                    else:
                        message += ' ({0})'.format(parsed_output['error'])
            else:
                messages = {
                    401: 'No access to the requested API',
                    403: 'No access to the requested API',
                    404: 'The requested API could not be found',
                    405: 'Requested method not allowed',
                    406: 'The request was unacceptable',
                    426: 'Upgrade is needed',
                    429: 'Rate limit was hit',
                    500: 'Internal server error'
                }
                if status_code in messages:
                    message = messages[status_code]
            if message is None:
                message = 'Unknown error'
            if status_code in [401, 403]:
                raise HttpForbiddenException(message, '')
            elif status_code == 404:
                raise HttpNotFoundException(message, '')
            else:
                raise HttpException(status_code, message)

    def _call(self, api, params, fct, timeout=None, **kwargs):
        """
        Executes a single API call, re-authenticating once on a 401/403
        (the cached token may have expired server-side).
        """
        if not api.endswith('/'):
            api = '{0}/'.format(api)
        if not api.startswith('/'):
            api = '/{0}'.format(api)
        if self._volatile_client is not None:
            self._token = self._volatile_client.get(self._key)
        first_connect = self._token is None
        headers, url = self._prepare(params=params)
        try:
            return self._process(
                fct(url=url.format(api),
                    headers=headers,
                    verify=self._verify,
                    timeout=timeout,
                    **kwargs))
        except HttpForbiddenException:
            if self._volatile_client is not None:
                self._volatile_client.delete(self._key)
            if first_connect is True:  # First connect, so no token was present yet, so no need to try twice without token
                raise
            self._token = None
            headers, url = self._prepare(params=params)
            # BUGFIX: the retry previously dropped the `timeout` argument,
            # making the second attempt wait indefinitely.
            return self._process(
                fct(url=url.format(api),
                    headers=headers,
                    verify=self._verify,
                    timeout=timeout,
                    **kwargs))
        except Exception:
            if self._volatile_client is not None:
                self._volatile_client.delete(self._key)
            raise

    @classmethod
    def get_instance(cls, connection_info, cache_store=None, version=6):
        """
        Retrieve an OVSClient instance to the connection information passed
        :param connection_info: Connection information, includes: 'host', 'port', 'client_id', 'client_secret'
        :type connection_info: dict
        :param cache_store: Store in which to keep the generated token for the client
        :type cache_store: object
        :param version: Version for the API
        :type version: int
        :return: An instance of the OVSClient class
        :rtype: ovs_extensions.api.client.OVSClient
        """
        ExtensionsToolbox.verify_required_params(
            actual_params=connection_info,
            required_params={
                'host': (str, ExtensionsToolbox.regex_ip),
                'port': (int, {
                    'min': 1,
                    'max': 65535
                }),
                'client_id': (str, None),
                'client_secret': (str, None),
                'local': (bool, None, False)
            })
        return cls(ip=connection_info['host'],
                   port=connection_info['port'],
                   credentials=(connection_info['client_id'],
                                connection_info['client_secret']),
                   version=version,
                   cache_store=cache_store)

    def get(self, api, params=None):
        """
        Executes a GET call
        :param api: Specification to fill out in the URL, eg: /vpools/<vpool_guid>/shrink_vpool
        :param params: Additional query parameters as comma separated list, eg: {'contents':'dynamic1,dynamic2,-dynamic3,_relations,-relation1'}
        :type params: dict
        """
        return self._call(api=api, params=params, fct=requests.get)

    def post(self, api, data=None, params=None):
        """
        Executes a POST call
        :param api: Specification to fill out in the URL, eg: /vpools/<vpool_guid>/shrink_vpool
        :param data: Data to post
        :param params: Additional query parameters, eg: _dynamics
        """
        return self._call(api=api, params=params, fct=requests.post, data=data)

    def put(self, api, data=None, params=None):
        """
        Executes a PUT call
        :param api: Specification to fill out in the URL, eg: /vpools/<vpool_guid>/shrink_vpool
        :param data: Data to put
        :param params: Additional query parameters, eg: _dynamics
        """
        return self._call(api=api, params=params, fct=requests.put, data=data)

    def patch(self, api, data=None, params=None):
        """
        Executes a PATCH call
        :param api: Specification to fill out in the URL, eg: /vpools/<vpool_guid>/shrink_vpool
        :param data: Data to patch
        :param params: Additional query parameters, eg: _dynamics
        """
        return self._call(api=api,
                          params=params,
                          fct=requests.patch,
                          data=data)

    def delete(self, api, params=None):
        """
        Executes a DELETE call
        :param api: Specification to fill out in the URL, eg: /vpools/<vpool_guid>/
        :param params: Additional query parameters, eg: _dynamics
        """
        return self._call(api=api, params=params, fct=requests.delete)
Ejemplo n.º 41
0
def main():
    """CLI entry point: silence TLS warnings, then run the click CLI with a
    fresh RemoteAppRestContext as its context object."""
    # We silence the insecure requests warnings we get for using
    # self-signed certificates.
    disable_warnings(InsecureRequestWarning)
    cli(obj=RemoteAppRestContext())
Ejemplo n.º 42
0
def webapi(
        url,
        hostname_override=None,
        principal=None,
        authtype='kerberos',
        username=None,
        password=None,
        verifyssl=False,
        error_action='Stop'):
    """Sends a request to the provided url and authentication configuration.
    If successful, a WebAPI object will be constructed from the response
    and returned.

    :param url: The URL of the PI Web API server.
    :param hostname_override: Optional hostname override for Kerberos auth.
    :param principal: Optional Kerberos principal.
    :param authtype: Optional - Options are Basic and Kerberos.
        Default authtype is kerberos.
    :param username: Optional username - Only used for basic auth.
    :param password: Optional password - Only used for basic auth.
    :param verifyssl: Optional SSL verification. If set to False, then
        InsecureRequestWarning will be disabled.
    :param error_action: 'Stop' raises on HTTP/auth errors; any other value
        prints the message and continues.
    :return: :class:`WebAPI <WebAPI>` object
    :rtype: osisoftpy.WebAPI
    """
    # NOTE: the original wrapped this entire body in `try: ... except: raise`,
    # a no-op that only obscured tracebacks; the wrapper has been removed.
    s = requests.session()
    s.verify = verifyssl
    if not s.verify:
        disable_warnings(InsecureRequestWarning)
    if authtype == 'kerberos':
        s.auth = HTTPKerberosAuth(
            mutual_authentication=requests_kerberos.OPTIONAL,
            sanitize_mutual_error_response=False,
            hostname_override=hostname_override,
            force_preemptive=True,
            principal=principal)
    else:
        s.auth = requests.auth.HTTPBasicAuth(username, password)
    r = APIResponse(s.get(url), s)
    if r.response.status_code == 401:
        msg = 'Authorization denied - incorrect username or password.'
        if error_action.lower() == 'stop':
            raise Unauthorized(msg)
        else:
            print(msg + ', Continuing')
    if r.response.status_code != 200:
        msg = 'Wrong server response: %s %s' % (r.response.status_code, r.response.reason)
        if error_action.lower() == 'stop':
            raise HTTPError(msg)
        else:
            print(msg + ', Continuing')
    # Renamed from `json` to avoid shadowing the json module.
    data = r.response.json()
    if data.get('Errors'):
        msg = 'PI Web API returned an error: {}'
        raise PIWebAPIError(msg.format(data.get('Errors')))

    webapi = create(Factory(WebAPI), data, r.session)
    webapi.dataservers = _get_servers(webapi, s, data, DataServer, 'DataServers')
    webapi.assetservers = _get_servers(webapi, s, data, AssetServer, 'AssetServers')

    return webapi
Ejemplo n.º 43
0
from __future__ import absolute_import

from requests.packages import urllib3
urllib3.disable_warnings()  # noqa

from dataverse.connection import Connection  # noqa
from dataverse.dataverse import Dataverse  # noqa
from dataverse.dataset import Dataset  # noqa
from dataverse.file import DataverseFile  # noqa
Ejemplo n.º 44
0
def main(*args):
    """Request a SAML token from the command line and print it (or its
    embedded certificates) to stdout.  Returns a process exit code."""
    import argparse
    import getpass
    import sys

    parser = argparse.ArgumentParser(description='request a SAML token')

    parser.add_argument('-u', '--user', help="account user name")
    parser.add_argument('-p', '--password', help="account password")
    parser.add_argument('-r',
                        '--raw',
                        action='store_true',
                        help="don't pack and wrap the token")
    parser.add_argument('--insecure',
                        action='store_true',
                        help="disable SSL/TLS security checks")
    parser.add_argument('--cert-only',
                        action='store_true',
                        help="output embedded certificates in PEM form")

    # compatibility argument -- we don't print out anything
    parser.add_argument('-s',
                        '--silent',
                        action='store_true',
                        help=argparse.SUPPRESS,
                        default=argparse.SUPPRESS)

    options = parser.parse_args()

    def my_input(prompt):
        # Prompt on stderr so stdout stays clean for the token output.
        sys.stderr.write(prompt)
        return input()

    # NOTE(review): the following span is corrupted in this source dump --
    # the credential prompts and the token-request call were redacted
    # ('******') and fused with an except-handler body, so this code is not
    # syntactically valid as shown. Restore from the original project before
    # use; expected shape was roughly:
    #   username = options.user or my_input('User: ')
    #   password = options.password or getpass.getpass('Password: ')
    #   try: token = <request token> except <SSL error> as e: ...
    username = options.user or my_input('User: '******'Password: '******'SSL request failed; you probably need to install the '
               'appropriate certificate authority, or use the correct host '
               'name')
        print(msg, e, file=sys.stderr)
        return 1

    if not options.cert_only:
        sys.stdout.write(token)

    else:
        from lxml import etree
        import ssl

        # Extract every embedded X.509 certificate and emit it as PEM.
        for el in etree.fromstring(token).findall('.//{*}X509Certificate'):
            data = base64.standard_b64decode(el.text)

            sys.stdout.write(ssl.DER_cert_to_PEM_cert(data))

    return 0
Ejemplo n.º 45
0
# Demo script: cookie persistence with/without a Session, SSL verification,
# and proxy configuration examples for the requests library.

# Wrong demo: each requests.get uses a fresh connection, so the cookie set
# by the first call is not sent with the second.
requests.get('http://httpbin.org/cookies/set/number/123456789')
r = requests.get('http://httpbin.org/cookies')
print(r.text)  # {"cookies": {}} no cookies received
# Right demo: a Session carries cookies across calls.
s = requests.Session()
s.get('http://httpbin.org/cookies/set/number/123456789')
r = s.get('http://httpbin.org/cookies')
print(r.text)
"""SSL certificate authentication"""
# Skip certificate validation entirely (emits InsecureRequestWarning).
response = requests.get('https://www.12306.cn', verify=False)
print(response.status_code)

# Ignore the warning (alternatively: logging.captureWarnings(True)).
urllib3.disable_warnings()  # logging.captureWarnings(True)
response = requests.get('https://www.12306.cn', verify=False)
print(response.status_code)

# Supply a client certificate and key.
response = requests.get('https://www.12306.cn',
                        cert=('/path/server.crt', '/path/key'))
print(response.status_code)
"""Proxy Setting"""
# Plain proxy URLs.
proxies = {'http': 'http...', 'https': 'https...'}
# Proxy with HTTP Basic Auth credentials embedded in the URL.
proxies = {'http': 'http://*****:*****@...', 'https': '...'}
# SOCKS proxy (requires the extra: pip3 install requests[socks]).
# pip3 install requests[socks]
proxies = {'http': 'socks5://user:password@...', 'https': 'socks5://...'}
Ejemplo n.º 46
0
import requests
import queue
import re
import datetime
from requests.packages import urllib3
urllib3.disable_warnings()
# "Health preferred" insurance product -- scraping parameters for its
# detail page on baodan360.com.
url = 'https://www.baodan360.com/insurance/detail/id/133339/'

# Quote dimensions to enumerate (values are the site's own Chinese labels).
Sex = ['男', '女']
Extra = ['含附加被保人豁免', '不含附加被保人豁免']
Insurance_Period = ['至80周岁', '保30年', '至60周岁', '至70周岁']
Pay_Period = ['1', '5', '10', '20', '30']

# Coverage amounts: multiples of 10 from 10 to 200, minus a few the product
# does not offer.
Insurance_Amount = [10 * x for x in range(1, 21) if x not in (4, 7, 9, 14, 19)]

# Region codes: top-tier cities, then the wider second-tier list.
First_City = [110100, 310100, 440100, 440300, 320500, 330200, 330100, 320100]
Second_City = [
    110100, 310100, 120100, 500100, 230100, 220100, 210100, 150100, 130100,
    650100, 620100, 630100, 610100, 640100, 410100, 370100, 140100, 340100,
    420100, 430100, 320100, 510100, 520100, 530100, 450100, 330100, 360100,
    440100, 350100, 710000, 810000, 820000, 460100, 150200, 210200, 220200,
    230600, 410300, 420500, 430700, 450200, 510700, 520300, 340200, 130200,
    130600, 130900, 350200, 350500, 350600, 330300, 330400, 330600, 330700,
    331000, 440400, 440600, 440700, 440800, 440900, 441300, 441900, 442000,
    371600, 371700, 370200, 370300, 370500, 370600, 370700, 370800, 370900,
    371000, 371300, 371400, 371500, 320200, 320300, 320400, 320600, 320800,
    320900, 321000, 321100, 321200
]

# Accumulators filled in by the scraper.
RESULTS = []
dictionary_standard = {}
Ejemplo n.º 47
0
def scrapeTweets():
    time_per_100       = []
    fetch_count        = 0
    avg_time_per_fetch = 0
    tweet_count        = 0
    target             = 1000000
    modified_at        = None


    # disable all HTTPS/TLS warnings
    urllib3.disable_warnings()

    # load api keys from api_keys.json
    keys_file_path = os.path.join(os.path.dirname(__file__), os.pardir, 'api_keys.json')

    with open(keys_file_path) as api_keys:    
        keys = json.load(api_keys)

    # provide auth params & obtain an instance of API
    auth = tweepy.OAuthHandler(keys['consumer_key'], keys['consumer_secret'])
    auth.set_access_token(keys['access_token'], keys['access_token_secret'])

    api = tweepy.API(auth)

    db_path = os.path.join(os.path.dirname(__file__), os.pardir, 'Data/tweet_ids')

    # connect to DB
    db = connect('Tweets')

    # drop the database once to ensure whenever the thread is run, we create the db afresh
    db.drop_database('Tweets')

    tweet_id_list = []

    

    with open(db_path) as file_db:
        t0 = time.time()
        for line in file_db:
            status_obj = None
            tweet_id = line.split("\t")[0]
            tweet_id_list.append(tweet_id)
            if(len(tweet_id_list) == 100):
                try:
                    status_obj = api.statuses_lookup(tweet_id_list, [False], [False], [True])
                    for status in status_obj:
                        tweet                       = Tweet()
                        tweet.tweet_id              = status.id_str
                        tweet.text                  = status.text
                        tweet.created_at            = status.created_at
                        tweet.in_reply_to_status_id = status.in_reply_to_status_id_str 
                        tweet.user_id               = status.user.id_str
                        tweet.user_name             = status.user.name
                        tweet.user_followers        = status.user.followers_count
                        tweet.user_location         = status.user.location
                        tweet.favourites_count      = status.user.favourites_count
                        if status.coordinates is not None:
                            tweet.coordinates       = status.coordinates['coordinates']
                        tweet.language              = status.lang
                        # tweet.place_coordinates   = status['']
                        tweet.retweet_count         = status.retweet_count
                        tweet.retweeted             = status.retweeted
                        # tweet.inserted_at
                        tweet.is_news               = None
                        tweet.save()
                    t1 = time.time()
                    time_per_100.append(t1-t0)
                    fetch_count = fetch_count + 1
                    avg_time_per_fetch = sum(time_per_100)/len(time_per_100)
                    tweet_count += len(status_obj)
                    modified_at = datetime.datetime.now().strftime('%H:%M:%S %d-%m-%Y')
                    print("Scraped {0} tweets, Total ={1} tweets".format(
                        len(status_obj), tweet_count))

                    # save all the stats to REDIS
                    r.set('tweet_count', tweet_count)
                    r.set('avg_time_per_fetch', avg_time_per_fetch)
                    r.set('fetch_count', fetch_count)
                    r.set('modified_at', modified_at)
                    # r.set('target', target) 

                except tweepy.RateLimitError:
                    print("Going to Sleep")
                    time.sleep(15 * 60)
        		except Exception as e:
        		    print(str(e))
    	            time.sleep(15 * 60)
                finally:
Ejemplo n.º 48
0
from rucio import version

try:
    # Python 2
    from urlparse import urlparse
    from ConfigParser import NoOptionError, NoSectionError
except ImportError:
    # Python 3
    from urllib.parse import urlparse
    from configparser import NoOptionError, NoSectionError
from dogpile.cache import make_region
from requests import Session
from requests.status_codes import codes, _codes
from requests.exceptions import ConnectionError, RequestException
from requests.packages.urllib3 import disable_warnings  # pylint: disable=import-error
disable_warnings()

# Optional third-party modules: probed once at import time; the resulting
# flags gate the conditional imports below.
EXTRA_MODULES = {'requests_kerberos': False}

for _module_name in EXTRA_MODULES:
    try:
        imp.find_module(_module_name)
    except ImportError:
        EXTRA_MODULES[_module_name] = False
    else:
        EXTRA_MODULES[_module_name] = True

if EXTRA_MODULES['requests_kerberos']:
    from requests_kerberos import HTTPKerberosAuth  # pylint: disable=import-error

LOG = getLogger(__name__)
Ejemplo n.º 49
0
class EtcdConfiguration(object):
    """
    Configuration class using Etcd.

    Uses a special key format to specify the path within etcd, and specify a path inside the json data
    object that might be stored inside the etcd key.
    key  = <etcd path>[|<json path>]
    etcd path = slash-delimited path
    json path = dot-delimited path

    Examples:
        > EtcdConfiguration.set('/foo', 1)
        > print EtcdConfiguration.get('/foo')
        < 1
        > EtcdConfiguration.set('/foo', {'bar': 1})
        > print EtcdConfiguration.get('/foo')
        < {u'bar': 1}
        > print EtcdConfiguration.get('/foo|bar')
        < 1
        > EtcdConfiguration.set('/bar|a.b', 'test')
        > print EtcdConfiguration.get('/bar')
        < {u'a': {u'b': u'test'}}
    """
    # In-memory stand-in for the etcd tree, used while unit tests are running.
    _unittest_data = {}
    # Defaults written under /ovs/framework by initialize().
    base_config = {
        'cluster_id': None,
        'external_etcd': None,
        'plugins/installed': {
            'backends': [],
            'generic': []
        },
        'paths': {
            'cfgdir': '/opt/OpenvStorage/config',
            'basedir': '/opt/OpenvStorage',
            'ovsdb': '/opt/OpenvStorage/db'
        },
        'support': {
            'enablesupport': False,
            'enabled': True,
            'interval': 60
        },
        'storagedriver': {
            'mds_safety': 2,
            'mds_tlogs': 100,
            'mds_maxload': 75
        },
        'webapps': {
            'html_endpoint': '/',
            'oauth2': {
                'mode': 'local'
            }
        }
    }

    # Certificate-related warnings are expected (self-signed certificates are
    # supported), so silence them once at class-definition time.
    disable_warnings(InsecurePlatformWarning)
    disable_warnings(InsecureRequestWarning)
    disable_warnings(SNIMissingWarning)

    def __init__(self):
        """
        Dummy init method
        """
        _ = self

    @staticmethod
    def get(key, raw=False):
        """
        Get value from etcd
        :param key: Key to get
        :param raw: Raw data if True else json format
        :return: Value for key
        """
        key_entries = key.split('|')
        data = EtcdConfiguration._get(key_entries[0], raw)
        if len(key_entries) == 1:
            return data
        # Walk the dot-delimited json path inside the stored object.
        temp_data = data
        for entry in key_entries[1].split('.'):
            temp_data = temp_data[entry]
        return temp_data

    @staticmethod
    def set(key, value, raw=False):
        """
        Set value in etcd
        :param key: Key to store
        :param value: Value to store
        :param raw: Raw data if True else json format
        :return: None
        """
        key_entries = key.split('|')
        if len(key_entries) == 1:
            EtcdConfiguration._set(key_entries[0], value, raw)
            return
        # A json path is given: read-modify-write the stored object,
        # creating intermediate dicts as needed.
        try:
            data = EtcdConfiguration._get(key_entries[0], raw)
        except etcd.EtcdKeyNotFound:
            data = {}
        temp_config = data
        entries = key_entries[1].split('.')
        for entry in entries[:-1]:
            if entry in temp_config:
                temp_config = temp_config[entry]
            else:
                temp_config[entry] = {}
                temp_config = temp_config[entry]
        temp_config[entries[-1]] = value
        EtcdConfiguration._set(key_entries[0], data, raw)

    @staticmethod
    def delete(key, remove_root=False, raw=False):
        """
        Delete key - value from etcd
        :param key: Key to delete
        :param remove_root: Remove root
        :param raw: Raw data if True else json format
        :return: None
        """
        key_entries = key.split('|')
        if len(key_entries) == 1:
            EtcdConfiguration._delete(key_entries[0], recursive=True)
            return
        # Delete only the addressed element inside the stored json object.
        data = EtcdConfiguration._get(key_entries[0], raw)
        temp_config = data
        entries = key_entries[1].split('.')
        if len(entries) > 1:
            for entry in entries[:-1]:
                if entry in temp_config:
                    temp_config = temp_config[entry]
                else:
                    temp_config[entry] = {}
                    temp_config = temp_config[entry]
            del temp_config[entries[-1]]
        if len(entries) == 1 and remove_root is True:
            del data[entries[0]]
        EtcdConfiguration._set(key_entries[0], data, raw)

    @staticmethod
    def exists(key, raw=False):
        """
        Check if key exists in etcd
        :param key: Key to check
        :param raw: Process raw data
        :return: True if exists
        """
        try:
            EtcdConfiguration.get(key, raw)
            return True
        except (KeyError, etcd.EtcdKeyNotFound):
            return False

    @staticmethod
    def dir_exists(key):
        """
        Check if directory exists in etcd
        :param key: Directory to check
        :return: True if exists
        """
        return EtcdConfiguration._dir_exists(key)

    @staticmethod
    def list(key):
        """
        List all keys in tree
        :param key: Key to list
        :return: Generator object
        """
        return EtcdConfiguration._list(key)

    @staticmethod
    def initialize_host(host_id, port_info=None):
        """
        Initialize keys when setting up a host
        :param host_id: ID of the host
        :type host_id: str

        :param port_info: Information about ports to be used
        :type port_info: dict

        :return: None
        """
        # Idempotent: a host that finished setup before is left untouched.
        if EtcdConfiguration.exists(
                '/ovs/framework/hosts/{0}/setupcompleted'.format(host_id)):
            return
        if port_info is None:
            port_info = {}

        mds_port_range = port_info.get('mds', [26300, 26399])
        arakoon_start_port = port_info.get('arakoon', 26400)
        storagedriver_port_range = port_info.get('storagedriver',
                                                 [26200, 26299])

        host_config = {
            'storagedriver': {
                'rsp': '/var/rsp',
                'vmware_mode': 'ganesha'
            },
            'ports': {
                'storagedriver': [storagedriver_port_range],
                'mds': [mds_port_range],
                'arakoon': [arakoon_start_port]
            },
            'setupcompleted': False,
            'versions': {
                'ovs': 4
            },
            'type': 'UNCONFIGURED'
        }
        for key, value in host_config.iteritems():
            EtcdConfiguration._set('/ovs/framework/hosts/{0}/{1}'.format(
                host_id, key),
                                   value,
                                   raw=False)

    @staticmethod
    def initialize(external_etcd=None, logging_target=None):
        """
        Initialize general keys for all hosts in cluster
        :param external_etcd: ETCD runs on another host outside the cluster
        :param logging_target: Configures (overwrites) logging configuration
        """
        cluster_id = ''.join(
            random.choice(string.ascii_letters + string.digits)
            for _ in range(16))
        # Idempotent: an already-initialized cluster is left untouched.
        if EtcdConfiguration.exists('/ovs/framework/cluster_id'):
            return

        messagequeue_cfg = {
            'endpoints': [],
            'metadata': {
                'internal': True
            },
            'protocol': 'amqp',
            'user': '******',
            'password': '******',
            'queues': {
                'storagedriver': 'volumerouter'
            }
        }

        base_cfg = copy.deepcopy(EtcdConfiguration.base_config)
        base_cfg.update({
            'cluster_id': cluster_id,
            'external_etcd': external_etcd,
            'arakoon_clusters': {},
            'stores': {
                'persistent': 'pyrakoon',
                'volatile': 'memcache'
            },
            'messagequeue': {
                'protocol': 'amqp',
                'queues': {
                    'storagedriver': 'volumerouter'
                }
            },
            'logging': {
                'type': 'console'
            }
        })
        if logging_target is not None:
            base_cfg['logging'] = logging_target
        if EtcdConfiguration.exists('/ovs/framework/memcache') is False:
            base_cfg['memcache'] = {
                'endpoints': [],
                'metadata': {
                    'internal': True
                }
            }
        if EtcdConfiguration.exists('/ovs/framework/messagequeue') is False:
            base_cfg['messagequeue'] = messagequeue_cfg
        else:
            # Merge: keep existing messagequeue settings, only add missing keys.
            messagequeue_info = EtcdConfiguration.get(
                '/ovs/framework/messagequeue')
            for key, value in messagequeue_cfg.iteritems():
                if key not in messagequeue_info:
                    base_cfg['messagequeue'][key] = value
        for key, value in base_cfg.iteritems():
            EtcdConfiguration._set('/ovs/framework/{0}'.format(key),
                                   value,
                                   raw=False)

    @staticmethod
    @log_slow_calls
    def _dir_exists(key):
        # Check whether 'key' is a directory node (etcd) / nested dict (tests).
        key = EtcdConfiguration._coalesce_dashes(key=key)

        # Unittests
        if hasattr(unittest, 'running_tests') and getattr(
                unittest, 'running_tests') is True:
            stripped_key = key.strip('/')
            current_dict = EtcdConfiguration._unittest_data
            for part in stripped_key.split('/'):
                if part not in current_dict or not isinstance(
                        current_dict[part], dict):
                    return False
                current_dict = current_dict[part]
            return True

        # Real implementation
        try:
            client = EtcdConfiguration._get_client()
            return client.get(key).dir
        except (KeyError, etcd.EtcdKeyNotFound):
            return False

    @staticmethod
    @log_slow_calls
    def _list(key):
        # Yield the direct child names of 'key'.
        key = EtcdConfiguration._coalesce_dashes(key=key)

        # Unittests
        if hasattr(unittest, 'running_tests') and getattr(
                unittest, 'running_tests') is True:
            data = EtcdConfiguration._unittest_data
            ends_with_dash = key.endswith('/')
            starts_with_dash = key.startswith('/')
            stripped_key = key.strip('/')
            for part in stripped_key.split('/'):
                if part not in data:
                    raise etcd.EtcdKeyNotFound(
                        'Key not found: {0}'.format(key))
                data = data[part]
            if data:
                for sub_key in data:
                    if ends_with_dash is True:
                        yield '/{0}/{1}'.format(stripped_key, sub_key)
                    else:
                        yield sub_key if starts_with_dash is True else '/{0}'.format(
                            sub_key)
            elif starts_with_dash is False or ends_with_dash is True:
                yield '/{0}'.format(stripped_key)
            return

        # Real implementation
        client = EtcdConfiguration._get_client()
        for child in client.get(key).children:
            if child.key is not None and child.key != key:
                yield child.key.replace('{0}/'.format(key), '')

    @staticmethod
    @log_slow_calls
    def _delete(key, recursive):
        # Remove 'key' (and, for etcd, optionally its subtree).
        key = EtcdConfiguration._coalesce_dashes(key=key)

        # Unittests
        if hasattr(unittest, 'running_tests') and getattr(
                unittest, 'running_tests') is True:
            stripped_key = key.strip('/')
            data = EtcdConfiguration._unittest_data
            for part in stripped_key.split('/')[:-1]:
                if part not in data:
                    raise etcd.EtcdKeyNotFound(
                        'Key not found : {0}'.format(key))
                data = data[part]
            key_to_remove = stripped_key.split('/')[-1]
            if key_to_remove in data:
                del data[key_to_remove]
            return

        # Real implementation
        client = EtcdConfiguration._get_client()
        client.delete(key, recursive=recursive)

    @staticmethod
    @log_slow_calls
    def _get(key, raw):
        # Fetch the raw string stored at 'key'; json-decode unless raw is True.
        key = EtcdConfiguration._coalesce_dashes(key=key)

        # Unittests
        if hasattr(unittest, 'running_tests') and getattr(
                unittest, 'running_tests') is True:
            if key in ['', '/']:
                return
            stripped_key = key.strip('/')
            data = EtcdConfiguration._unittest_data
            for part in stripped_key.split('/')[:-1]:
                if part not in data:
                    raise etcd.EtcdKeyNotFound(
                        'Key not found : {0}'.format(key))
                data = data[part]
            last_part = stripped_key.split('/')[-1]
            if last_part not in data:
                raise etcd.EtcdKeyNotFound('Key not found : {0}'.format(key))
            data = data[last_part]
            if isinstance(data, dict):
                # Directory nodes have no value of their own.
                data = None
        else:
            # Real implementation
            client = EtcdConfiguration._get_client()
            data = client.read(key).value

        if raw is True:
            return data
        return json.loads(data)

    @staticmethod
    @log_slow_calls
    def _set(key, value, raw):
        # Store 'value' at 'key'; json-encode unless raw is True.
        key = EtcdConfiguration._coalesce_dashes(key=key)
        data = value
        if raw is False:
            data = json.dumps(value)

        # Unittests
        if hasattr(unittest, 'running_tests') and getattr(
                unittest, 'running_tests') is True:
            stripped_key = key.strip('/')
            ut_data = EtcdConfiguration._unittest_data
            for part in stripped_key.split('/')[:-1]:
                if part not in ut_data:
                    ut_data[part] = {}
                ut_data = ut_data[part]

            ut_data[stripped_key.split('/')[-1]] = data
            return

        # Real implementation
        client = EtcdConfiguration._get_client()
        client.write(key, data)
        # Best-effort mirror of the value into the persistent store, bounded
        # by a 0.5 second SIGALRM-based timeout so it can never slow us down.
        try:

            def _escape(*args, **kwargs):
                _ = args, kwargs
                raise RuntimeError()

            from ovs.extensions.storage.persistentfactory import PersistentFactory
            client = PersistentFactory.get_client()
            signal.signal(signal.SIGALRM, _escape)
            # BUGFIX: signal.alarm() only accepts whole seconds; passing 0.5
            # raised TypeError (swallowed below), so the mirror write silently
            # never ran. setitimer() supports sub-second timeouts.
            signal.setitimer(signal.ITIMER_REAL, 0.5)
            client.set(key, value)
            signal.setitimer(signal.ITIMER_REAL, 0)  # Cancel the pending timer
        except Exception:
            # Deliberate best-effort: a failed/timed-out mirror write must not
            # break the etcd write. (Narrowed from a bare except so SystemExit
            # and KeyboardInterrupt still propagate.)
            pass

    @staticmethod
    def _get_client():
        # Single place to configure the etcd connection.
        return etcd.Client(port=2379, use_proxies=True)

    @staticmethod
    def _coalesce_dashes(key):
        """
        Collapse repeated slashes, e.g. //ovs//framework/ becomes /ovs/framework/
        :param key: Key to convert
        :type key: str

        :return: Key without consecutive duplicate slashes
        :rtype: str
        """
        return ''.join(k if k == '/' else ''.join(group)
                       for k, group in groupby(key))
Ejemplo n.º 50
0
    from requests_kerberos import HTTPKerberosAuth
except Exception:
    # requests_kerberos library require pykerberos.
    # pykerberos require krb5-devel, which isn't python lib.
    # Kerberos users will need to manually install it.
    HTTPKerberosAuth = None

# Defaults used when composing the REST endpoint URL.
DEFAULT_PORT = 80
SECURED_PORT = 443
SECURED_PROTOCOL = 'https'
DEFAULT_PROTOCOL = 'http'
DEFAULT_API_VERSION = 'v3.1'
# Scheme prefix for the HTTP Basic "Authorization" header value.
BASIC_AUTH_PREFIX = 'Basic'
# Name of the HTTP header carrying the tenant name.
CLOUDIFY_TENANT_HEADER = 'Tenant'

# Silence urllib3's InsecurePlatformWarning (emitted on older Python builds
# with limited SSL support).
urllib3.disable_warnings(urllib3.exceptions.InsecurePlatformWarning)


class HTTPClient(object):
    def __init__(self,
                 host,
                 port=DEFAULT_PORT,
                 protocol=DEFAULT_PROTOCOL,
                 api_version=DEFAULT_API_VERSION,
                 headers=None,
                 query_params=None,
                 cert=None,
                 trust_all=False,
                 username=None,
                 password=None,
                 token=None,
Ejemplo n.º 51
0
#

# This script delete policies spread over a number rics
# Intended for parallel processing
# Returns a string with result, either "0" for ok, or "1<fault description>"

import os
import json
import sys
import requests
import traceback

# disable warning about unverified https requests
from requests.packages import urllib3

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

#arg responsecode baseurl num_rics uuid startid count pids pid_id proxy

try:
    if len(sys.argv) != 10:
        print("1Expected 9 args, got "+str(len(sys.argv)-1)+ ". Args: responsecode baseurl num_rics uuid startid count pids pid_id proxy")
        sys.exit()

    responsecode=int(sys.argv[1])
    baseurl=str(sys.argv[2])
    num_rics=int(sys.argv[3])
    uuid=str(sys.argv[4])
    start=int(sys.argv[5])
    count=int(sys.argv[6])
    pids=int(sys.argv[7])
Ejemplo n.º 52
0
    def __init__(self, config):
        """
        Initialization is responsible for fetching service instance objects for each vCenter instance
        pyvmomi has some security checks enabled by default from python 2.7.9 onward to connect to vCenter.

        :param config: dict holding at least 'host', 'username', 'password' and 'port'.
        """

        # Holds all the VMs' instanceuuid that are discovered for each of the vCenter. Going ahead it would hold all the
        # other managed objects of vCenter that would be monitored.
        self.mors = {}

        self.params = config

        # Module-level caches shared with the rest of this plugin.
        global metrics
        global counters

        metrics = util.parse_metrics()
        counters = util.parse_counters()

        self.needed_metrics = {}
        self.configured_metrics = {}
        self.refresh_rates = {}

        for k, v in metrics.items():
            self.configured_metrics.update({util.get_counter(k): v})

        # PEP 476 enables certificate verification starting with 2.7.9 itself,
        # so the opt-out must include that version. The previous strict ">"
        # comparison skipped exactly 2.7.9.
        if (2, 7, 9) <= sys.version_info < (3, 0, 0):
            # https://www.python.org/dev/peps/pep-0476/
            # Look for 'Opting out' section in this that talks about disabling the certificate verification

            # Following line helps to disable globally
            ssl._create_default_https_context = ssl._create_unverified_context

        # Disabling the security warning message, as the certificate verification is disabled
        urllib3.disable_warnings()

        try:
            service_instance = connect.SmartConnect(
                host=self.params['host'],
                user=self.params['username'],
                pwd=self.params['password'],
                port=int(self.params['port']))
            atexit.register(connect.Disconnect, service_instance)
            self.service_instance = service_instance
            self._cache_metrics_metadata(self.params['host'])
        except KeyError as ke:
            util.sendEvent(
                "Plugin vmware: Key Error",
                "Improper param.json, key missing: [" + str(ke) + "]", "error")
            sys.exit(-1)
        except ConnectionError as ce:
            util.sendEvent(
                "Plugin vmware: Error connecting to vCenter",
                "Could not connect to the specified vCenter host: [" +
                str(ce) + "]", "critical")
            sys.exit(-1)
        # vim faults inherit from Exception (StandardError on Python 2), so
        # this clause must precede the generic StandardError handler below;
        # previously it came after and was unreachable.
        except vim.fault.InvalidLogin as il:
            util.sendEvent(
                "Plugin vmware: Error logging into vCenter",
                "Could not login to the specified vCenter host: [" + str(il) +
                "]", "critical")
            sys.exit(-1)
        except StandardError as se:
            util.sendEvent("Plugin vmware: Unknown Error", "[" + str(se) + "]",
                           "critical")
            sys.exit(-1)
import datetime
import json
import logging
import time
import traceback
from collections import namedtuple

from requests import Session
from requests.packages import urllib3

# Requests to self-signed endpoints are expected here; verification is off,
# so suppress the per-request warning noise globally.
urllib3.disable_warnings(
)  # for self-signed certificates- still safe (just stops verbal warnings)


class BadRequest(Exception):
    """Raised when the server rejects a request as malformed."""


class AuthenticationError(Exception):
    """Raised when authentication with the server fails."""


class NotFoundError(Exception):
    """Raised when a requested resource does not exist."""


class ServerError(Exception):
    """Raised when the server reports an internal error."""


class OtherError(Exception):
Ejemplo n.º 54
0
# Interactive script: prompt for a VIP address and an excel workbook, then
# load the workbook into a pandas DataFrame for further processing.
import pandas as pd
from xpinyin import Pinyin
# NOTE(review): importing "encoding" from idlelib is unusual — confirm it is
# actually used further down in this script.
from idlelib.iomenu import encoding
import getpass
import time
from netaddr import IPNetwork, IPAddress
# Check IP address (example kept for reference):
#if IPAddress("192.168.0.1") in IPNetwork("192.168.0.0/24"):
#    print "Yay!"
vsipname = input('VIP Address: ')
print("")
vsipname = vsipname.strip()
#srcip = IPAddress(vsipname)

p = Pinyin()
urllib3.disable_warnings()  # disable ssl warnings (urllib3 imported earlier in this file)

# Import excel data    ------begin----------
# Prompt (zh): asks for the excel workbook filename including its extension
# (e.g. "配置資訊.xlsx"); the file must sit in the same folder as this script.
config_file = input("請輸入需要導入的excel表格名稱,請帶尾碼如:配置資訊.xlsx,設定檔請放在python腳本一個資料夾下:")
#data = xlrd.open_workbook(config_file,'r')
#data = pd.read_excel(config_file, sheet_name="sheet1", engine='openpyxl')
# keep_default_na=False keeps empty cells as "" rather than NaN.
data = pd.read_excel(config_file,
                     sheet_name=0,
                     engine='openpyxl',
                     keep_default_na=False)
#data = pd.read_excel(config_file, sheet_name=0, engine='xlrd')
#data = pd.read_excel(config_file, sheet_name=0)
#df=pandas.read_excel(‘data.xlsx’,engine=‘openpyxl’)
print(data)
#table = data.sheets()[0]
#table = data
Ejemplo n.º 55
0
    def __init__(self):
        """Silence urllib3 warnings and attach a logger to this instance."""
        disable_warnings()
        self.logger = LoggerFactory().create_logger()
Ejemplo n.º 56
0
    def rest_call(self, url, method, data=None, jsondata=False, sensitive=False, extraheaders=None, timeout=60,
                  retry=None, max_retry=30, retry_sleep=10):
        """
        Generic REST call worker function
        :param url: URL for the REST call
        :param method: METHOD for the REST call
        :param data: Optional DATA for the call (for POST/PUT/etc.)
        :param jsondata: If data should be sent as JSON and not www-form-urlencoded
        :param sensitive: Flag if content request/response should be hidden from logging functions
        :param extraheaders: Extra/modified headers
        :param timeout: Requests Timeout
        :param retry: Boolean if request should be retried if failure.
        :param max_retry: Maximum number of retries before giving up
        :param retry_sleep: Time inbetween retries.
        :return: Tuple (Boolean success or failure, Requests.Response object);
                 (False, None) when all retries are exhausted or a one-shot
                 request times out.
        """
        logger.debug('rest_call:')

        # check for SSL verification on this session
        verify = self.__ap_verifyssl

        # Retry loop counter
        retry_count = 0
        if not extraheaders:
            extraheaders = {}

        # Run once logic.
        if not retry:
            run_once = True
        else:
            run_once = False

        if jsondata:
            # need to make sure data is cast to JSON.
            data = json.dumps(data)
            extraheaders['Content-Type'] = 'application/json'

        while retry or run_once:
            headers = {'Accept': 'application/json'}
            # if the request needs extra headers, add them.

            # isinstance (instead of "type(...) is dict") also accepts dict
            # subclasses such as OrderedDict.
            if extraheaders and isinstance(extraheaders, dict):
                for key, value in extraheaders.items():
                    headers[key] = value

            cookie = self.__http_session.cookies.get_dict()

            # disable warnings and verification if requested.
            if not verify:
                # disable warnings for SSL certs.
                urllib3.disable_warnings()

            logger.debug('url = {0}'.format(url))

            # make request
            try:
                if data:
                    # pre request, dump simple JSON debug
                    if not sensitive and (logger_level <= logging.DEBUG and logger_level != logging.NOTSET):
                        logger.debug('\n\tREQUEST: %s %s\n\tHEADERS: %s\n\tCOOKIES: %s\n\tDATA: %s\n',
                                     method.upper(), url, headers, cookie, data)

                    response = getattr(self.__http_session, method)(url, data=data, headers=headers, verify=verify,
                                                                    stream=True, timeout=timeout, allow_redirects=False)

                else:
                    # pre request, dump simple JSON debug
                    if not sensitive and (logger_level <= logging.DEBUG and logger_level != logging.NOTSET):
                        logger.debug('\n\tREQUEST: %s %s\n\tHEADERS: %s\n\tCOOKIES: %s\n',
                                     method.upper(), url, headers, cookie)

                    response = getattr(self.__http_session, method)(url, headers=headers, verify=verify, stream=True,
                                                                    timeout=timeout, allow_redirects=False)

                # if it's a non-good response, don't accept it - wait and retry
                if response.status_code not in [requests.codes.ok,
                                                requests.codes.no_content,
                                                requests.codes.found,
                                                requests.codes.moved]:

                    # Simple JSON debug
                    if not sensitive and (logger_level <= logging.DEBUG and logger_level != logging.NOTSET):
                        try:
                            logger.debug('RESPONSE HEADERS: %s\n', json.dumps(
                                json.loads(str(response.headers)), indent=4))
                        except ValueError:
                            logger.debug('RESPONSE HEADERS: %s\n', str(response.headers))
                        try:
                            logger.debug('RESPONSE: %s\n', json.dumps(response.json(), indent=4))
                        except ValueError:
                            logger.debug('RESPONSE: %s\n', str(response.text))

                    logger.debug("Error, non-200 response received: %s", response.status_code)

                    if retry:
                        # keep retrying
                        retry_count += 1
                        if retry_count >= max_retry:
                            logger.info("Max retries of %s reached.", max_retry)
                            retry = False
                        # wait a bit to see if issue clears.
                        sleep(retry_sleep)
                    else:
                        # run once is over.
                        run_once = False
                        return False, response

                else:

                    # Simple JSON debug
                    if not sensitive and (logger_level <= logging.DEBUG and logger_level != logging.NOTSET):
                        try:
                            logger.debug('RESPONSE HEADERS: %s\n', json.dumps(
                                json.loads(str(response.headers)), indent=4))
                            logger.debug('RESPONSE: %s\n', json.dumps(response.json(), indent=4))
                        except ValueError:
                            logger.debug('RESPONSE HEADERS: %s\n', str(response.headers))
                            logger.debug('RESPONSE: %s\n', str(response.text))

                    # if retries have been done, update log if requested.
                    if retry_count > 0:
                        logger.debug("Got good response after %s retries. ", retry_count)

                    # run once is over, if set.
                    run_once = False
                    return True, response

            except requests.exceptions.Timeout:

                logger.info("Error, request timeout reached.")

                if retry:
                    # keep retrying
                    retry_count += 1
                    if retry_count >= max_retry:
                        logger.info("Max retries of %s reached.", max_retry)
                        retry = False
                    # wait a bit to see if issue clears.
                    sleep(retry_sleep)
                else:
                    # run once is over.
                    # run_once = False
                    return False, None

        # BUGFIX: when retries were exhausted, control previously fell off the
        # end of the loop and the function implicitly returned None, breaking
        # callers that unpack the documented (success, response) tuple.
        return False, None
Ejemplo n.º 57
0
 def __init__(self,
              url=None,
              ssl_verify=True,
              accept="application/json, */*",
              content_type="application/json",
              user_agent="RESTinstance/%s" % (__version__),
              proxies=None,
              schema=None,
              spec=None,
              instances=None,
              loglevel="WARN"):
     """Build the request template, JSON schema skeleton and spec storage.

     BUGFIX: ``proxies``/``schema``/``spec``/``instances`` previously used
     mutable default arguments (``{}``/``[]``), which are shared between all
     calls and can leak state across instances. ``None`` sentinels with
     per-call construction behave identically for every existing caller.
     """
     # Create fresh containers per call instead of sharing one default object.
     proxies = {} if proxies is None else proxies
     schema = {} if schema is None else schema
     spec = {} if spec is None else spec
     instances = [] if instances is None else instances
     self.request = {
         "method": None,
         "url": None,
         "scheme": "",
         "netloc": "",
         "path": "",
         "query": {},
         "body": None,
         "data": None,
         "auth": None,
         "headers": {
             "Accept": REST._input_string(accept),
             "Content-Type": REST._input_string(content_type),
             "User-Agent": REST._input_string(user_agent),
         },
         "proxies": REST._input_object(proxies),
         "timeout": [None, None],
         "cert": None,
         "sslVerify": REST._input_ssl_verify(ssl_verify),
         "allowRedirects": True,
     }
     if url:
         # Normalize: strip one trailing slash, default to http:// scheme.
         url = REST._input_string(url)
         if url.endswith("/"):
             url = url[:-1]
         if not url.startswith(("http://", "https://")):
             url = "http://" + url
         url_parts = urlparse(url)
         self.request["scheme"] = url_parts.scheme
         self.request["netloc"] = url_parts.netloc
         self.request["path"] = url_parts.path
     if not self.request["sslVerify"]:
         # Verification disabled: suppress the per-request urllib3 warnings.
         disable_warnings()
     self.schema = {
         "$schema": "http://json-schema.org/draft-07/schema#",
         "title": url,
         "description": None,
         "default": True,
         "examples": [],
         "type": "object",
         "properties": {
             "request": {
                 "type": "object",
                 "properties": {}
             },
             "response": {
                 "type": "object",
                 "properties": {}
             },
         },
     }
     self.schema.update(self._input_object(schema))
     self.spec = {}
     self.spec.update(self._input_object(spec))
     self.instances = self._input_array(instances)
     self.log_level = self._input_log_level(loglevel)
Ejemplo n.º 58
0
    def __init__(self, timeout=None, clear_handler=None, refresh_handler=None):
        """
        Initialize the Docker wrapper.
        :param timeout: int
        :param clear_handler: callable
        :param refresh_handler: callable
        """

        assert callable(clear_handler)
        assert callable(refresh_handler)

        # Last exception raised by a wrapped operation, if any.
        self.exception = None

        # Command name -> (handler callable, one-line help text).
        # NOTE: bound methods (e.g. self.logs) are captured here, *before*
        # the attribute assignments further down.
        self.handlers = {
            'attach': (self.attach, 'Attach to a running container.'),
            'build': (self.build, ("Build a new image from the source"
                                   " code")),
            'clear': (clear_handler, "Clear the window."),
            'create': (self.create, 'Create a new container.'),
            'exec': (self.execute, ("Run a command in a running"
                                    " container.")),
            'help': (self.help, "Help on available commands."),
            'pause': (self.pause, "Pause all processes within a container."),
            'ps': (self.containers, "List containers."),
            'port': (self.port, ("List port mappings for the container, or "
                                 "lookup the public-facing port that is "
                                 "NAT-ed to the private_port.")),
            'pull': (self.pull, ("Pull an image or a repository from the "
                                 "registry.")),
            'push': (self.push, ("Push an image or a repository to the "
                                 "registry.")),
            'images': (self.images, "List images."),
            'info': (self.info, "Display system-wide information."),
            'inspect':
            (self.inspect,
             "Return low-level information on a " + "container or image."),
            'login': (self.login, ("Register or log in to a Docker registry "
                                   "server (defaut "
                                   "\"https://index.docker.io/v1/\").")),
            'logs': (self.logs, "Fetch the logs of a container."),
            'refresh': (refresh_handler, "Refresh autocompletions."),
            'restart': (self.restart, "Restart a running container."),
            'run': (self.run, "Run a command in a new container."),
            'rm': (self.rm, "Remove one or more containers."),
            'rmi': (self.rmi, "Remove one or more images."),
            'search': (self.search, "Search the Docker Hub for images."),
            'shell': (self.shell, "Get shell into a running container."),
            'start': (self.start, "Restart a stopped container."),
            'stop': (self.stop, "Stop a running container."),
            'tag': (self.tag, "Tag an image into a repository."),
            'top': (self.top, "Display the running processes of a container."),
            'unpause': (self.unpause, ("Unpause all processes within a "
                                       "container.")),
            'version': (self.version, "Show the Docker version information."),
            'volume create': (self.volume_create, "Create a new volume."),
            'volume inspect': (self.volume_inspect, "Inspect one or more "
                               "volumes."),
            'volume ls': (self.volume_ls, "List volumes."),
            'volume rm': (self.volume_rm, "Remove a volume."),
        }

        self.output = None
        self.after = None
        self.command = None
        # NOTE(review): this rebinds the instance attribute "logs" to None,
        # shadowing the class's logs() method for direct attribute access
        # (the handlers table above already captured the bound method).
        # Confirm this shadowing is intentional.
        self.logs = None

        self.is_refresh_containers = False
        self.is_refresh_running = False
        self.is_refresh_images = False
        self.is_refresh_volumes = False

        # Suppress urllib3/SSL warning noise from the Docker client transport.
        disable_warnings()

        # macOS / Windows connect via TLS-enabled docker-machine settings;
        # other (unix-based) platforms use the local socket with TLSv1.
        if sys.platform.startswith('darwin') \
                or sys.platform.startswith('win32'):
            try:
                # mac or win
                kwargs = kwargs_from_env()
                # hack from here:
                # http://docker-py.readthedocs.org/en/latest/boot2docker/
                # See also: https://github.com/docker/docker-py/issues/406
                if 'tls' in kwargs:
                    kwargs['tls'].assert_hostname = False
                kwargs['timeout'] = timeout
                self.instance = AutoVersionClient(**kwargs)

            except DockerException as x:
                # NOTE(review): exception .message is Python 2 only — confirm
                # this module is not expected to run under Python 3.
                if 'CERTIFICATE_VERIFY_FAILED' in x.message:
                    raise DockerSslException(x)
                elif 'ConnectTimeoutError' in x.message:
                    raise DockerTimeoutException(x)
                else:
                    raise x
        else:
            # unix-based
            kwargs = kwargs_from_env(ssl_version=ssl.PROTOCOL_TLSv1,
                                     assert_hostname=False)
            kwargs['timeout'] = timeout
            self.instance = AutoVersionClient(**kwargs)
Ejemplo n.º 59
0
# ================================================================================================
# local imports
from .auth import AuthConfig
from .resource import Resource, JSONResource
from .exception import RestClientConfigurationError
from .utils import URLValidator

# ================================================================================================
#  Interface tweak
from requests.packages.urllib3 import disable_warnings
from requests.packages.urllib3.exceptions import InsecureRequestWarning

logger = logging.getLogger(__name__)

# Suppress urllib3's InsecureRequestWarning, emitted for HTTPS requests made
# with certificate verification disabled (verify=False).
disable_warnings(InsecureRequestWarning)


class ParameterConfig:
    """Contain and validate parameters for a REST endpoint. As this is a
    configuration container only, the main purpose is to store the config and
    check if the input aligns with the intended use. There is no validation
    beyond this point

    """

    # -----------------------------------------------------------------------------------------------------
    def __init__(self,
                 name: str,
                 required: bool = False,
                 multiple: bool = False,
Ejemplo n.º 60
0
from argparse import ArgumentParser
from json import dumps, loads
from os.path import basename, splitext
from pprint import pprint
from sys import stderr, path
from time import time

from requests import post
from requests.packages.urllib3 import disable_warnings
from urllib3.exceptions import InsecureRequestWarning

# Import encryption routines.
path.insert(1, 'RAU_crypto')
from RAU_crypto import RAUCipher

disable_warnings(category=InsecureRequestWarning)


def send_request(url, files):
    headers = {
        'User-Agent':
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:54.0) Gecko/20100101 Firefox/54.0',
        'Connection': 'close',
        'Accept-Language': 'en-US,en;q=0.5',
        'Accept':
        'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Upgrade-Insecure-Requests': '1',
    }
    response = post(url,
                    files=files,
                    headers=headers,