def post(url, data, timeout=None):
    """Send a POST request to *url* and return the parsed JSON response.

    Args:
        url: The web location we want to retrieve.
        data: A dict of (str, unicode) key/value pairs.
        timeout: Optional float; definitive timeout (in seconds) for the
            urlopen() call.  When omitted the underlying default applies.

    Returns:
        A JSON object.
    """
    extra_kwargs = {} if timeout is None else {'timeout': timeout}
    if InputFile.is_inputfile(data):
        # Multipart form upload for file payloads.
        form = InputFile(data)
        request = Request(url, data=form.to_form(), headers=form.headers)
    else:
        payload = json.dumps(data).encode()
        request = Request(url, data=payload,
                          headers={'Content-Type': 'application/json'})
    return _parse(urlopen(request, **extra_kwargs).read())
def load(self, filename):
    """Unpickle ``self.data`` from *filename* (a local path or a URL)."""
    # TODO: currently support both loading from file path or url
    # if there are more things to filter may be separate the load function
    # urllib2 needs a scheme - assume a local file when none is given.
    if not urlparse(filename)[0]:
        filename = 'file:' + filename
    if filename.startswith('file:'):
        # NOTE(review): pickle on fetched content executes arbitrary code
        # if the source is untrusted - keep sources trusted.
        handle = urlopen(Request(filename, headers={"Accept": "*/*"}))
        self.data = pickle.load(handle)
        handle.close()
    else:
        # Use our own request class so we get authentication if needed.
        from WMCore.Services.Requests import Requests
        req = Requests(filename)
        payload = req.makeRequest('', incoming_headers={"Accept": "*/*"},
                                  decoder=False)
        self.data = pickle.loads(payload[0])
    return
def test_make_request(self):
    """make_request() must mirror a plain Request with the GitHub headers."""
    url = 'https://foo.bar'
    observed = issue_reporter.make_request(url)
    expected = Request(url, headers=fakes.GITHUB_HEADERS)
    self.assertEqual(expected.headers, observed.headers)
    self.assertEqual(expected.get_full_url(), observed.get_full_url())
def exists(self):
    """Check whether the package exists on PyPI via a HEAD request.

    Returns True when the server answers, False on 404; any other
    HTTP error is re-raised.
    """
    req = Request("https://pypi.python.org/pypi/{}".format(self.name))
    # HEAD avoids transferring the body; we only care about the status.
    req.get_method = lambda: "HEAD"
    try:
        resp = urlopen(req)
        # Close the response so the underlying connection isn't leaked.
        resp.close()
        return True
    except HTTPError as e:
        if e.code == 404:
            return False
        raise
def exists(self):
    """Check whether the package exists on PyPI via a HEAD request.

    Returns True when the server answers, False on 404; any other
    HTTP error is re-raised.
    """
    req = Request("https://pypi.org/pypi/{}".format(self.name))
    # HEAD avoids transferring the body; we only care about the status.
    req.get_method = lambda: "HEAD"
    try:
        resp = urlopen(req)
        # Close the response so the underlying connection isn't leaked.
        resp.close()
        return True
    except HTTPError as e:
        if e.code == 404:
            return False
        raise
def _build_send_req(self, url, method='GET'):
    """Issue *method* against *url* with Splunk auth and decode the JSON reply.

    Returns None when the endpoint answers 404; other HTTP errors are
    re-raised.
    """
    headers = {
        'Authorization': 'Splunk %s' % self.session_key,
        'Content-Type': 'application/json'
    }
    try:
        req = Request(url, headers=headers)
        req.get_method = lambda: method
        return json.loads(urlopen(req).read())
    except HTTPError as e:
        if e.code != http.client.NOT_FOUND:
            raise e
        return None
def _open_remote_file(fn): """ Opens a remote file, trying different locations fn (str): the filename return (File): the opened File-like from urllib2 raise HTTPError: in case of failure to find the file """ # Sadly our website has to block requests with user-agent that looks too # much like bots and script-kiddies. That includes the default Python # user-agent. IOW, it checks that the caller is clever enough to # change the user-agent. So we have to show that we are "that" clever... headers = {"User-Agent": "Mozilla/5.0 Odemis"} for url in VIEWER_ROOT_URLS: try: web_url = url + fn req = Request(web_url, headers=headers) web_file = urlopen(req, timeout=10) break except HTTPError as err: if err.getcode() == 404 and url != VIEWER_ROOT_URLS[-1]: logging.info( "Opening URL %s failed, will try another address", web_url) continue raise # It should now either have succeeded or raised an exception return web_file
def fetch_dataset(url, sourcefile, destfile, totalsz):
    """
    Download the file specified by the given URL.

    Args:
        url (str): Base URL of the file to be downloaded.
        sourcefile (str): Name of the source file.
        destfile (str): Path to the destination.
        totalsz (int): Size of the file to be downloaded.
    """
    req = Request('/'.join([url, sourcefile]), headers={'User-Agent': 'neon'})
    # backport https limitation and workaround per
    # http://python-future.org/imports.html
    cloudfile = urlopen(req)
    neon_logger.display("Downloading file: {}".format(destfile))
    blockchar = u'\u2588'  # character to display in progress bar
    with open(destfile, 'wb') as f:
        data_read = 0
        chunksz = 1024**2
        while 1:
            data = cloudfile.read(chunksz)
            if not data:
                break
            # Clamp so the bar never exceeds 100% on the final short chunk.
            data_read = min(totalsz, data_read + chunksz)
            progress_string = u'Download Progress |{:<50}| '.format(
                blockchar * int(float(data_read) / totalsz * 50))
            sys.stdout.write('\r')
            if PY3:
                sys.stdout.write(progress_string)
            else:
                # Python 2 stdout expects encoded bytes for the block char.
                sys.stdout.write(progress_string.encode("utf-8"))
            sys.stdout.flush()
            f.write(data)
    neon_logger.display("Download Complete")
def log_event(settings, event, source, sourcetype, host, index):
    """Send *event* to the Splunk simple receiver endpoint.

    Returns True when the receiver answers with a 2xx status, False on
    any failure (errors are reported on stderr).
    """
    if event is None:
        sys.stderr.write("ERROR No event provided\n")
        return False
    query = [('source', source), ('sourcetype', sourcetype),
             ('index', index)]
    if host:
        query.append(('host', host))
    url = '%s/services/receivers/simple?%s' % (settings.get('server_uri'),
                                               urlencode(query))
    try:
        # `unicode` only exists on Python 2; the original call raised
        # NameError under Python 3, so fall back to str there.
        if sys.version_info >= (3, 0):
            encoded_body = str(event).encode('utf-8')
        else:
            encoded_body = unicode(event).encode('utf-8')  # noqa: F821
        req = Request(
            url, encoded_body,
            {'Authorization': 'Splunk %s' % settings.get('session_key')})
        res = urlopen(req)
        if 200 <= res.code < 300:
            sys.stderr.write(
                "DEBUG receiver endpoint responded with HTTP status=%d\n"
                % res.code)
            return True
        else:
            sys.stderr.write(
                "ERROR receiver endpoint responded with HTTP status=%d\n"
                % res.code)
            return False
    except HTTPError as e:
        sys.stderr.write("ERROR Error sending receiver request: %s\n" % e)
    except URLError as e:
        sys.stderr.write("ERROR Error sending receiver request: %s\n" % e)
    except Exception as e:
        sys.stderr.write("ERROR Error %s\n" % e)
    return False
def create_ticket(self, api_key, fields, files=None):
    """
    Create ticket on osTicket server.

    :arg api_key: (String) API-Key
    :arg fields: (String --> String) dictionary containing keys
        name, email, subject, message
    :arg files: (None or list of Strings) pathname of zip files that
        should be attached
    :returns: (int) response code
    :raises ValueError: ticket upload failed
    :raises urllib.error.HTTPError: key not accepted
    :raises urllib.error.URLError: connection problem
    """
    if not files:
        files = []
    fields["attachments"] = []
    for fn in files:
        with open(fn, "rb") as f:
            # b64encode returns bytes; decode to str so the "%s"
            # interpolation doesn't produce "b'...'" on Python 3.
            encoded_data = base64.b64encode(f.read()).decode("ascii")
        att_desc = {os.path.basename(fn):
                    "data:application/zip;base64,%s" % encoded_data}
        fields["attachments"].append(att_desc)
    # Request data must be bytes on Python 3, so encode the JSON body.
    description = json.dumps(fields).encode("utf-8")
    req = Request(OS_TICKET_URL, data=description,
                  headers={"X-API-Key": api_key})
    f = urlopen(req)
    response = f.getcode()
    f.close()
    if response == RESPONSE_SUCCESS:
        return
    else:
        raise ValueError('Ticket creation failed with error code %s.'
                         % response)
def send_webhook_request(url, body, user_agent=None):
    """POST *body* (JSON text/bytes) to *url*; True on 2xx, False otherwise.

    All progress and error reporting goes to stderr.
    """
    if url is None:
        sys.stderr.write("ERROR No URL provided\n")
        return False
    sys.stderr.write(
        "INFO Sending POST request to url=%s with size=%d bytes payload\n"
        % (url, len(body)))
    sys.stderr.write("DEBUG Body: %s\n" % body)
    try:
        if sys.version_info >= (3, 0) and type(body) == str:
            body = body.encode()
        headers = {"Content-Type": "application/json",
                   "User-Agent": user_agent}
        response = urlopen(Request(url, body, headers))
        ok = 200 <= response.code < 300
        level = "INFO" if ok else "ERROR"
        sys.stderr.write(
            "%s Webhook receiver responded with HTTP status=%d\n"
            % (level, response.code))
        return ok
    except HTTPError as e:
        sys.stderr.write("ERROR Error sending webhook request: %s\n" % e)
    except URLError as e:
        sys.stderr.write("ERROR Error sending webhook request: %s\n" % e)
    except ValueError as e:
        sys.stderr.write("ERROR Invalid URL: %s\n" % e)
    return False
def _url_query(self, api_path, authenticate=True):
    """Query the server API, retrying on transport-level failures.

    Returns the raw response body on HTTP 2xx; maps known HTTP error
    codes to specific exceptions; retries up to ``self.retry_limit``
    on connection-level errors before raising ConnectionError.
    """
    i = 0
    while i <= self.retry_limit:
        i += 1
        try:
            query_url = self._server_url + api_path
            req = Request(query_url)
            if self.wif and authenticate:
                # Sign the request with Date/Authorization headers.
                headers = storjcore.auth.create_headers(
                    self.btctxstore, self._get_server_address(), self.wif
                )
                req.add_header("Date", headers["Date"])
                req.add_header("Authorization", headers["Authorization"])
            logger.info("Query: {0}".format(query_url))
            response = urlopen(req, timeout=30)
            if 200 <= response.code <= 299:
                return response.read()
        except HTTPError as e:
            #logger.warning(repr(e)) duplicate log entry
            if e.code == 409:
                raise exceptions.AddressAlreadyRegistered(
                    self.auth_address(), self._server_url)
            elif e.code == 404:
                raise exceptions.ServerNotFound(self._server_url)
            elif e.code == 400:
                raise exceptions.InvalidAddress(self.auth_address())
            elif e.code == 401:  # auth error (likely clock off)
                # Not fatal: fall through to the retry below.
                logger.warning(logmessages.InvalidAuthenticationHeaders())
                #log "HTTP Error 401: UNAUTHORIZED"
            elif e.code == 500:  # pragma: no cover
                raise exceptions.ServerError(self._server_url)
            else:
                raise e  # pragma: no cover
        except http.client.HTTPException as e:
            logger.warning(repr(e))
        except URLError as e:
            logger.warning(repr(e))
        except socket.error as e:
            logger.warning(repr(e))

        # retry
        delay = self.retry_delay
        logger.info("Query retry in {0} seconds.".format(delay))
        time.sleep(delay)

    # retry limit
    logger.error("Failed to connect to {0}".format(self._server_url))
    raise exceptions.ConnectionError(self._server_url)
def _build_req(self, method, data=None, name=None, query=None):
    """
    Build request object
    :param method: HTTP Method
    :param data: body data
    :param name: key,etc.
    :param query: query params
    :return: request object
    """
    # `headers` is always a freshly created dict, so the original
    # `if h is not None:` guard was dead code (and would have made the
    # function silently return None had it ever been false).
    headers = {
        'Authorization': 'Splunk %s' % self.session_key,
        'Content-Type': 'application/json',
    }
    req = Request(self._build_uri(name, query=query),
                  to_bytes(json.dumps(data)), headers)
    req.get_method = lambda: method
    return req
def download_file(url):
    """Download *url* and return its raw content.

    Calls fatal() on an invalid URL or on any network/HTTP failure.
    """
    # Create request structure
    LOG.debug('Trying to download: {}'.format(url))
    try:
        req = Request(url)
    except ValueError as e:
        fatal('Client error. Could not download %r as the URL is invalid. Error: %r' % (url, str(e)))
    req.add_header('User-Agent', 'ARM Update Client/%s)' % uc_version)
    # Read and return
    try:
        r = urlopen(req)
        content = r.read()
        r.close()
        return content
    # HTTPError is a subclass of URLError, so it must be caught first;
    # with the original ordering the HTTPError branch was unreachable.
    except HTTPError as e:
        fatal('Could not download %r. Server error: %r' % (url, str(e)))
    except URLError as e:
        fatal('Could not download %r. Client error: %r' % (url, str(e)))
def _build_send_req(self, url, method='GET', data=None):
    """Send *data* form-encoded to *url* with Splunk auth; decode JSON reply.

    Returns None on 404; any other HTTP error is wrapped in
    SavedsearchInternalException.
    """
    headers = {
        'Authorization': 'Splunk %s' % self.session_key,
        'Content-Type': 'application/json'
    }
    try:
        # Note: cannot use json.dumps on data because splunk savedsearch
        # REST doesn't like it
        body = urlencode(data) if data else None
        req = Request(url, data=utils.to_bytes(body), headers=headers)
        req.get_method = lambda: method
        return json.loads(urlopen(req).read())
    except HTTPError as e:
        if e.code != http.client.NOT_FOUND:
            raise SavedsearchInternalException(e)
        return None
def _refresh(self, http_request):
    """Fetch a fresh access token from the GCE metadata server.

    Updates ``self.access_token`` and ``self.token_expiry`` in place;
    the *http_request* argument is not used by this implementation.
    """
    now = datetime.datetime.utcnow()
    host = os.environ.get(
        'GCE_METADATA_ROOT', 'metadata.google.internal')
    token_url = ('http://{}/computeMetadata/v1/instance/service-accounts/'
                 'default/token').format(host)
    request = Request(token_url, headers={'Metadata-Flavor': 'Google'})
    raw = urlopen(request, timeout=60).read().decode('utf-8')
    payload = json.loads(raw)
    self.access_token = payload['access_token']
    self.token_expiry = now + datetime.timedelta(
        seconds=payload['expires_in'])
def _generate_cloudwatch_input_request(self, method, data=None, name=None):
    """Build a Request against the AWS CloudWatch input REST endpoint."""
    base_url = ('%s/servicesNS/nobody/Splunk_TA_aws/'
                'splunk_ta_aws_aws_cloudwatch/%s?%s')
    headers = {
        'Authorization': 'Splunk %s' % self.session_key,
        'Content-Type': 'application/json'
    }
    # Handle the query params that are passed in
    query_params = dict(output_mode='json', count=1, offset=0)
    # Build the URL and make the request
    url = base_url % (em_common.get_server_uri(), name or '',
                      urlencode(query_params))
    body = to_bytes(urlencode(data)) if data else None
    request = Request(url, body, headers=headers)
    request.get_method = lambda: method
    return request
def fetch_dataset(url, sourcefile, destfile, totalsz):
    """Download ``url/sourcefile`` to *destfile* in 1 MiB chunks.

    *totalsz* only clamps the running byte counter.
    """
    full_url = url + '/' + sourcefile
    print(full_url)
    remote = urlopen(Request(full_url, headers={'User-Agent': 'Arhat'}))
    chunk_size = 1024 ** 2
    with open(destfile, 'wb') as out:
        received = 0
        while True:
            chunk = remote.read(chunk_size)
            if not chunk:
                break
            received = min(totalsz, received + chunk_size)
            out.write(chunk)
def _build_send_req(self, method, stanza_name=None, data=None, query=None):
    """
    Build request object
    :param method: HTTP Method
    :param data: body data
    :param stanza_name: stanza in conf file to write to
    :param query: query params
    :return: response object
    """
    headers = {
        'Authorization': 'Splunk %s' % self.session_key,
        'Content-Type': 'application/json'
    }
    '''
    The REALLY annoying thing about CONF endpoints is that, unlike KV store
    endpoints, the format in which they take data arguments is the
    application/x-www-form-urlencoded. This means, if you have
    {a: 'b', c: 'd'}, you would just stringify this for KV store calls but
    for CONF endpoints, you need to send it like &a=b&c=d.
    :param params: dict of data we want to convert to this other format
    :return: a formatted string like a=b&c=d&enough=nonsense
    '''
    try:
        # urlencode handles the form-urlencoded conversion described above.
        data_string = to_bytes(urlencode(data)) if data else None
        req = Request(url=self._build_uri(stanza_name, query=query),
                      data=data_string, headers=headers)
        req.get_method = lambda: method
        res = urlopen(req)
        return json.loads(res.read())
    except HTTPError as e:
        # 404 means the stanza does not exist; callers treat None as
        # "not found".
        if e.code == http.client.NOT_FOUND:
            return None
        else:
            raise ConfManagerInternalException(e)
def urlopen(url, auto_deflate=False):
    """
    urlopen wrapper
    """
    # The local imports deliberately shadow this wrapper's own name.
    from future.moves.urllib.request import urlopen, Request
    from noc.core.http.proxy import setup_urllib_proxies

    setup_urllib_proxies()
    if url.startswith("http://") or url.startswith("https://"):
        r = Request(
            url,
            headers={"User-Agent": "NOC/%s" % version.version.strip()})
    else:
        # Non-HTTP targets (e.g. local paths/FTP) are passed through as-is.
        r = url
    if auto_deflate and url.endswith(".gz"):
        u = urlopen(r)
        # NOTE(review): u.read() returns bytes on Python 3, where
        # six.StringIO is io.StringIO - this likely needs BytesIO there;
        # confirm the supported runtime.
        f = six.StringIO(u.read())
        return gzip.GzipFile(fileobj=f)
    return urlopen(r)
def _get_feed_root(self, url, extra_get_args={}):
    """
    Get an Atom feed of application information from the remote provider.
    """
    # NOTE(review): mutable default `extra_get_args={}` is shared across
    # calls; it is only read here, so this is safe but fragile.
    try:
        target_url = url
        # Forward GET arguments, and add user-agent.
        args_dict = {}
        headers = {}
        args_dict.update(self.request["query"])
        if (len(extra_get_args) > 0):
            args_dict.update(extra_get_args)
        if self._platformInfo:
            args_dict.update(self._platformInfo)
        args = urlencode(args_dict)
        if args != "":
            target_url += ("?" + args)
        logger.debug("Getting feed from: %s" % target_url)
        if self._agent:
            headers["User-Agent"] = self._agent
        # Validate the server certificate against the configured SSL policy
        # before issuing the request.
        bundle_paths.BundleInstaller().validate_server_cert(
            target_url, self._sslpol)
        req = Request(target_url, None, headers)
        f = urlopen(req, None, URLOPEN_TIMEOUT)
    except HTTPError as e:
        raise splunk.RESTException(e.code, e.msg)
    except URLError as e:
        logger.exception(e)
        raise splunk.RESTException(
            503,
            "Splunk is unable to connect to the Internet to find more apps."
        )
    except Exception as e:
        logger.exception(e)
        raise splunk.RESTException(404, "Resource not found")
    try:
        # safe_lxml guards against XML attacks (e.g. entity expansion).
        root = safe_lxml.parse(f).getroot()
        f.close()
        return root
    except Exception as e:
        raise splunk.InternalServerError(e)
def _url_query(self, api_path, authenticate=True):  # NOQA
    """Query the server API with retries (node-id variant).

    Returns the raw 2xx response body, maps known HTTP error codes to
    specific exceptions, and retries transport-level failures up to
    ``self.retry_limit`` before raising ConnectionError.
    """
    i = 0
    while i <= self.retry_limit:
        i += 1
        try:
            query_url = self._server_url + api_path
            req = Request(query_url)
            starttime = datetime.utcnow()
            if self.wif and authenticate:
                headers = storjcore.auth.create_headers(
                    self.btctxstore, self._get_server_address(), self.wif)
                req.add_header("Date", headers["Date"])
                req.add_header("Authorization", headers["Authorization"])
            # NOTE(review): this only times the header creation above,
            # not the network round-trip below.
            logger.info("Query: {0} generated in {1}".format(
                query_url, datetime.utcnow() - starttime))
            response = urlopen(req, timeout=30)
            if 200 <= response.code <= 299:
                return response.read()
        except HTTPError as e:
            if e.code == 409:
                raise exceptions.AddressAlreadyRegistered(
                    self.get_nodeid(), self._server_url)
            elif e.code == 404:
                raise exceptions.ServerNotFound(self._server_url + api_path)
            elif e.code == 400:
                raise exceptions.InvalidAddress(self.get_nodeid())
            elif e.code == 401:  # auth error (likely clock off)
                # log "HTTP Error 401: UNAUTHORIZED"
                logger.warning(logmessages.InvalidAuthenticationHeaders())
            elif e.code == 500:  # pragma: no cover
                raise exceptions.ServerError(self._server_url)
            else:
                raise e  # pragma: no cover
        except http.client.HTTPException as e:
            logger.warning(repr(e))
        except URLError as e:
            logger.warning(repr(e))
        except socket.error as e:
            logger.warning(repr(e))

        # retry
        delay = self.retry_delay
        logger.info("Query retry in {0} seconds.".format(delay))
        time.sleep(delay)

    # retry limit
    logger.error("Failed to connect to {0}".format(self._server_url))
    raise exceptions.ConnectionError(self._server_url)
def create_ticket(self, api_key, fields, files=None):
    """
    Create ticket on osTicket server.

    :arg api_key: (String) API-Key
    :arg fields: (String --> String) dictionary containing keys
        name, email, subject, message
    :arg files: (None or list of Strings) pathname of zip files that
        should be attached
    :returns: (int) response code
    :raises ValueError: ticket upload failed
    :raises urllib.error.HTTPError: key not accepted
    :raises urllib.error.URLError: connection problem
    """
    if not files:
        files = []
    fields["attachments"] = []
    for fn in files:
        # File is open as bytes and converted to "bytes" with base64.
        # We convert it then to a string by "decoding" it from ascii.
        # The string is inserted with the rest of the dict fields (all
        # strings). The dict is converted to JSON, which is then encoded
        # into bytes using UTF-8 encoding.
        # We probably could avoid the bytes -> string -> bytes conversion,
        # but it's easier as-is, and doesn't seem to be too costly.
        with open(fn, "rb") as f:
            encoded_data = base64.b64encode(f.read()).decode("ascii")
        att_desc = {os.path.basename(fn):
                    "data:application/zip;base64,%s" % encoded_data}
        fields["attachments"].append(att_desc)
    description = json.dumps(fields).encode("utf-8")
    # data must be bytes, but the headers can be str or bytes
    req = Request(OS_TICKET_URL, data=description,
                  headers={"X-API-Key": api_key})
    f = urlopen(req)
    response = f.getcode()
    f.close()
    if response == RESPONSE_SUCCESS:
        return
    else:
        raise ValueError('Ticket creation failed with error code %s.'
                         % response)
def shorten(self, url, user_id):
    """Shorten *url* via the configured service; returns None on failure."""
    if not (url.startswith('http://') or url.startswith('https://')):
        url = 'http://' + url
    try:
        api_url = self.shorten_template.format(
            token=str(self.token), user_ids=str(user_id), url=str(url))
        req = Request(api_url, headers={'Content-Type': 'application/json'})
        with urlopen(req) as response:
            return response.read().decode('utf-8')
    except HTTPError as error:
        self.logger.warn('Botan track error ' + str(error.code) + ':' +
                         error.read().decode('utf-8'))
        return None
    except URLError as error:
        self.logger.warn('Botan track error ' + str(error.reason))
        return None
def track(self, message, event_name='event'):
    """Report *message* to Botan; True on success, False otherwise."""
    try:
        uid = message.chat_id
    except AttributeError:
        self.logger.warn('No chat_id in message')
        return False
    payload = message.to_json()
    try:
        track_url = self.url_template.format(
            token=str(self.token), uid=str(uid), name=quote(event_name))
        req = Request(
            track_url, data=payload.encode(),
            headers={'Content-Type': 'application/json'})
        urlopen(req)
        return True
    except HTTPError as error:
        self.logger.warn('Botan track error ' + str(error.code) + ':' +
                         error.read().decode('utf-8'))
        return False
    except URLError as error:
        self.logger.warn('Botan track error ' + str(error.reason))
        return False
def send_incident(self, incident):
    """
    send incident to VictorOps

    :param incident: a dict that contains key/value info of the incident
    """
    try:
        endpoint = "%s/%s/%s" % (INTEGRATION_URL, self.api_key,
                                 self.routing_key)
        incident.update({'monitoring_tool': self.monitoring_tool})
        body = utils.to_bytes(json.dumps(incident))
        req = Request(endpoint, body, {'content-type': 'application/json'})
        response = urlopen(req)
        message, status_code = response.msg, response.code
        if status_code != 200:
            raise VictorOpsCouldNotSendAlertException(
                "status_code=%s, message=%s" % (status_code, message))
    except Exception as e:
        logger.error("Failed to send incident to VictorOps because: %s", e)
        raise VictorOpsCouldNotSendAlertException(str(e))
def download(name, url, size, target, step=None):
    """Stream *url* into *target* with a tqdm progress bar.

    Raises RuntimeError when the received byte count differs from *size*;
    optionally reports overall progress via ProgressOverall when *step*
    is given.
    """
    with tqdm(
        bar_format="{l_bar}{bar}| [{remaining}, {rate_fmt}]",
        desc="{:<18}".format(name),
        leave=False,
        total=size,
        unit="B",
        unit_scale=True,
    ) as bar:
        with target.open("wb", ensure=True) as fh:
            url_request = Request(url)
            # closing() guarantees the connection is released even on error.
            with contextlib.closing(urlopen(url_request)) as socket:
                while True:
                    block = socket.read(4096)
                    if not block:
                        break
                    fh.write(block)
                    bar.update(len(block))
                    if step:
                        ProgressOverall.done_part(step, bar.n / size)
        if bar.n != size:
            raise RuntimeError(
                "Error downloading %s: received %d bytes, expected %d"
                % (url, bar.n, size))
def forward_port(self, proto, src_port, dest_ip, dest_port=None):
    """
    Creates a new mapping for the default gateway to forward ports.
    Source port is from the perspective of the original client. For
    example, if a client tries to connect to us on port 80, the source
    port is port 80. The destination port isn't necessarily 80, however.
    We might wish to run our web server on a different port so we can
    have the router forward requests for port 80 to another port (what
    I call the destination port.)

    If the destination port isn't specified, it defaults to the source
    port. Proto is either TCP or UDP. Function returns None on success,
    otherwise it raises an exception.
    """
    proto = proto.upper()
    valid_protos = ["TCP", "UDP"]
    if proto not in valid_protos:
        raise Exception("Invalid protocol for forwarding.")
    valid_ports = range(1, 65535)
    if src_port not in valid_ports:
        raise Exception("Invalid port for forwarding.")
    # Source port is forwarded to same destination port number.
    if dest_port is None:
        dest_port = src_port
    # Use UPnP binary for forwarding on Windows.
    if platform.system() == "Windows":
        cmd = "upnpc-static.exe -a %s %s %s %s" % (get_lan_ip(),
                                                   str(src_port),
                                                   str(dest_port), proto)
        out, err = subprocess.Popen(cmd, shell=True,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE).communicate()
        if "is not recognized" in err:
            raise Exception("Missing upnpc-static.exe")
        return
    # Find gateway address.
    gateway_addr = self.find_gateway()
    if gateway_addr is None:
        raise Exception("Unable to find UPnP compatible gateway.")
    # Get control URL.
    rhost = re.findall('([^/]+)', gateway_addr)
    res = urlopen(gateway_addr, timeout=self.timeout).read().decode("utf-8")
    res = res.replace('\r', '')
    res = res.replace('\n', '')
    res = res.replace('\t', '')
    # Crude XML scraping: locate the WANIPConn1 service's controlURL and
    # the router's presentation URL by splitting on the tag text.
    pres = res.split('<serviceId>urn:upnp-org:serviceId:WANIPConn1'
                     '</serviceId>')
    p2res = pres[1].split('</controlURL>')
    p3res = p2res[0].split('<controlURL>')
    ctrl = p3res[1]
    rip = res.split('<presentationURL>')
    rip1 = rip[1].split('</presentationURL>')
    router_ip = rip1[0]
    port_map_desc = "PyP2P"
    # SOAP body for the WANIPConnection AddPortMapping action.
    msg = \
        '<?xml version="1.0"?><s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/"><s:Body><u:AddPortMapping xmlns:u="urn:schemas-upnp-org:service:WANIPConnection:1"><NewRemoteHost></NewRemoteHost><NewExternalPort>' \
        + str(src_port) \
        + '</NewExternalPort><NewProtocol>' + str(proto) + '</NewProtocol><NewInternalPort>' \
        + str(dest_port) + '</NewInternalPort><NewInternalClient>' + str(dest_ip) \
        + '</NewInternalClient><NewEnabled>1</NewEnabled><NewPortMappingDescription>' + str(port_map_desc) + '</NewPortMappingDescription><NewLeaseDuration>0</NewLeaseDuration></u:AddPortMapping></s:Body></s:Envelope>'
    # Attempt to add new port map.
    x = 'http://' + rhost[1] + '/' + ctrl
    # SOAP payloads must be bytes on Python 3.
    if sys.version_info >= (3, 0, 0):
        msg = bytes(msg, "utf-8")
    req = Request('http://' + rhost[1] + '/' + ctrl, msg)
    req.add_header('SOAPAction',
                   '"urn:schemas-upnp-org:service:WANIPConnection:1#AddPortMapping"'
                   )
    req.add_header('Content-type', 'application/xml')
    res = urlopen(req, timeout=self.timeout)
def handle_POST(self):
    """
    Install a remote application in response to an HTTP POST.
    """
    self.verifyAllowRemote()
    # Path depth decides whether the caller pinned a version.
    parts = len(self.pathParts)
    if parts == self.BASE_DEPTH + 2:
        default_version = True
    elif parts == self.BASE_DEPTH + 3:
        default_version = False
    else:
        raise splunk.BadRequest
    if HTTP_AUTH_TOKEN not in self.args:
        raise splunk.BadRequest("Missing argument: %s" % HTTP_AUTH_TOKEN)
    if HTTP_ACTION not in self.args:
        raise splunk.BadRequest("Missing argument: %s" % HTTP_ACTION)
    if self.args[HTTP_ACTION] not in (HTTP_ACTION_INSTALL,
                                      HTTP_ACTION_DOWNLOAD):
        raise splunk.BadRequest("Invalid value '%s' for argument '%s'" %
                                (self.args[HTTP_ACTION], HTTP_ACTION))
    # check if this is a cloud stack
    if isCloud(self.sessionKey):
        app_name = self.pathParts[self.BASE_DEPTH + 1]
        # Get all cloud apps and see if the app being installed is vetted
        # for cloud, i.e install_method == simple
        # TODO: Change to just querying for the app in question when
        # BASE-4074 is finished.
        getargs = {'offset': 0, 'limit': 100}
        vetted_apps = []
        while 1:
            serverResponse, serverContent = splunk.rest.simpleRequest(
                VETTED_APPS_URI, self.sessionKey, getargs)
            if serverResponse.status != 200:
                raise splunk.BadRequest(
                    'Error while querying Splunkbase. Splunkd returned %s' %
                    serverContent)
            vetted_app_data = json.loads(serverContent)
            if not vetted_app_data['results']:
                break
            else:
                getargs['offset'] += 100
                vetted_apps.extend(vetted_app_data['results'])
        for app in vetted_apps:
            if app['appid'] == app_name and app[
                    'install_method'] == VETTED_APP_INSTALL_METHOD:
                break
        else:
            # for/else: no vetted entry matched the requested app.
            raise splunk.BadRequest(
                'App %s is not vetted for Splunk Cloud.' % app_name)
    url = self._native_to_foreign_url()
    root = self._get_feed_root(url)
    if default_version:
        root = self._get_latest_version(root)
    href = self._parse_link(root)
    try:
        # Package up a Request with auth information.
        req = Request(href)
        # XXX: Converting the auth token from a POST arg to a header
        # requires us to unquote() it. If the client did not correctly
        # quote() the token, login will fail.
        req.add_header(HTTP_AUTH_HEADER,
                       unquote(self.args[HTTP_AUTH_TOKEN]))
        # Install using this Request object.
        installer = bundle_paths.BundleInstaller()
        if self.args[HTTP_ACTION] == HTTP_ACTION_INSTALL:
            b, status = installer.install_from_url(req, sslpol=self._sslpol)
            self.response.setStatus(status)
            if ((status == bundle_paths.BundleInstaller.STATUS_INSTALLED)
                    or
                    (status == bundle_paths.BundleInstaller.STATUS_UPGRADED)):
                # Migrate old-style bundles.
                logger.debug("Configuring application contents")
                try:
                    b.migrate()
                except Exception as e:
                    logger.exception(e)
                    self.addMessage("WARN",
                                    "Error during configuration: %s" % e)
                # Redirect to local application.
                self.response.setHeader("Location",
                                        self._redirect_to_local(b))
                # Let splunkd know about newly-installed app.
                logger.debug(
                    "Notifying splunkd that app has been installed")
                splunk.rest.simpleRequest('apps/local/_reload',
                                          sessionKey=self.sessionKey)
            if status == bundle_paths.BundleInstaller.STATUS_INSTALLED:
                self.addMessage("INFO",
                                "Installed application: %s" % b.name())
            elif status == bundle_paths.BundleInstaller.STATUS_UPGRADED:
                self.addMessage("INFO",
                                "Upgraded application: %s" % b.name())
            else:
                self.addMessage(
                    "WARN",
                    "Could not install application: %s" % b.name())
        else:
            assert self.args[HTTP_ACTION] == HTTP_ACTION_DOWNLOAD
            downloaded = installer.download_from_url(req,
                                                     sslpol=self._sslpol)
            self.addMessage("INFO",
                            "Downloaded application file: %s" % downloaded)
            self.response.setHeader('content-type', 'application/json')
            response_json = {"downloaded": downloaded}
            self.response.write(json.dumps(response_json))
    except splunk.ResourceNotFound:
        raise
    except splunk.AuthorizationFailed:
        raise
    except splunk.InternalServerError:
        raise
    except Exception as e:
        logger.exception(e)
        raise splunk.InternalServerError(e)
def send(self, data):
    """Send *data* as a urlencoded query string appended to the endpoint.

    The query string must remain text: concatenating the str endpoint
    with an encoded (bytes) query raises TypeError on Python 3, and on
    Python 2 the removed .encode('utf-8') was a no-op for the ASCII
    output of urlencode().
    """
    query = urlencode(self.fixUTF8(data))
    request = Request(self.endpoint + '?' + query,
                      headers={'User-Agent': self.user_agent})
    self.open(request)
def is_valid_response(self, req_args={}, **kwargs):
    """
    Submits a reCAPTCHA request for verification and returns its status.

    INPUT:

    - ``req_args`` - a dictionary; the arguments of the responding
      user's HTTP POST request

    - ``kwargs`` - a dictionary of extra keyword arguments

    OUTPUT:

    - a :class:`ChallengeResponse` instance; whether the user's
      response is empty, accepted, or rejected, with an optional
      error string

    TESTS::

        sage: from sagenb.notebook.challenge import reCAPTCHAChallenge
        sage: tmp = tmp_dir()
        sage: import sagenb.gui.notebook as n
        sage: nb = n.Notebook(tmp)
        sage: chal = reCAPTCHAChallenge(nb.conf, remote_ip = 'localhost')
        sage: req = {}
        sage: chal.is_valid_response(req).is_valid
        sage: chal.is_valid_response(req).error_code
        ''
        sage: req['recaptcha_response_field'] = ['subplotTimes']
        sage: chal.is_valid_response(req).is_valid
        False
        sage: chal.is_valid_response(req).error_code
        'incorrect-captcha-sol'
        sage: req['simple_challenge_field'] = ['VBORw0KGgoANSUhEUgAAAB']
        sage: chal.is_valid_response(req).is_valid  # random
        False
        sage: chal.is_valid_response(req).error_code  # random
        'incorrect-captcha-sol'
    """
    response_field = req_args.get('recaptcha_response_field', [None])[0]
    if not (response_field and len(response_field)):
        # Empty user response: neither accepted nor rejected.
        return ChallengeResponse(None, '')
    challenge_field = req_args.get('recaptcha_challenge_field', [None])[0]
    if not (challenge_field and len(challenge_field)):
        return ChallengeResponse(False, 'incorrect-captcha-sol')

    def encode_if_necessary(s):
        # Normalize text values to UTF-8 bytes for the POST body.
        if isinstance(s, str):
            return s.encode('utf-8')
        return s

    params = urlencode({
        'privatekey': encode_if_necessary(self.private_key),
        'remoteip': encode_if_necessary(self.remote_ip),
        'challenge': encode_if_necessary(challenge_field),
        'response': encode_if_necessary(response_field)
    })
    # NOTE(review): on Python 3, Request data must be bytes and read()
    # returns bytes (so the == "true" comparison would need decoding);
    # this looks Python-2 era - confirm the target runtime.
    request = Request(
        url="http://%s/verify" % RECAPTCHA_VERIFY_SERVER,
        data=params,
        headers={
            "Content-type": "application/x-www-form-urlencoded",
            "User-agent": "reCAPTCHA Python"
        }
    )
    httpresp = urlopen(request)
    return_values = httpresp.read().splitlines()
    httpresp.close()
    return_code = return_values[0]
    if (return_code == "true"):
        return ChallengeResponse(True)
    else:
        return ChallengeResponse(False, return_values[1])
def make_request(url):
    """Make our JSON request to GitHub"""
    headers = {
        "Authorization": "token %s" % GITHUB_API_TOKEN,
        "Content-Type": "application/json",
    }
    return Request(url, headers=headers)
def query_server(url, data=None, cert_file=None):
    """
    @param str url: URL of server to connect to
    @param dict data: optional data to include in the request
    @param str cert_file: optional path to PEM certificate file which will
        be used to validate the identity of the server

    Allows for connecting to a server with a provided URL and returns a
    response dictionary.
    """
    # Check if data is not given
    if data is None:
        data = {}
    # Make the url request
    headers = {'Content-Type': 'application/json'}
    # If no data is provided urllib2 will use GET method. If data is
    # provided urllib2 will use POST method.
    if data is not None and len(data) > 0:
        request_data = ensure_binary(json.dumps(data))
    else:
        request_data = None
    req = Request(url, request_data, headers)
    try:
        # Enforce Python version number when urllib2 added support for
        # cafile parameter
        if sys.version_info < (2, 7, 9):
            if cert_file:
                raise RuntimeError(
                    "Python version 2.7.9 or later is required to establish HTTPS connection with server."
                )
            response = urlopen(req)
        else:
            try:
                response = urlopen(req, cafile=cert_file)
            except TypeError:
                # Unsupported cafile param was passed to urlopen so retry
                # without cafile param.
                # It is unknown how this condition can arise as the cafile
                # param was added in python 2.7.9 which we check above,
                # however, the following issue has been reported:
                # TypeError: urlopen() got an unexpected keyword argument 'cafile'
                logger.warning(
                    "Host authentication is not supported. "
                    "Falling back to connection without host authentication."
                )
                response = urlopen(req)
            except ValueError:
                logger.warning(
                    "Host authentication is not supported because Python is missing SSL. "
                    "Falling back to connection without host authentication."
                )
                response = urlopen(req)
    except HTTPError as e:
        raise RuntimeError('The server could not fulfill the request.'
                           '\n'
                           'Error code: ' + str(e.code) + '.'
                           '\n'
                           'Please check the followings: \n \
- URL is correct \n \
- server is running \n \
- there is no firewall preventing connections to server')
    except URLError as e:
        raise RuntimeError('Failed to reach the server with URL: ' +
                           "\"" + url + "\""
                           '\n'
                           'Reason: ' + str(e.reason) + '.'
                           '\n'
                           'Please check the followings: \n \
- URL is correct \n \
- server is running \n \
- there is no firewall preventing connections to server')
    return_data = json.loads(response.read())
    return return_data