def ping_google(sitemap_url=None, ping_url=PING_URL):
    """
    Alerts Google that the sitemap for the current site has been updated.
    If sitemap_url is provided, it should be an absolute path to the sitemap
    for this site -- e.g., '/sitemap.xml'. If sitemap_url is not provided, this
    function will attempt to deduce it by using urls.reverse().
    """
    if sitemap_url is None:
        try:
            # First, try to get the "index" sitemap URL.
            sitemap_url = reverse('django.contrib.sitemaps.views.index')
        except NoReverseMatch:
            try:
                # Next, try for the "global" sitemap URL.
                sitemap_url = reverse('django.contrib.sitemaps.views.sitemap')
            except NoReverseMatch:
                pass

    if sitemap_url is None:
        raise SitemapNotFound("You didn't provide a sitemap_url, and the sitemap URL couldn't be auto-detected.")

    # The sites framework is required to build an absolute URL for the ping.
    if not django_apps.is_installed('django.contrib.sites'):
        raise ImproperlyConfigured("ping_google requires django.contrib.sites, which isn't installed.")
    Site = django_apps.get_model('sites.Site')
    current_site = Site.objects.get_current()
    url = "http://%s%s" % (current_site.domain, sitemap_url)
    params = urlencode({'sitemap': url})
    # Fire-and-forget GET to the ping endpoint; the response is ignored.
    urlopen("%s?%s" % (ping_url, params))
def notify(self, text, fields):
    """Send a webhook notification to Slack.

    Args:
        text (unicode):
            The text to send.

        fields (list of dict):
            A list of fields to include in the notification.
    """
    payload = {
        'username': self.settings['notify_username'],
        'icon_url': 'http://images.reviewboard.org/rbslack/logo.png',
        'attachments': [
            {
                'color': '#efcc96',
                'fallback': text,
                'fields': fields,
            },
        ],
    }

    channel = self.settings['channel']

    if channel:
        payload['channel'] = channel

    try:
        # urlopen() requires a bytes request body on Python 3;
        # json.dumps() returns text (ASCII-safe by default), so encode
        # it explicitly.
        urlopen(Request(self.settings['webhook_url'],
                        json.dumps(payload).encode('utf-8')))
    except Exception as e:
        logging.error('Failed to send notification to slack.com: %s', e,
                      exc_info=True)
def notify(self, text, fields):
    """Post a notification payload to the configured Slack webhook."""
    attachment = {
        'color': '#efcc96',
        'fallback': text,
        'fields': fields,
    }
    payload = {
        'username': self.settings['notify_username'],
        'icon_url': 'http://images.reviewboard.org/rbslack/logo.png',
        'attachments': [attachment],
    }

    if self.settings['channel']:
        payload['channel'] = self.settings['channel']

    try:
        request = Request(self.settings['webhook_url'], json.dumps(payload))
        urlopen(request)
    except Exception as e:
        logging.error('Failed to send notification to slack.com: %s', e,
                      exc_info=True)
def ping_google(sitemap_url=None, ping_url=PING_URL):
    """
    Alerts Google that the sitemap for the current site has been updated.
    If sitemap_url is provided, it should be an absolute path to the sitemap
    for this site -- e.g., '/sitemap.xml'. If sitemap_url is not provided, this
    function will attempt to deduce it by using urlresolvers.reverse().
    """
    if sitemap_url is None:
        # Prefer the "index" sitemap URL; fall back to the "global" one.
        for view_name in ("django.contrib.sitemaps.views.index",
                          "django.contrib.sitemaps.views.sitemap"):
            try:
                sitemap_url = urlresolvers.reverse(view_name)
                break
            except urlresolvers.NoReverseMatch:
                continue

    if sitemap_url is None:
        raise SitemapNotFound("You didn't provide a sitemap_url, and the sitemap URL couldn't be auto-detected.")

    from django.contrib.sites.models import Site
    current_site = Site.objects.get_current()
    full_url = "http://%s%s" % (current_site.domain, sitemap_url)
    urlopen("%s?%s" % (ping_url, urlencode({"sitemap": full_url})))
def purge(self, url):
    """Issue a PURGE request for ``url`` against the configured cache."""
    components = urlparse(url)
    host = components.hostname

    # Carry an explicit port from the original URL into the Host header.
    if components.port:
        host += (':' + str(components.port))

    # Point the request at the cache server while preserving the original
    # path/params/query/fragment; the real site goes in the Host header.
    target = urlunparse([
        self.cache_scheme,
        self.cache_netloc,
        components.path,
        components.params,
        components.query,
        components.fragment,
    ])
    request = PurgeRequest(
        url=target,
        headers={
            'Host': host,
            'User-Agent': 'Wagtail-frontendcache/' + __version__,
        },
    )

    try:
        urlopen(request)
    except HTTPError as e:
        logger.error("Couldn't purge '%s' from HTTP cache. HTTPError: %d %s",
                     url, e.code, e.reason)
    except URLError as e:
        logger.error("Couldn't purge '%s' from HTTP cache. URLError: %s",
                     url, e.reason)
def send(self, address, alert, language='en'):
    """Send a message to Slack"""
    if self._is_still_backing_off_for(address.address):
        raise DispatcherException(
            "Refusing to send Slack alert until backoff period has expired"
        )

    message = alert.messages.get(language=language, type='sms').message
    payload = json.dumps({
        'text': message,
        'username': self.username,
        'channel': self.channel,
        'icon_emoji': self.emoji,
    })
    if isinstance(payload, six.text_type):
        payload = payload.encode("utf-8")

    request = Request(address.address, payload,
                      {'Content-Type': 'application/json'})

    try:
        urlopen(request)
    except HTTPError as error:
        if error.code != HTTP_TOO_MANY_REQUESTS:
            raise
        # Slack is rate-limiting us; remember it so future sends back off.
        self._register_failure_for(address.address)
        raise DispatcherException(
            "Slack complained there were too many requests; need to back off"
        )
def purge(self, url):
    # Purge a single URL from the upstream HTTP cache by sending it a
    # PURGE request for the equivalent path.
    url_parsed = urlparse(url)
    host = url_parsed.hostname

    # Append port to host if it is set in the original URL
    if url_parsed.port:
        host += (':' + str(url_parsed.port))

    # Rebuild the URL against the cache server's scheme/netloc while
    # keeping the original path/params/query/fragment. The original host
    # goes into the Host header so the cache knows which entry to purge.
    request = PurgeRequest(url=urlunparse([
        self.cache_scheme,
        self.cache_netloc,
        url_parsed.path,
        url_parsed.params,
        url_parsed.query,
        url_parsed.fragment
    ]), headers={
        'Host': host,
        'User-Agent': 'Wagtail-frontendcache/' + __version__
    })

    # Failures are logged, not raised: purging is best-effort.
    try:
        urlopen(request)
    except HTTPError as e:
        logger.error(
            "Couldn't purge '%s' from HTTP cache. HTTPError: %d %s",
            url, e.code, e.reason)
    except URLError as e:
        logger.error("Couldn't purge '%s' from HTTP cache. URLError: %s",
                     url, e.reason)
def dispatch_webhook_event(request, webhook_targets, event, payload):
    """Dispatch the given event and payload to the given WebHook targets."""
    encoder = ResourceAPIEncoder()
    bodies = {}  # Cache of encoded payload bodies, keyed by encoding type.

    for webhook_target in webhook_targets:
        if webhook_target.use_custom_content:
            # The target supplies its own payload template; a render
            # failure skips this target rather than aborting all targets.
            try:
                body = render_custom_content(webhook_target.custom_content, payload)
            except Exception as e:
                logging.exception("Could not render WebHook payload: %s", e)
                continue
        else:
            encoding = webhook_target.encoding

            if encoding not in bodies:
                try:
                    if encoding == webhook_target.ENCODING_JSON:
                        adapter = JSONEncoderAdapter(encoder)
                        body = adapter.encode(payload, request=request)
                    elif encoding == webhook_target.ENCODING_XML:
                        adapter = XMLEncoderAdapter(encoder)
                        body = adapter.encode(payload, request=request)
                    elif encoding == webhook_target.ENCODING_FORM_DATA:
                        # Form data wraps the JSON document in a single
                        # "payload" form field.
                        adapter = JSONEncoderAdapter(encoder)
                        body = urlencode({"payload": adapter.encode(payload, request=request)})
                    else:
                        logging.error(
                            'Unexpected WebHookTarget encoding "%s" '
                            "for ID %s", encoding, webhook_target.pk
                        )
                        continue
                except Exception as e:
                    logging.exception("Could not encode WebHook payload: %s", e)
                    continue

                body = body.encode("utf-8")
                bodies[encoding] = body
            else:
                body = bodies[encoding]

        headers = {
            "X-ReviewBoard-Event": event,
            "Content-Type": webhook_target.encoding,
            "Content-Length": len(body),
            "User-Agent": "ReviewBoard-WebHook/%s" % get_package_version(),
        }

        if webhook_target.secret:
            # Sign the body (GitHub-style X-Hub-Signature header) so
            # receivers can verify its authenticity.
            signer = hmac.new(webhook_target.secret.encode("utf-8"), body, hashlib.sha1)
            headers["X-Hub-Signature"] = "sha1=%s" % signer.hexdigest()

        logging.info("Dispatching webhook for event %s to %s", event, webhook_target.url)

        # Delivery failures are logged per-target so one bad endpoint
        # doesn't stop the remaining dispatches.
        try:
            urlopen(Request(webhook_target.url, body, headers))
        except Exception as e:
            logging.exception("Could not dispatch WebHook to %s: %s", webhook_target.url, e)
def notify(self, text, fields):
    """Send a webhook notification to Slack.

    Args:
        text (unicode):
            Fallback text for the notification.

        fields (list of dict):
            Attachment fields to include in the notification.
    """
    payload = {
        "channel": self.settings["channel"],
        "username": self.settings["notify_username"],
        "icon_url": "http://images.reviewboard.org/rbslack/logo.png",
        "attachments": [{"color": "#efcc96", "fallback": text, "fields": fields}],
    }

    # urlopen() requires a bytes request body on Python 3; json.dumps()
    # returns text (ASCII-safe by default), so encode it explicitly.
    urlopen(Request(self.settings["webhook_url"],
                    json.dumps(payload).encode("utf-8")))
def ping_google(sitemap_url=None, ping_url=PING_URL):
    """
    Alerts Google that the sitemap for the current site has been updated.
    If sitemap_url is provided, it should be an absolute path to the sitemap
    for this site -- e.g., '/sitemap.xml'. If sitemap_url is not provided, this
    function will attempt to deduce it by using urls.reverse().
    """
    full_url = _get_sitemap_full_url(sitemap_url)
    query = urlencode({'sitemap': full_url})
    # Fire-and-forget GET; the ping endpoint's response is not inspected.
    urlopen('%s?%s' % (ping_url, query))
def test_urllib2_urlopen(self):
    """
    Test the File storage API with a file like object coming from
    urllib2.urlopen()
    """
    source = urlopen(self.live_server_url + '/')
    stored_filename = self.storage.save("remote_file.html", File(source))

    # Fetch the page a second time and compare it to the stored copy.
    expected = urlopen(self.live_server_url + '/')
    with self.storage.open(stored_filename) as stored_file:
        self.assertEqual(stored_file.read(), expected.read())
def send(self, address, alert, language='en'):
    """Send a message to Slack

    Args:
        address: Alert address object whose ``address`` attribute is the
            Slack webhook URL.
        alert: Alert providing the message text (its 'sms' variant).
        language: Language code used to select the message text.
    """
    params = {
        'text': alert.messages.get(language=language, type='sms').message,
        'username': self.username,
        'channel': self.channel,
        'icon_emoji': self.emoji
    }

    payload = json.dumps(params)

    # urlopen() requires a bytes request body on Python 3; json.dumps()
    # returns text, so encode it before building the request.
    if not isinstance(payload, bytes):
        payload = payload.encode('utf-8')

    request = Request(address.address, payload,
                      {'Content-Type': 'application/json'})
    urlopen(request)
def send(self, address, alert, language='en'):
    """Send a message to Slack"""
    body = json.dumps({
        'text': alert.messages.get(language=language, type='sms').message,
        'username': self.username,
        'channel': self.channel,
        'icon_emoji': self.emoji,
    })

    # The request body must be bytes, not text.
    if isinstance(body, six.text_type):
        body = body.encode("utf-8")

    urlopen(Request(address.address, body,
                    {'Content-Type': 'application/json'}))
def dispatch_webhook_event(request, webhook_targets, event, payload):
    """Dispatch the given event and payload to the given webhook targets.

    The payload is encoded once per encoding type and cached, then posted
    to each target. Targets with a secret get a GitHub-style
    ``X-Hub-Signature`` header containing an HMAC-SHA1 of the body.
    """
    # hashlib is needed for the HMAC digest; import locally in case the
    # module does not already import it.
    import hashlib

    encoder = BasicAPIEncoder()
    bodies = {}  # Cache of encoded bodies, keyed by encoding type.

    for webhook_target in webhook_targets:
        if webhook_target.use_custom_content:
            body = render_custom_content(webhook_target.custom_content,
                                         payload)
        else:
            encoding = webhook_target.encoding

            if encoding not in bodies:
                if encoding == webhook_target.ENCODING_JSON:
                    adapter = JSONEncoderAdapter(encoder)
                    body = adapter.encode(payload, request=request)
                elif encoding == webhook_target.ENCODING_XML:
                    adapter = XMLEncoderAdapter(encoder)
                    body = adapter.encode(payload, request=request)
                elif encoding == webhook_target.ENCODING_FORM_DATA:
                    adapter = JSONEncoderAdapter(encoder)
                    body = urlencode({
                        'payload': adapter.encode(payload, request=request),
                    })
                else:
                    logging.error(
                        'Unexpected WebHookTarget encoding "%s" for '
                        'ID %s', encoding, webhook_target.pk)
                    continue

                body = body.encode('utf-8')
                bodies[encoding] = body
            else:
                body = bodies[encoding]

        headers = {
            'X-ReviewBoard-Event': event,
            'Content-Type': webhook_target.encoding,
            'Content-Length': len(body),
            'User-Agent': 'ReviewBoard-WebHook/%s' % get_package_version(),
        }

        if webhook_target.secret:
            # Bug fix: hmac.new() without a digestmod defaulted to MD5 on
            # Python 2 (and raises TypeError on Python 3.8+), even though
            # the header value advertises "sha1=". Sign with SHA-1
            # explicitly so the signature matches the advertised scheme.
            signer = hmac.new(webhook_target.secret.encode('utf-8'), body,
                              hashlib.sha1)
            headers['X-Hub-Signature'] = 'sha1=%s' % signer.hexdigest()

        logging.info('Dispatching webhook for event %s to %s',
                     event, webhook_target.url)
        urlopen(Request(webhook_target.url, body, headers))
def dispatch_webhook_event(request, webhook_targets, event, payload):
    """Dispatch the given event and payload to the given webhook targets.

    Payloads are encoded once per encoding type and reused across targets.
    Targets with a secret get a GitHub-style ``X-Hub-Signature`` header
    containing an HMAC-SHA1 of the request body.
    """
    # Local import in case the module does not already import hashlib.
    import hashlib

    encoder = BasicAPIEncoder()
    bodies = {}

    for webhook_target in webhook_targets:
        if webhook_target.use_custom_content:
            body = render_custom_content(webhook_target.custom_content,
                                         payload)
        else:
            encoding = webhook_target.encoding

            if encoding not in bodies:
                if encoding == webhook_target.ENCODING_JSON:
                    adapter = JSONEncoderAdapter(encoder)
                    body = adapter.encode(payload, request=request)
                elif encoding == webhook_target.ENCODING_XML:
                    adapter = XMLEncoderAdapter(encoder)
                    body = adapter.encode(payload, request=request)
                elif encoding == webhook_target.ENCODING_FORM_DATA:
                    adapter = JSONEncoderAdapter(encoder)
                    body = urlencode({
                        'payload': adapter.encode(payload, request=request),
                    })
                else:
                    logging.error('Unexpected WebHookTarget encoding "%s" for '
                                  'ID %s', encoding, webhook_target.pk)
                    continue

                body = body.encode('utf-8')
                bodies[encoding] = body
            else:
                body = bodies[encoding]

        headers = {
            'X-ReviewBoard-Event': event,
            'Content-Type': webhook_target.encoding,
            'Content-Length': len(body),
            'User-Agent': 'ReviewBoard-WebHook/%s' % get_package_version(),
        }

        if webhook_target.secret:
            # Bug fix: the header advertises "sha1=", but hmac.new()
            # without a digestmod defaulted to MD5 on Python 2 and raises
            # TypeError on Python 3.8+. Use SHA-1 explicitly.
            signer = hmac.new(webhook_target.secret.encode('utf-8'), body,
                              hashlib.sha1)
            headers['X-Hub-Signature'] = 'sha1=%s' % signer.hexdigest()

        logging.info('Dispatching webhook for event %s to %s',
                     event, webhook_target.url)
        urlopen(Request(webhook_target.url, body, headers))
def clean_idonethis_api_token(self):
    """Clean and validate the 'idonethis_api_token' field.

    This performs a test against the I Done This authentication test
    endpoint to ensure that the provided API token is valid. We only
    care if the request is successful, so we ignore the returned
    user data.

    Returns:
        unicode:
        Validated API token with leading and trailing whitespace
        removed, or an empty string if the API token is empty.

    Raises:
        django.core.exceptions.ValidationError:
            Raised if the API token validation fails.
    """
    api_token = self.cleaned_data['idonethis_api_token'].strip()

    if not api_token:
        # An empty token disables the integration; nothing to validate.
        return ''

    # 'noop' is the authentication test endpoint described above.
    request = create_idonethis_request('noop', api_token)

    logging.debug('IDoneThis: Validating API token for user "%s", '
                  'request "%s %s"',
                  self.user.username,
                  request.get_method(),
                  request.get_full_url())

    try:
        urlopen(request)
    except (HTTPError, URLError) as e:
        # HTTPError carries a readable response body with error details;
        # a plain URLError only has a reason.
        if isinstance(e, HTTPError):
            error_info = '%s, error data: %s' % (e, e.read())
        else:
            error_info = e.reason

        logging.error('IDoneThis: Failed to validate API token for user '
                      '"%s", request "%s %s": %s',
                      self.user.username,
                      request.get_method(),
                      request.get_full_url(),
                      error_info)

        raise forms.ValidationError(
            ugettext('Error validating the API Token. Make sure the token '
                     'matches your I Done This Account Settings.'))

    return api_token
def get_urls(self, query, start, **kwargs):
    """Yield unescaped result URLs for the given search query."""
    url = self.make_query(query, start, **kwargs)
    logger.info("Fetching results from %s", url)

    raw = urlopen(url).read().decode("utf8")
    payload = json.loads(raw)

    for result in payload["responseData"]["results"]:
        yield result["unescapedUrl"]
def _get_response(url, xml_body):
    """Takes and returns an ElementTree xml document."""
    body = ElementTree.tostring(xml_body, encoding='utf-8')
    response = urlopen(Request(url, body))
    document = ElementTree.fromstring(response.read())
    response.close()
    return document
def purge(self, url):
    """Ask Cloudflare to purge ``url`` from its cache (best-effort)."""
    purge_params = {
        'email': self.cloudflare_email,
        'tkn': self.cloudflare_token,
        'a': 'zone_file_purge',
        'z': urlparse(url).netloc,
        'url': url,
    }

    try:
        response = urlopen(
            'https://www.cloudflare.com/api_json.html',
            data=urlencode(purge_params).encode('utf-8'))
    except HTTPError as e:
        logger.error(
            "Couldn't purge '%s' from Cloudflare. HTTPError: %d %s",
            url, e.code, e.reason)
        return
    except URLError as e:
        logger.error("Couldn't purge '%s' from Cloudflare. URLError: %s",
                     url, e.reason)
        return

    body = json.loads(response.read().decode('utf-8'))

    if body['result'] == 'error':
        logger.error(
            "Couldn't purge '%s' from Cloudflare. Cloudflare error '%s'",
            url, body['msg'])
        return
def _get_user_team_ids_uncached():
    # Fetch and parse the set of team hash IDs for the user from the
    # I Done This API. Closure: `api_token` and `user` come from the
    # enclosing scope.
    request = create_idonethis_request(request_path='teams',
                                       api_token=api_token)

    logging.debug(
        'IDoneThis: Loading teams for user "%s", '
        'request "%s %s"',
        user.username,
        request.get_method(),
        request.get_full_url())

    try:
        teams_data = urlopen(request).read()
    except (HTTPError, URLError) as e:
        # HTTPError carries a readable response body with error details;
        # a plain URLError only has a reason.
        if isinstance(e, HTTPError):
            error_info = '%s, error data: %s' % (e, e.read())
        else:
            error_info = e.reason

        logging.error(
            'IDoneThis: Failed to load teams for user "%s", '
            'request "%s %s": %s',
            user.username,
            request.get_method(),
            request.get_full_url(),
            error_info)
        raise

    # Parse failures are logged with the raw payload for debugging and
    # then re-raised for the caller to handle.
    try:
        return set(t['hash_id'] for t in json.loads(teams_data))
    except Exception as e:
        logging.error(
            'IDoneThis: Failed to parse teams for user "%s": '
            '%s, teams data: %s',
            user.username,
            e,
            teams_data)
        raise
def purge(self, url):
    """Best-effort purge of ``url`` from the Cloudflare cache."""
    request_body = urlencode({
        "email": self.cloudflare_email,
        "tkn": self.cloudflare_token,
        "a": "zone_file_purge",
        "z": urlparse(url).netloc,
        "url": url,
    }).encode("utf-8")

    try:
        response = urlopen("https://www.cloudflare.com/api_json.html",
                           data=request_body)
    except HTTPError as e:
        logger.error("Couldn't purge '%s' from Cloudflare. HTTPError: %d %s",
                     url, e.code, e.reason)
        return
    except URLError as e:
        logger.error("Couldn't purge '%s' from Cloudflare. URLError: %s",
                     url, e.reason)
        return

    result = json.loads(response.read().decode("utf-8"))

    if result["result"] == "error":
        logger.error("Couldn't purge '%s' from Cloudflare. "
                     "Cloudflare error '%s'", url, result["msg"])
        return
def get_file_http(self, url, path, revision):
    """Return the contents of a file fetched over HTTP(S).

    Authentication is performed with HTTP Basic auth when a username is
    configured.

    Args:
        url (unicode): The URL to fetch the file contents from.
        path (unicode): The path of the file, as referenced in the diff.
        revision: The revision of the file, as referenced in the diff.

    Returns:
        bytes: The contents of the file.

    Raises:
        FileNotFoundError: The file was not found (HTTP 404).
        SCMError: Any other HTTP or unexpected error.
    """
    logging.info('Fetching file from %s' % url)

    try:
        request = URLRequest(url)

        if self.username:
            # Bug fix: base64.b64encode() requires bytes on Python 3, so
            # build the credentials as bytes and decode the result for
            # use in the header.
            credentials = '%s:%s' % (self.username, self.password)
            auth_string = base64.b64encode(
                credentials.encode('utf-8')).decode('ascii')
            request.add_header('Authorization', 'Basic %s' % auth_string)

        return urlopen(request).read()
    except HTTPError as e:
        if e.code == 404:
            logging.error('404')
            raise FileNotFoundError(path, revision)
        else:
            msg = "HTTP error code %d when fetching file from %s: %s" % \
                  (e.code, url, e)
            logging.error(msg)
            raise SCMError(msg)
    except Exception as e:
        msg = "Unexpected error fetching file from %s: %s" % (url, e)
        logging.error(msg)
        raise SCMError(msg)
def generate_file(self):
    """Write the humans.txt file listing GitHub contributors."""
    repos_data = json.load(urlopen(GITHUB_REPOS))
    githubbers = self.get_github(repos_data)

    path = os.path.join(settings.HUMANSTXT_ROOT, "humans.txt")
    with open(path, 'w') as target:
        self.write_to_file(githubbers, target,
                           "Contributors on GitHub", "Developer")
def _get_user_team_ids_uncached():
    # Load the set of team hash IDs for the user from the I Done This API.
    request = create_idonethis_request(request_path='teams',
                                       api_token=api_token)

    logging.debug('IDoneThis: Loading teams for user "%s", '
                  'request "%s %s"',
                  user.username,
                  request.get_method(),
                  request.get_full_url())

    try:
        teams_data = urlopen(request).read()
    except (HTTPError, URLError) as e:
        if isinstance(e, HTTPError):
            details = '%s, error data: %s' % (e, e.read())
        else:
            details = e.reason

        logging.error('IDoneThis: Failed to load teams for user "%s", '
                      'request "%s %s": %s',
                      user.username,
                      request.get_method(),
                      request.get_full_url(),
                      details)
        raise

    return {team['hash_id'] for team in json.loads(teams_data)}
def get_metric_data(target, start="-5min", end="now"):
    """
    Retrieves raw datapoints from a graphite target for a given period of
    time.

    :param target: A metric path string or a list of multiple metric paths
    :param start: A start time specification that Graphite will accept.
    :param end: An end time specification that Graphite will accept.

    :returns: A raw, response from Graphite. Normally a list of dicts that
              represent the names and datapoints of each matched target,
              like so::

                  [{'target': 'x', 'datapoints': [(value, timestamp), ...]}]

    """
    if not target:
        return []  # no point in wasting time on http requests for no data

    base = CONFIG.get("graphiteweb", "base")
    url = urljoin(base, "/render/")

    # What does Graphite accept of formats? Lets check if the parameters are
    # datetime objects and try to force a format then
    if isinstance(start, datetime):
        start = start.strftime('%H:%M%Y%m%d')
    if isinstance(end, datetime):
        end = end.strftime('%H:%M%Y%m%d')

    query = {
        'target': target,
        'from': start,
        'until': end,
        'format': 'json',
    }
    query = urlencode(query, True)
    _logger.debug("get_metric_data%r", (target, start, end))

    req = Request(url, data=query.encode('utf-8'))
    response = None
    try:
        response = urlopen(req)
        json_data = json.load(codecs.getreader('utf-8')(response))
        _logger.debug("get_metric_data: returning %d results",
                      len(json_data))
        return json_data
    except HTTPError as err:
        # Bug fix: the two adjacent literals previously concatenated to
        # "...fetching %swith data..." — a space was missing between them.
        _logger.error("Got a 500 error from graphite-web when fetching %s "
                      "with data %s", err.url, query)
        _logger.error("Graphite output: %s", err.fp.read())
        raise errors.GraphiteUnreachableError(
            "{0} is unreachable".format(base), err)
    except URLError as err:
        raise errors.GraphiteUnreachableError(
            "{0} is unreachable".format(base), err)
    except ValueError:
        # response could not be decoded
        return []
    finally:
        # Close the response if it was ever opened. (The old code closed
        # an unbound name and relied on catching NameError.)
        if response is not None:
            response.close()
def raw_metric_query(query):
    """Runs a query for metric information against Graphite's REST API.

    :param query: A search string, e.g. "nav.devices.some-gw_example_org.*"
    :returns: A list of matching metrics, each represented by a dict.
    """
    base = CONFIG.get("graphiteweb", "base")
    url = urljoin(base, "/metrics/find")
    url = "%s?%s" % (url, urlencode({'query': query}))

    req = Request(url)
    response = None
    try:
        # Bug fix: the old code read via urlopen(req).read() and then
        # closed a `response` name that was never bound (its finally
        # always raised and swallowed NameError). Bind the response so
        # it can actually be closed.
        response = urlopen(req)
        return json.loads(response.read().decode('utf-8'))
    except URLError as err:
        raise errors.GraphiteUnreachableError(
            "{0} is unreachable".format(base), err)
    except ValueError:
        # response could not be decoded
        return []
    finally:
        if response is not None:
            response.close()
def index(request, uri):
    """
    Proxies render requests to graphite-web, as configured in graphite.conf
    """
    base = CONFIG.get('graphiteweb', 'base')

    if request.method in ('GET', 'HEAD'):
        query = _inject_default_arguments(request.GET)
        # NOTE(review): due to ternary precedence this evaluates as
        # (uri + '?' + query) if query else '' — with an empty query the
        # whole path is dropped and the request proxies to `base` itself.
        # Confirm whether that is intended.
        url = urljoin(base, uri + ('?' + query) if query else '')
        req = Request(url)
    elif request.method == 'POST':
        data = _inject_default_arguments(request.POST)
        url = urljoin(base, uri)
        req = Request(url, data)
    else:
        return HttpResponseNotAllowed(['GET', 'POST', 'HEAD'])

    LOGGER.debug("proxying request to %r", url)
    proxy = urlopen(req)
    headers = proxy.info()
    # NOTE(review): getheader() is the Python 2 httplib/mimetools message
    # API; on Python 3 this would be headers.get() — confirm the target
    # runtime.
    content_type = headers.getheader('Content-Type', 'text/html')

    if request.method == 'HEAD':
        # HEAD: forward headers only, without reading the proxied body.
        response = HttpResponse(content_type=content_type)
        response['Content-Length'] = headers.getheader('Content-Length', '0')
    else:
        response = HttpResponse(proxy.read(), content_type=content_type)

    response['X-Where-Am-I'] = request.get_full_path()
    return response
def _send_web_request(self, url, payload, attempts=1):
    """Send out a web request and retry on failure.

    TODO: Currently this is a blocking operation. Devising a way to send
    these requests without blocking would be benificial.

    Args:
        url (unicode):
            The URL to send the request to.

        payload (unicode):
            The JSON-encoded payload to send.

        attempts (int):
            The number of retry attempts left.

    Returns:
        The response from ``urlopen()`` on success, or ``None`` if all
        attempts failed.
    """
    arguments = urlencode({
        'payload': payload,
    })

    # Bug fix: Request.add_data() was removed in Python 3; pass the body
    # through the constructor instead (which also makes this a POST).
    # urlencode() output is ASCII, so encoding to bytes is safe on both
    # Python 2 and 3.
    request = Request(url, arguments.encode('utf-8'))

    while attempts:
        try:
            return urlopen(request)
        except URLError:
            attempts -= 1
            logging.warning('Sending WebHook Request failed: %s ', url)
def check_file(self, filename, url):
    """Download a file and check for expected headers.

    This makes sure that when we fetch a file via its URL, the returned
    file's headers would instruct the browser to force a download,
    rather than view the contents inline.

    Args:
        filename (unicode):
            The name of the file.

        url (unicode):
            The URL of the file.

    Returns:
        bool:
        ``True`` if the file could be downloaded and the headers contain
        ``Content-Disposition: attachment``.
    """
    # Exceptions coming from this will be caught higher up.
    info = urlopen(url).info()
    disposition = info.get('Content-Disposition', '')

    return disposition.startswith('attachment')
def check_file(self, filename, url):
    """Download a file and compare the resulting response to the file.

    This makes sure that when we fetch a file via its URL, the returned
    contents are identical to the file contents in storage. An HTTP 403
    response is also treated as a success.

    Args:
        filename (unicode):
            The name of the file.

        url (unicode):
            The URL of the file.

    Returns:
        bool:
        ``True`` if the file could be downloaded (or a HTTP 403 was hit)
        and the contents matched the expected value.
    """
    try:
        downloaded = urlopen(url).read()
    except HTTPError as e:
        if e.code != 403:
            raise e

        # An HTTP 403 is also an acceptable response.
        return True

    with self.storage.open(filename, 'r') as f:
        return downloaded == f.read()
def _download_file(test_app, test_file):
    """Download a Django test-suite file into the local test app directory.

    The file is only fetched if it does not already exist locally. A 404
    from the download URL is logged and ignored.
    """
    download_url = DJANGO_TEST_DOWNLOAD_URL.format(
        version=DJANGO_VERSION,
        test_app=test_app,
        test_file=test_file,
    )
    to_dir = os.path.join(
        DJANGO_TEST_APP_PATH,
        test_app,
    )

    if not os.path.isdir(to_dir):
        os.makedirs(to_dir)

    to_file = os.path.join(
        to_dir,
        test_file,
    )

    if not os.path.isfile(to_file):
        try:
            # Bug fix: fetch before opening the destination, so a failed
            # download doesn't leave an empty file behind (which would be
            # treated as already downloaded on the next run).
            res = request.urlopen(download_url)
            try:
                with open(to_file, 'wb') as fp:
                    fp.write(res.read())
            finally:
                res.close()
            logger.info("Download: %s", download_url)
        except HTTPError:
            # logger.warn() is a deprecated alias of warning().
            logger.warning("Not found: %s", download_url)
def get_thumbnail(cls, image, operations=None, timeout=None):
    # Return a cached thumbnail for `image`, generating and storing it on
    # first request. `operations` is an ordered list of processor
    # operations; `timeout` controls the cache entry's lifetime.
    operations = operations or []
    url = None

    # A string image may be an HTTP(S) URL; in that case the bytes are
    # fetched lazily below, only if no cached thumbnail already exists.
    if isinstance(image, six.string_types):
        if urlsplit(image).scheme in ('http', 'https'):
            url = image
            image = six.BytesIO()
            image.path = url

    # Look up any previously generated thumbnail for this image/operations.
    op_id = cls.op_id(operations)
    entries = cls.get_entries(image)
    if entries is None:
        entries = {}
    cached_path = entries.get(op_id)

    if cached_path is not None and not cls.storage.exists(cached_path):
        # Something in cache but no file, drop entry
        del entries[op_id]
        cached_path = None

    if not cached_path:
        # Stable storage key derived from the source path and operations.
        img_id = hashlib.md5(
            force_bytes('{0}{1}'.format(image.path, repr(operations)))).hexdigest()

        # Open URL if needed
        if url:
            rsp = None
            try:
                rsp = urlopen(url)
                image.write(rsp.read())
                image.seek(0)
            finally:
                if rsp:
                    rsp.close()

        # Create thumbnail
        dest_file = ContentFile('')
        if hasattr(image, 'closed') and image.closed:
            image.open()
        with cls.Processor(image) as p:
            p.orientation()
            p.operations(*operations).save(dest_file)
            # Shard storage paths by the first two hash-digit pairs to
            # avoid a huge flat directory.
            cached_path = '{0}.{1}'.format(
                os.path.join(img_id[0:2], img_id[2:4], img_id), p.format)
            cls.storage.save(cached_path, dest_file)
        del dest_file
        if hasattr(image, 'close'):
            image.close()

        # Record the new thumbnail in the cache.
        entries[op_id] = cached_path
        cls.set_entries(image, entries, timeout)

    return FileWrapper(cached_path, cls.storage)
def clean_idonethis_api_token(self):
    """Clean and validate the 'idonethis_api_token' field.

    This performs a test against the I Done This authentication test
    endpoint to ensure that the provided API token is valid. We only
    care if the request is successful, so we ignore the returned
    user data.

    Returns:
        unicode:
        Validated API token with leading and trailing whitespace
        removed, or an empty string if the API token is empty.

    Raises:
        django.core.exceptions.ValidationError:
            Raised if the API token validation fails.
    """
    api_token = self.cleaned_data['idonethis_api_token'].strip()

    if not api_token:
        return ''

    request = create_idonethis_request('noop', api_token)

    logging.debug('IDoneThis: Validating API token for user "%s", '
                  'request "%s %s"',
                  self.user.username,
                  request.get_method(),
                  request.get_full_url())

    try:
        urlopen(request)
    except (HTTPError, URLError) as e:
        if isinstance(e, HTTPError):
            details = '%s, error data: %s' % (e, e.read())
        else:
            details = e.reason

        logging.error('IDoneThis: Failed to validate API token for user '
                      '"%s", request "%s %s": %s',
                      self.user.username,
                      request.get_method(),
                      request.get_full_url(),
                      details)

        raise forms.ValidationError(
            ugettext('Error validating the API Token. Make sure the token '
                     'matches your I Done This Account Settings.'))

    return api_token
def _check_boundary_validity(boundary_url):
    """Check that a given boundary URL matches a boundary on the web service."""
    # The URL must look like /boundaries/<set>/<slug>/ before we bother
    # asking the service.
    if re.search(r'^/boundaries/[^/\s]+/[^/\s]+/$', boundary_url) is None:
        return False

    try:
        response = urlopen(urljoin(app_settings.BOUNDARYSERVICE_URL,
                                   boundary_url))
    except HTTPError:
        return False

    return response.code == 200
def sendsms(self, phone, msgs):
    """
    Send SMS by calling the assigned URL with appropriate parametres

    Arguments:
        ``phone'' is the phone number the messages are to be dispatched to.
        ``msgs'' is a list of messages ordered with the most severe first.
        Each message is a tuple with ID, text and severity of the message.

    Returns five values:
        The formatted SMS.
        A list of IDs of sent messages.
        A list of IDs of ignored messages.
        A boolean which is true for success and false for failure.
        An integer which is the sending ID if available or 0 otherwise.
    """
    # Format SMS
    (sms, sent, ignored) = self.formatsms(msgs)
    sms = quote_plus(sms)

    # Format HTTP GET request from the configured URL template
    url = self.url % {'phone': phone, 'sms': sms}

    # Send SMS; this dispatcher has no sending ID, so it is always 0.
    smsid = 0
    try:
        urlopen(url)
        result = True
    except HTTPError as ex:
        self.logger.error('HTTP error: <%s>: %s (%s).'
                          % (ex.url, ex.msg, ex.code))
        result = False

    self.logger.debug(
        'HttpGetDispatcher response: %s, %s, %s, %s, %s',
        sms, sent, ignored, result, smsid,
    )
    return (sms, sent, ignored, result, smsid)
def download(self, url):
    """Fetch an image from ``url`` and save it locally."""
    logger.info("Downloading %s", url)

    try:
        payload = urlopen(url, timeout=10).read()
        img = Image.open(BytesIO(payload))
    except IOError as ex:
        logger.error("Failed to download image: %s", str(ex))
    else:
        img.save(self.filename(url))
def get_file_http(self, url, path, revision):
    """Return the contents of a file from an HTTP(S) URL.

    This is a convenience for looking up the contents of files that are
    referenced in diffs through an HTTP(S) request.

    Authentication is performed using the username and password provided
    (if any).

    Args:
        url (unicode):
            The URL to fetch the file contents from.

        path (unicode):
            The path of the file, as referenced in the diff.

        revision (Revision):
            The revision of the file, as referenced in the diff.

    Returns:
        bytes:
        The contents of the file.

    Raises:
        reviewboard.scmtools.errors.FileNotFoundError:
            The file could not be found.

        reviewboard.scmtools.errors.SCMError:
            Unexpected error in fetching the file. This may be an
            unexpected HTTP status code.
    """
    logging.info('Fetching file from %s' % url)

    try:
        request = URLRequest(url)

        if self.username:
            # Bug fix: base64.b64encode() requires bytes on Python 3, so
            # encode the credentials first and decode the result for the
            # header value.
            credentials = '%s:%s' % (self.username, self.password)
            auth_string = base64.b64encode(
                credentials.encode('utf-8')).decode('ascii')
            request.add_header('Authorization', 'Basic %s' % auth_string)

        return urlopen(request).read()
    except HTTPError as e:
        if e.code == 404:
            logging.error('404')
            raise FileNotFoundError(path, revision)
        else:
            msg = "HTTP error code %d when fetching file from %s: %s" % \
                  (e.code, url, e)
            logging.error(msg)
            raise SCMError(msg)
    except Exception as e:
        msg = "Unexpected error fetching file from %s: %s" % (url, e)
        logging.error(msg)
        raise SCMError(msg)
def clean(self):
    """Clean the form, verifying the reCAPTCHA response if enabled.

    Raises:
        django.core.exceptions.ValidationError:
            The user skipped or failed the reCAPTCHA check, or the
            verification request to Google failed.
    """
    if self.verify_recaptcha:
        recaptcha_response = self.cleaned_data.get('g-recaptcha-response')

        if not recaptcha_response:
            raise ValidationError([
                _("You must verify that you're not a robot by clicking "
                  "\"'I'm not a robot.\"")
            ])

        # urlopen() requires a bytes POST body on Python 3; urlencode()
        # output is ASCII-safe.
        data = urlencode({
            'secret': settings.RECAPTCHA_PRIVATE_KEY,
            'response': recaptcha_response,
            'remote-ip': self.request.META.get('REMOTE_ADDR'),
        }).encode('utf-8')

        try:
            resp = urlopen(
                'https://www.google.com/recaptcha/api/siteverify', data)
            payload = resp.read()
        except URLError as e:
            # Bug fix: only HTTPError provides .code and .read(); a plain
            # URLError (e.g. a connection failure) has only .reason, so
            # the old logging call itself raised AttributeError here.
            # getattr() keeps both cases safe.
            logging.exception('Could not make reCAPTCHA request: HTTP %s: '
                              '%s', getattr(e, 'code', 'unknown'),
                              getattr(e, 'reason', e))
            raise ValidationError([
                _('Could not validate reCAPTCHA. Please contact an '
                  'administrator.'),
            ])

        try:
            payload = json.loads(payload)
        except ValueError:
            logging.exception('Could not parse JSON payload from %r',
                              payload)
            raise ValidationError([
                _('Could not validate reCAPTCHA. Please contact an '
                  'administrator.'),
            ])

        try:
            if not payload['success']:
                raise ValidationError([
                    _('Invalid reCAPTCHA response.'),
                ])
        except KeyError:
            logging.exception('No "success" key in reCAPTCHA payload %r',
                              payload)
            raise ValidationError([
                _('Could not validate reCAPTCHA. Please contact an '
                  'administrator.'),
            ])

    return super(RecaptchaFormMixin, self).clean()
def http_request(self, url, body=None, headers=None, method='GET',
                 username=None, password=None):
    """Perform some HTTP operation on a given URL.

    If the ``username`` and ``password`` arguments are provided, the
    headers required for HTTP Basic Authentication will be added to
    the request.

    Args:
        url (unicode):
            The URL to open.

        body (unicode, optional):
            The request body.

        headers (dict, optional):
            Headers to include in the request.

        method (unicode, optional):
            The HTTP method to use to perform the request.

        username (unicode, optional):
            The username to use for HTTP Basic Authentication.

        password (unicode, optional):
            The password to use for HTTP Basic Authentication.

    Returns:
        tuple:
        A tuple of:

        * The response body (:py:class:`bytes`)
        * The response headers (:py:class:`dict`)

    Raises:
        urllib2.HTTPError:
            When the HTTP request fails.

        urllib2.URLError:
            When there is an error communicating with the URL.
    """
    request = URLRequest(url, body, headers, method=method)

    # Both credentials must be supplied for Basic auth to be attached.
    if username is not None and password is not None:
        request.add_basic_auth(username, password)

    response = urlopen(request)

    return response.read(), response.headers
def clean(self):
    """Validate the form, verifying the reCAPTCHA response if enabled.

    When ``self.verify_recaptcha`` is set, the ``g-recaptcha-response``
    value is POSTed to Google's ``siteverify`` endpoint and the form is
    rejected unless Google reports success.

    Returns:
        dict: The cleaned form data from the parent class.

    Raises:
        django.core.exceptions.ValidationError:
            The reCAPTCHA response was missing, invalid, or could not be
            verified.
    """
    if self.verify_recaptcha:
        recaptcha_response = self.cleaned_data.get('g-recaptcha-response')

        if not recaptcha_response:
            raise ValidationError([
                _("You must verify that you're not a robot by clicking "
                  "\"I'm not a robot.\"")
            ])

        # Google's siteverify API expects the parameter "remoteip"
        # (not "remote-ip").
        data = urlencode({
            'secret': settings.RECAPTCHA_PRIVATE_KEY,
            'response': recaptcha_response,
            'remoteip': self.request.META.get('REMOTE_ADDR'),
        })

        try:
            # urlopen() requires a bytes payload on Python 3; the
            # urlencoded string is ASCII-safe.
            resp = urlopen(
                'https://www.google.com/recaptcha/api/siteverify',
                data.encode('utf-8'))
            payload = resp.read()
        except URLError as e:
            # Only HTTPError (a URLError subclass) carries a status
            # code and a readable body. Plain URLErrors (DNS failure,
            # connection refused, ...) only have .reason; the old code
            # accessed e.code/e.read() unconditionally and raised
            # AttributeError, masking the real failure.
            if isinstance(e, HTTPError):
                logging.exception('Could not make reCAPTCHA request: '
                                  'HTTP %s: %s', e.code, e.read())
            else:
                logging.exception('Could not make reCAPTCHA request: %s',
                                  e.reason)

            raise ValidationError([
                _('Could not validate reCAPTCHA. Please contact an '
                  'administrator.'),
            ])

        try:
            payload = json.loads(payload)
        except ValueError:
            logging.exception('Could not parse JSON payload from %r',
                              payload)
            raise ValidationError([
                _('Could not validate reCAPTCHA. Please contact an '
                  'administrator.'),
            ])

        try:
            if not payload['success']:
                raise ValidationError([
                    _('Invalid reCAPTCHA response.'),
                ])
        except KeyError:
            logging.exception('No "success" key in reCAPTCHA payload %r',
                              payload)
            raise ValidationError([
                _('Could not validate reCAPTCHA. Please contact an '
                  'administrator.'),
            ])

    return super(RecaptchaFormMixin, self).clean()
def dispatch(request, handlers, event, payload):
    """Dispatch the given event and payload to the given handlers.

    The payload is serialized to JSON once, then POSTed to each
    handler's URL with an HMAC signature derived from that handler's
    secret.

    Args:
        request (HttpRequest):
            The request that triggered the event; passed through to the
            payload encoder.

        handlers (list):
            The webhook handlers to notify. Each must have ``secret``
            and ``url`` attributes.

        event (unicode):
            The name of the event being dispatched.

        payload (object):
            The payload to serialize and send.
    """
    # Local import so this fix doesn't depend on the (unseen)
    # top-of-file import block.
    import hashlib

    encoder = BasicAPIEncoder()
    adapter = JSONEncoderAdapter(encoder)
    body = adapter.encode(payload, request=request)
    body = body.encode('utf-8')

    headers = {
        'X-ReviewBoard-Event': event,
        'Content-Type': 'application/json',
        'Content-Length': len(body),
    }

    for handler in handlers:
        # hmac.new() requires an explicit digest on Python 3.8+.
        # MD5 was the implicit default previously, so making it
        # explicit preserves the signatures receivers already verify.
        signer = hmac.new(handler.secret.encode('utf-8'), body,
                          hashlib.md5)
        headers['X-ReviewBoard-Signature'] = signer.hexdigest()

        logging.info('Dispatching webhook for event %s to %s',
                     event, handler.url)
        urlopen(Request(handler.url, body, headers))
def download_and_compare(self, to_download):
    """Download a file and compare it with the stored copy.

    Args:
        to_download (unicode):
            The name of the file, relative to the download directory
            and the storage backend.

    Returns:
        bool: Whether the downloaded contents match the stored copy.
        An HTTP 403 from the server is treated as a success.
    """
    try:
        data = urlopen(_get_url(self.directory) + to_download).read()
    except HTTPError as e:
        # An HTTP 403 is also an acceptable response.
        if e.code == 403:
            return True
        else:
            # Bare raise preserves the original traceback;
            # ``raise e`` would truncate it at this frame.
            raise

    # NOTE(review): urlopen().read() yields bytes, but the storage is
    # opened in text mode ('r'). This assumes the backend returns
    # bytes here -- TODO confirm, otherwise the comparison is always
    # False on Python 3.
    with self.storage.open(to_download, 'r') as f:
        return data == f.read()
def get_list_of_boundaries(self):
    """Fetch every boundary in this instance's boundary set.

    Pages through the boundary service until no ``next`` link remains.

    Returns:
        list: The accumulated boundary objects. Empty when no
        ``boundary_set`` is configured. (Previously this path returned
        ``{}``, which was inconsistent with the list returned by the
        populated path.)
    """
    if not self.boundary_set:
        return []

    set_url = (app_settings.BOUNDARYSERVICE_URL + 'boundaries/' +
               self.boundary_set + '/?limit=0')
    boundaries = []

    while set_url:
        set_data = json.loads(urlopen(set_url).read().decode())
        boundaries.extend(set_data['objects'])

        if set_data['meta'].get('next'):
            set_url = urljoin(app_settings.BOUNDARYSERVICE_URL,
                              set_data['meta']['next'])
        else:
            return boundaries
def notify(self, text, fields):
    """Send a webhook notification to Slack.

    Args:
        text (unicode):
            The text to send.

        fields (list of dict):
            A list of fields to include in the notification.
    """
    payload = {
        'username': self.settings['notify_username'],
        'icon_url': 'http://images.reviewboard.org/rbslack/logo.png',
        'attachments': [
            {
                'color': '#efcc96',
                'fallback': text,
                'fields': fields,
            },
        ],
    }

    channel = self.settings['channel']

    if channel:
        payload['channel'] = channel

    # Consistent with the other notify() implementations: a delivery
    # failure is logged rather than propagated, so a Slack outage
    # can't break the caller.
    try:
        urlopen(Request(self.settings['webhook_url'],
                        json.dumps(payload)))
    except Exception as e:
        logging.error('Failed to send notification to slack.com: %s',
                      e, exc_info=True)
def fetch_feed():
    """Fetch the feed at ``url``, parse it, and render it to a string.

    Renders ``template_name`` with the parsed feed (as ``parser``)
    plus any entries from ``extra_context``.
    """
    import feedparser

    raw = urlopen(url).read()
    parsed = feedparser.parse(raw)

    context = {'parser': parsed}
    context.update(extra_context)

    return render_to_string(template_name, context, request)
def get_github(self, data=None):
    """Build Human entries from GitHub contributor data.

    Args:
        data (list of dict, optional):
            Pre-fetched contributor data. When ``None``, the data is
            fetched from the GitHub API (``GITHUB_REPOS``).

    Returns:
        list: A ``Human`` for each contributor, with ``name`` falling
        back to the GitHub login and ``website`` taken from the
        contributor's blog, if any.
    """
    if data is None:
        # Only hit the network when no data was supplied. Previously
        # an explicit empty list also triggered a fetch, which was
        # surprising for callers passing pre-fetched results.
        raw_data = json.load(urlopen(GITHUB_REPOS))
    else:
        raw_data = data

    humans = []

    for contributor in raw_data:
        human = Human()
        human.name = contributor.get('name', contributor['login'])
        human.website = contributor.get('blog', None)
        humans.append(human)

    return humans
def get_config(self):
    """Return the Travis CI server's config.

    Returns:
        dict: The parsed contents of the JSON response.

    Raises:
        urllib2.URLError:
            The HTTP request failed.
    """
    # This request can't go through _make_request because this
    # endpoint isn't available with API version 3 and doesn't require
    # authentication.
    config_url = '%s/config' % self.endpoint
    response = urlopen(URLRequest(config_url))

    return json.loads(response.read())
def call(self, method, **params):
    """Call the DISQUS API and return the json response.

    URLError is raised when the request failed.
    DisqusException is raised when the query didn't succeed.
    """
    url = self.api_url % method
    request = self._get_request(url, self.METHODS[method], **params)

    # No try/except needed here: the old ``except URLError: raise``
    # was a no-op re-raise, and URLError still propagates unchanged.
    response = urlopen(request)

    response_json = json.loads(response.read())

    if not response_json['succeeded']:
        raise DisqusException(response_json['message'])

    return response_json['message']
def _make_request(self, url, body=None, method='GET',
                  content_type='application/json'):
    """Make an HTTP request.

    Args:
        url (unicode):
            The URL to make the request against.

        body (unicode or bytes, optional):
            The content of the request.

        method (unicode, optional):
            The request method. If not provided, it defaults to a
            ``GET`` request.

        content_type (unicode, optional):
            The type of the content being POSTed.

    Returns:
        bytes: The contents of the HTTP response body.

    Raises:
        urllib2.URLError:
            The HTTP request failed.
    """
    logger.debug('Making request to Travis CI %s', url)

    request_headers = {
        'Accept': 'application/json',
        'Authorization': 'token %s' % self.token,
        'Travis-API-Version': '3',
    }

    if content_type:
        request_headers['Content-Type'] = content_type

    response = urlopen(URLRequest(url,
                                  body=body,
                                  method=method,
                                  headers=request_headers))

    return response.read()