def get_absolute_url(self):
    """
    Return the canonical URL of this product.

    Sorting by highest level, so that the canonical URL associates with the
    most generic category.
    """
    cms_page = self.cms_pages.order_by('depth').last()
    if cms_page is None:
        # Use an absolute placeholder path: with the former relative base
        # 'category-not-assigned', urljoin produced a URL relative to the
        # current location instead of a stable absolute one.
        return urljoin('/category-not-assigned/', self.slug)
    return urljoin(cms_page.get_absolute_url(), self.slug)
def assert_redirects(response, expected_url, status_code=302,
                     target_status_code=200, fetch_redirect_response=True):
    """
    Naive reimplementation of Django's assertRedirects.

    Compatible with pytest and should preserve its better error reporting.
    https://docs.djangoproject.com/en/1.10/_modules/django/test/testcases/#SimpleTestCase.assertRedirects

    :param response: response object returned by the test client.
    :param expected_url: URL the response is expected to redirect to.
    :param status_code: expected status of the redirect response itself.
    :param target_status_code: expected status when following the redirect.
    :param fetch_redirect_response: when True, GET the redirect target and
        verify its status code as well.
    """
    assert response.status_code == status_code, \
        "got status=%s instead of %s" % (response.status_code, status_code)
    url = response.url
    scheme, netloc, path, query, fragment = urlsplit(url)
    # Prepend path for relative redirects.
    if not path.startswith('/'):
        url = urljoin(response.request['PATH_INFO'], url)
        path = urljoin(response.request['PATH_INFO'], path)
    assert url == expected_url, \
        "got url=%s instead of %s" % (url, expected_url)
    if fetch_redirect_response:
        # re-issue the request against the redirect target, preserving the
        # query string and the scheme of the redirect URL
        redirect_response = response.client.get(path, QueryDict(query),
                                                secure=(scheme == 'https'))
        assert redirect_response.status_code == target_status_code, \
            "got status=%s instead of %s" % (redirect_response.status_code, target_status_code)
def index(request, uri):
    """
    Proxies render requests to graphite-web, as configured in graphite.conf

    :param request: the incoming Django request to forward.
    :param uri: the graphite-web URI to proxy, relative to the base URL.
    :returns: an HttpResponse mirroring graphite-web's reply, or an
        HttpResponseNotAllowed for unsupported methods.
    """
    base = CONFIG.get('graphiteweb', 'base')
    if request.method in ('GET', 'HEAD'):
        query = _inject_default_arguments(request.GET)
        url = urljoin(base, uri + ('?' + query) if query else '')
        req = Request(url)
    elif request.method == 'POST':
        # Request() requires a bytes payload on Python 3
        data = _inject_default_arguments(request.POST).encode('utf-8')
        url = urljoin(base, uri)
        req = Request(url, data)
    else:
        return HttpResponseNotAllowed(['GET', 'POST', 'HEAD'])

    LOGGER.debug("proxying request to %r", url)
    proxy = urlopen(req)
    headers = proxy.info()
    # proxy.info() returns an email.message.Message on Python 3, which has
    # no getheader() method; Message.get() works on both Python 2 and 3
    content_type = headers.get('Content-Type', 'text/html')
    if request.method == 'HEAD':
        response = HttpResponse(content_type=content_type)
        response['Content-Length'] = headers.get('Content-Length', '0')
    else:
        response = HttpResponse(proxy.read(), content_type=content_type)
    response['X-Where-Am-I'] = request.get_full_path()
    return response
def url(self, name):
    """
    Return a public URL for the stored object *name*, or None when the
    object cannot be found.

    Tries the driver's CDN URL first; for providers that do not implement
    CDN URLs (S3, Google, Azure) the URL is constructed by hand.
    """
    provider_type = self.provider['type'].lower()
    obj = self._get_object(name)
    if not obj:
        return None
    try:
        url = self.driver.get_object_cdn_url(obj)
    except NotImplementedError as e:
        # fall back to provider-specific URL construction
        object_path = '%s/%s' % (self.bucket, obj.name)
        if 's3' in provider_type:
            base_url = 'https://%s' % self.driver.connection.host
            url = urljoin(base_url, object_path)
        elif 'google' in provider_type:
            url = urljoin('https://storage.googleapis.com', object_path)
        elif 'azure' in provider_type:
            base_url = ('https://%s.blob.core.windows.net' %
                        self.provider['user'])
            url = urljoin(base_url, object_path)
        else:
            raise e
    if 'local' in provider_type and settings.MEDIA_ROOT != "" and settings.MEDIA_ROOT in url:
        # for local storage, rewrite the filesystem path into a URL served
        # from MEDIA_URL on the current site's domain
        rel_object_path = url.split(settings.MEDIA_ROOT)[-1]
        if rel_object_path.startswith('/'):
            rel_object_path = rel_object_path[1:]
        object_path = urljoin(settings.MEDIA_URL, rel_object_path)
        if object_path.startswith('/'):
            object_path = object_path[1:]
        url = "http://{0}/{1}".format(self._get_current_site_domain(), object_path)
    return url
def render(self, context):
    """
    Resolve the path expression against *context* and return the URL of the
    compiled CSS file, compiling the SASS/SCSS source on the fly when needed.
    """
    path = self._path.resolve(context)
    basename, ext = os.path.splitext(path)
    filename = find_file(path)
    if ext not in self._sass_exts:
        # return the given path, since it ends neither in `.scss` nor in `.sass`
        return urljoin(self.prefix, path)
    # compare timestamp of sourcemap file with all its dependencies, and check if we must recompile
    css_filename = basename + '.css'
    url = urljoin(self.prefix, css_filename)
    if not getattr(settings, 'SASS_PROCESSOR_ENABLED', settings.DEBUG):
        # processing disabled: assume the .css has been compiled offline
        return url
    sourcemap_filename = css_filename + '.map'
    if self.is_latest(sourcemap_filename):
        return url
    # otherwise compile the SASS/SCSS file into .css and store it
    sourcemap_url = self.storage.url(sourcemap_filename)
    content, sourcemap = sass.compile(filename=filename,
                                      source_map_filename=sourcemap_url,
                                      include_paths=self.include_paths)
    if self.storage.exists(css_filename):
        self.storage.delete(css_filename)
    self.storage.save(css_filename, ContentFile(content))
    if self.storage.exists(sourcemap_filename):
        self.storage.delete(sourcemap_filename)
    self.storage.save(sourcemap_filename, ContentFile(sourcemap))
    return url
def test_search_for_user_with_username_filter(self):
    """
    Test POST /Users/.search/?filter=userName eq ""
    """
    url = reverse('scim:users-search')
    body = json.dumps({
        'schemas': ['urn:ietf:params:scim:api:messages:2.0:SearchRequest'],
        'filter': 'userName eq ""',
    })
    resp = self.client.post(url, body, content_type='application/scim+json')
    self.assertEqual(resp.status_code, 200, resp.content.decode())
    # the Location header should point at the search endpoint itself
    location = urljoin(get_base_scim_location_getter()(), '/scim/v2/')
    location = urljoin(location, 'Users/.search')
    self.assertEqual(resp['Location'], location)
    result = json.loads(resp.content.decode())
    # no user matches an empty userName, so an empty list response is expected
    expected = {
        "schemas": ["urn:ietf:params:scim:api:messages:2.0:ListResponse"],
        "totalResults": 0,
        "itemsPerPage": 50,
        "startIndex": 1,
        "Resources": [],
    }
    self.assertEqual(expected, result)
def test_search_for_user_with_username_filter(self):
    """
    Test POST /Users/.search/?filter=userName eq ""
    """
    url = reverse('scim:users-search')
    # NOTE(review): SERACH_REQUEST looks misspelled, but it mirrors the
    # constant's actual name in the project's constants module — confirm
    body = json.dumps({
        'schemas': [constants.SchemaURI.SERACH_REQUEST],
        'filter': 'userName eq ""',
    })
    resp = self.client.post(url, body, content_type=constants.SCIM_CONTENT_TYPE)
    self.assertEqual(resp.status_code, 200, resp.content.decode())
    # the Location header should point at the search endpoint itself
    location = urljoin(get_base_scim_location_getter()(), '/scim/v2/')
    location = urljoin(location, 'Users/.search')
    self.assertEqual(resp['Location'], location)
    result = json.loads(resp.content.decode())
    # no user matches an empty userName, so an empty list response is expected
    expected = {
        "schemas": [constants.SchemaURI.LIST_RESPONSE],
        "totalResults": 0,
        "itemsPerPage": 50,
        "startIndex": 1,
        "Resources": [],
    }
    self.assertEqual(expected, result)
def render(self, context, path=None):
    """
    Return the URL of the compiled CSS for *path* (resolved from *context*
    when not given), compiling the SASS/SCSS source on demand.

    :raises TemplateSyntaxError: when the referenced file cannot be located.
    :raises ImproperlyConfigured: when compilation is required but libsass
        is not installed.
    """
    if path is None:
        path = self._path.resolve(context)
    basename, ext = os.path.splitext(path)
    filename = find_file(path)
    if filename is None:
        raise TemplateSyntaxError(
            'Unable to locate file {path} while rendering template {template}'.format(
                path=path, template=self.source_file
            )
        )
    if ext not in self._sass_exts:
        # return the given path, since it ends neither in `.scss` nor in `.sass`
        return urljoin(self.prefix, path)
    # compare timestamp of sourcemap file with all its dependencies
    # and check if we must recompile
    css_filename = basename + '.css'
    url = urljoin(self.prefix, css_filename)
    if not getattr(settings, 'SASS_PROCESSOR_ENABLED', settings.DEBUG):
        return url
    sourcemap_filename = css_filename + '.map'
    if self.is_latest(sourcemap_filename):
        return url
    # with offline compilation, raise an error, if css file could not be found.
    if sass is None:
        raise ImproperlyConfigured("Offline compiled file `{}` is missing and libsass has not been installed.".format(css_filename))
    # add a functions to be used from inside SASS
    custom_functions = {'get-setting': get_setting}
    # otherwise compile the SASS/SCSS file into .css and store it
    sourcemap_url = self.storage.url(sourcemap_filename)
    compile_kwargs = {
        'filename': filename,
        'source_map_filename': sourcemap_url,
        'include_paths': self.include_paths,
        'custom_functions': custom_functions,
    }
    if self.sass_precision:
        compile_kwargs['precision'] = self.sass_precision
    if self.sass_output_style:
        compile_kwargs['output_style'] = self.sass_output_style
    content, sourcemap = sass.compile(**compile_kwargs)
    content = force_bytes(content)
    sourcemap = force_bytes(sourcemap)
    if self.storage.exists(css_filename):
        self.storage.delete(css_filename)
    self.storage.save(css_filename, ContentFile(content))
    if self.storage.exists(sourcemap_filename):
        self.storage.delete(sourcemap_filename)
    self.storage.save(sourcemap_filename, ContentFile(sourcemap))
    return url
def get_file_system_storage(self, account=None):
    """
    Build a FileSystemStorage rooted below MEDIA_ROOT / MEDIA_URL,
    descending into the account's bucket name and media prefix when set.
    """
    root = settings.MEDIA_ROOT
    url = settings.MEDIA_URL
    bucket = self.get_bucket_name(account)
    if bucket:
        root = os.path.join(root, bucket)
        url = urljoin(url, bucket + '/')
    media_prefix = self.get_media_prefix(account)
    if media_prefix:
        root = os.path.join(root, media_prefix)
        url = urljoin(url, media_prefix)
    return FileSystemStorage(location=root, base_url=url)
def get_absolute_url(self):
    """
    Return the absolute URL of a product.

    The page set is sorted by highest level, so that the canonical URL
    associates with the most generic category.
    """
    # django-CMS 3.5 changed the page-tree field, so the ordering key differs
    if LooseVersion(CMS_VERSION) < LooseVersion('3.5'):
        ordering = 'depth'
    else:
        ordering = 'node__path'
    page = self.cms_pages.order_by(ordering).last()
    if page is None:
        return urljoin('/category-not-assigned/', self.slug)
    return urljoin(page.get_absolute_url(), self.slug)
def __call__(self, path):
    """
    Resolve *path* to the URL of its compiled CSS, compiling the SASS/SCSS
    source on demand and storing both the CSS and its sourcemap.

    :raises FileNotFoundError: when *path* cannot be located.
    :raises ImproperlyConfigured: when compilation is required but libsass
        is not installed.
    """
    basename, ext = os.path.splitext(path)
    filename = find_file(path)
    if filename is None:
        raise FileNotFoundError(
            "Unable to locate file {path}".format(path=path))
    if ext not in self.sass_extensions:
        # return the given path, since it ends neither in `.scss` nor in `.sass`
        return urljoin(self.prefix, path)
    # compare timestamp of sourcemap file with all its dependencies, and check if we must recompile
    css_filename = basename + '.css'
    url = urljoin(self.prefix, css_filename)
    if not self.processor_enabled:
        return url
    sourcemap_filename = css_filename + '.map'
    if find_file(css_filename) and self.is_latest(sourcemap_filename):
        return url
    # with offline compilation, raise an error, if css file could not be found.
    if sass is None:
        msg = "Offline compiled file `{}` is missing and libsass has not been installed."
        raise ImproperlyConfigured(msg.format(css_filename))
    # add a function to be used from inside SASS
    custom_functions = {'get-setting': get_setting}
    # otherwise compile the SASS/SCSS file into .css and store it
    sourcemap_url = self.storage.url(sourcemap_filename)
    compile_kwargs = {
        'filename': filename,
        'source_map_filename': sourcemap_url,
        'include_paths': self.include_paths + APPS_INCLUDE_DIRS,
        'custom_functions': custom_functions,
    }
    if self.sass_precision:
        compile_kwargs['precision'] = self.sass_precision
    if self.sass_output_style:
        compile_kwargs['output_style'] = self.sass_output_style
    content, sourcemap = sass.compile(**compile_kwargs)
    content = force_bytes(content)
    sourcemap = force_bytes(sourcemap)
    if self.storage.exists(css_filename):
        self.storage.delete(css_filename)
    self.storage.save(css_filename, ContentFile(content))
    if self.storage.exists(sourcemap_filename):
        self.storage.delete(sourcemap_filename)
    self.storage.save(sourcemap_filename, ContentFile(sourcemap))
    return url
def __call__(self, path):
    """
    Resolve *path* to the URL of its compiled CSS, compiling the SASS/SCSS
    source on demand and storing both the CSS and its sourcemap.

    :raises FileNotFoundError: when *path* cannot be located.
    :raises ImproperlyConfigured: when compilation is required but libsass
        is not installed.
    """
    basename, ext = os.path.splitext(path)
    filename = find_file(path)
    if filename is None:
        raise FileNotFoundError("Unable to locate file {path}".format(path=path))
    if ext not in self.sass_extensions:
        # return the given path, since it ends neither in `.scss` nor in `.sass`
        return urljoin(self.prefix, path)
    # compare timestamp of sourcemap file with all its dependencies, and check if we must recompile
    css_filename = basename + '.css'
    url = urljoin(self.prefix, css_filename)
    if not self.processor_enabled:
        return url
    sourcemap_filename = css_filename + '.map'
    if find_file(css_filename) and self.is_latest(sourcemap_filename):
        return url
    # with offline compilation, raise an error, if css file could not be found.
    if sass is None:
        msg = "Offline compiled file `{}` is missing and libsass has not been installed."
        raise ImproperlyConfigured(msg.format(css_filename))
    # add a function to be used from inside SASS
    custom_functions = {'get-setting': get_setting}
    # otherwise compile the SASS/SCSS file into .css and store it
    sourcemap_url = self.storage.url(sourcemap_filename)
    compile_kwargs = {
        'filename': filename,
        'source_map_filename': sourcemap_url,
        'include_paths': self.include_paths + APPS_INCLUDE_DIRS,
        'custom_functions': custom_functions,
    }
    if self.sass_precision:
        compile_kwargs['precision'] = self.sass_precision
    if self.sass_output_style:
        compile_kwargs['output_style'] = self.sass_output_style
    content, sourcemap = sass.compile(**compile_kwargs)
    content = force_bytes(content)
    sourcemap = force_bytes(sourcemap)
    if self.storage.exists(css_filename):
        self.storage.delete(css_filename)
    self.storage.save(css_filename, ContentFile(content))
    if self.storage.exists(sourcemap_filename):
        self.storage.delete(sourcemap_filename)
    self.storage.save(sourcemap_filename, ContentFile(sourcemap))
    return url
def index(request, uri):
    """
    Proxies render requests to graphite-web, as configured in graphite.conf

    Supports GET, HEAD and POST; upstream HTTP errors are relayed back to
    the client together with their status code and body.
    """
    base = CONFIG.get('graphiteweb', 'base')
    if request.method in ('GET', 'HEAD'):
        query = _inject_default_arguments(request.GET)
        url = urljoin(base, uri + ('?' + query) if query else '')
        req = Request(url)
        data = None
    elif request.method == 'POST':
        # Request() requires a bytes payload on Python 3
        data = _inject_default_arguments(request.POST).encode('utf-8')
        url = urljoin(base, uri)
        req = Request(url, data)
    else:
        return HttpResponseNotAllowed(['GET', 'POST', 'HEAD'])
    _logger.debug("proxying request to %r", url)
    try:
        proxy = urlopen(req)
    except HTTPError as error:
        # relay the upstream error response instead of raising a 500
        status = error.code
        headers = error.hdrs
        output = error.fp.read()
        _logger.error(
            "%s error on graphite render request: "
            "%r with arguments: %r", status, url, data,
        )
    else:
        status = proxy.getcode()
        headers = proxy.info()
        output = proxy.read()
    content_type = headers.get('Content-Type', 'text/html')
    if request.method == 'HEAD':
        # HEAD responses carry only the headers of the proxied response
        response = HttpResponse(content_type=content_type, status=status)
        response['Content-Length'] = headers.get('Content-Length', '0')
    else:
        response = HttpResponse(output, content_type=content_type, status=status)
    response['X-Where-Am-I'] = request.get_full_path()
    return response
def get_object_url(self, object_data=None, object_id=None, object_hash=None):
    """
    Return the object URL based on the id of the object from the storage table.

    The object record may be supplied directly (*object_data*), or looked up
    in the FileStorage table by primary key (*object_id*) or by hashed name
    (*object_hash*).

    :raises ValueError: when no object record can be resolved, or when its
        bucket cannot be mounted.
    """
    object_details = None
    # resolve the object record from whichever identifier was supplied
    if object_data is not None:
        object_details = object_data
    elif object_id is not None:
        try:
            object_details = FileStorage.objects.get(id=object_id)
        except FileStorage.DoesNotExist:
            pass
    elif object_hash is not None:
        try:
            object_details = FileStorage.objects.get(hashed_name=object_hash)
        except FileStorage.DoesNotExist:
            pass
    if object_details is None:
        # previously this was only printed and execution continued, crashing
        # later with an AttributeError; fail fast with a clear error instead
        raise ValueError("No object found")
    # connect to the object's bucket
    if not self.mount_driver_from_bucket_name(
            bucket_name=object_details.bucket_name):
        raise ValueError("No bucket found")
    # fetch the object blob from the bucket
    object_blob = self.driver.get_object(
        container_name=self.bucket.name,
        object_name=object_details.hashed_name)
    # prefer the driver's CDN URL; fall back to provider-specific schemes
    try:
        url = self.driver.get_object_cdn_url(object_blob)
    except NotImplementedError as e:
        object_path = '{}/{}'.format(self.bucket.name, object_blob.name)
        if 's3' in self.provider['type']:
            base_url = 'https://%s' % self.driver.connection.host
            url = urljoin(base_url, object_path)
        elif 'google' in self.provider['type']:
            url = urljoin('https://storage.googleapis.com', object_path)
        else:
            raise e
    return url
def test_normal(self):
    """The template tag renders a script element pointing at the bundled file."""
    expected_url = urljoin(settings.STATIC_URL, 'SJ/myapp/main.js')
    output = self._render()
    self.assertEqual(
        output,
        """<script type="text/javascript" src="{0}"></script>""".format(expected_url))
def get_metric_data(target, start="-5min", end="now"):
    """
    Retrieves raw datapoints from a graphite target for a given period of time.

    :param target: A metric path string or a list of multiple metric paths
    :param start: A start time specification that Graphite will accept.
    :param end: An end time specification that Graphite will accept.
    :returns: A raw, response from Graphite. Normally a list of dicts that
        represent the names and datapoints of each matched target, like so::

            [{'target': 'x', 'datapoints': [(value, timestamp), ...]}]

    :raises errors.GraphiteUnreachableError: when graphite-web cannot be
        reached or answers with an HTTP error.
    """
    if not target:
        return []  # no point in wasting time on http requests for no data
    base = CONFIG.get("graphiteweb", "base")
    url = urljoin(base, "/render/")
    # What does Graphite accept of formats? Lets check if the parameters are
    # datetime objects and try to force a format then
    if isinstance(start, datetime):
        start = start.strftime('%H:%M%Y%m%d')
    if isinstance(end, datetime):
        end = end.strftime('%H:%M%Y%m%d')
    query = {
        'target': target,
        'from': start,
        'until': end,
        'format': 'json',
    }
    query = urlencode(query, True)
    _logger.debug("get_metric_data%r", (target, start, end))
    req = Request(url, data=query.encode('utf-8'))
    try:
        response = urlopen(req)
        json_data = json.load(codecs.getreader('utf-8')(response))
        _logger.debug("get_metric_data: returning %d results", len(json_data))
        return json_data
    except HTTPError as err:
        _logger.error("Got a 500 error from graphite-web when fetching %s"
                      "with data %s", err.url, query)
        _logger.error("Graphite output: %s", err.fp.read())
        raise errors.GraphiteUnreachableError(
            "{0} is unreachable".format(base), err)
    except URLError as err:
        raise errors.GraphiteUnreachableError(
            "{0} is unreachable".format(base), err)
    except ValueError:
        # response could not be decoded
        return []
    finally:
        try:
            # close the response if it was ever successfully opened
            response.close()
        except NameError:
            pass
def send(self, method, **payload):
    """POST *payload* to the API endpoint *method* and return the response."""
    # blog is the only parameter required by all API endpoints
    if 'blog' not in payload:
        payload['blog'] = u'%s://%s/' % ('https' if self.ssl else 'http',
                                         self.domain)
    return self.session.post(urljoin(self.url, method), data=payload)
def _created_proxy_response(self, request, path):
    """
    Forward *request* to the configured upstream and return the raw
    urllib3 response, leaving redirects and content decoding untouched.
    """
    request_payload = request.body
    request_headers = self.get_proxy_request_headers(request)
    self.log.debug("Request headers: %s", request_headers)
    # quote the path so reserved characters survive the round-trip
    request_url = urljoin(
        self.upstream,
        quote_plus(path.encode('utf8'), QUOTE_SAFE)
    )
    self.log.debug("Request URL: %s", request_url)
    if request.GET:
        get_data = encode_items(request.GET.lists())
        request_url += '?' + urlencode(get_data)
        self.log.debug("Request URL: %s", request_url)
    try:
        proxy_response = self.http.urlopen(request.method,
                                           request_url,
                                           redirect=False,
                                           retries=self.retries,
                                           headers=request_headers,
                                           body=request_payload,
                                           decode_content=False,
                                           preload_content=False)
        self.log.debug("Proxy response header: %s",
                       proxy_response.getheaders())
    except urllib3.exceptions.HTTPError as error:
        self.log.exception(error)
        raise
    return proxy_response
def get_context_data(self, **kwargs):
    """Extend the template context with the element and its static root URL."""
    ctx = super(ElementBaseView, self).get_context_data(**kwargs)
    static_url = PrefixNode.handle_simple("STATIC_URL")
    ctx["element"] = self.element
    ctx["static_root"] = urljoin(static_url, self.element.aspect.relative_path)
    return ctx
def get_absolute_url(self):
    """Return the order URL, with the access token appended when present."""
    url = super(Order, self).get_absolute_url()
    if not self.token:
        return url
    if not url.endswith('/'):
        url += '/'
    return urljoin(url, self.token)
def get_link(self, path, method, base_url):
    """
    Build a coreapi.Link describing the endpoint at *path* / *method*.

    Collects path, serializer, pagination and filter fields, merges in any
    manual field overrides, and determines the request encoding from the
    field locations.
    """
    fields = self.get_path_fields(path, method)
    fields += self.get_serializer_fields(path, method)
    fields += self.get_pagination_fields(path, method)
    fields += self.get_filter_fields(path, method)
    manual_fields = self.get_manual_fields(path, method)
    fields = self.update_fields(fields, manual_fields)
    # only form/body fields imply a request body, hence an encoding
    if fields and any([field.location in ('form', 'body') for field in fields]):
        encoding = self.get_encoding(path, method)
    else:
        encoding = None
    description = self.get_description(path, method)
    if base_url and path.startswith('/'):
        # a leading '/' would make urljoin discard base_url's path component
        path = path[1:]
    return coreapi.Link(
        url=urlparse.urljoin(base_url, path),
        action=method.lower(),
        encoding=encoding,
        fields=fields,
        description=description
    )
def get_object(self):
    """
    Complete the social-auth pipeline and return the authenticated user.

    A ``redirect_uri`` supplied in the auth data takes precedence; failing
    that, when DOMAIN_FROM_ORIGIN is enabled, the redirect URI's host is
    rewritten to match the request's Origin header.
    """
    user = self.request.user
    manual_redirect_uri = self.request.auth_data.pop('redirect_uri', None)
    manual_redirect_uri = self.get_redirect_uri(manual_redirect_uri)
    if manual_redirect_uri:
        self.request.backend.redirect_uri = manual_redirect_uri
    elif DOMAIN_FROM_ORIGIN:
        origin = self.request.strategy.request.META.get('HTTP_ORIGIN')
        if origin:
            # keep the configured path, but swap in the Origin's scheme/host
            relative_path = urlparse(self.request.backend.redirect_uri).path
            url = urlparse(origin)
            origin_scheme_host = "%s://%s" % (url.scheme, url.netloc)
            location = urljoin(origin_scheme_host, relative_path)
            self.request.backend.redirect_uri = iri_to_uri(location)
    is_authenticated = user_is_authenticated(user)
    user = is_authenticated and user or None
    # skip checking state by setting following params to False
    # it is responsibility of front-end to check state
    # TODO: maybe create an additional resource, where front-end will
    # store the state before making a call to oauth provider
    # so server can save it in session and consequently check it before
    # sending request to acquire access token.
    # In case of token authentication we need a way to store an anonymous
    # session to do it.
    self.request.backend.REDIRECT_STATE = False
    self.request.backend.STATE_PARAMETER = False
    user = self.request.backend.complete(user=user)
    return user
def safe_join(base, *paths):
    """
    A version of django.utils._os.safe_join for S3 paths.

    Joins one or more path components to the base path component
    intelligently. Returns a normalized version of the final path.

    The final path must be located inside of the base path component
    (otherwise a ValueError is raised).

    Paths outside the base path indicate a possible security sensitive
    operation.
    """
    base_path = force_text(base)
    base_path = base_path.rstrip('/')
    paths = [force_text(p) for p in paths]
    final_path = base_path
    for path in paths:
        # urljoin resolves '.'/'..' segments; the trailing slash keeps the
        # previous component from being replaced by the joined path
        final_path = urlparse.urljoin(final_path.rstrip('/') + "/", path)
    # Ensure final_path starts with base_path and that the next character after
    # the final path is '/' (or nothing, in which case final_path must be
    # equal to base_path).
    base_path_len = len(base_path)
    if (not final_path.startswith(base_path) or
            final_path[base_path_len:base_path_len + 1] not in ('', '/')):
        raise ValueError('the joined path is located outside of the base path'
                         ' component')
    return final_path.lstrip('/')
def test_macro_sources_empty_macro_list(mock_requests):
    """When KumaScript can't return macros, the sources are empty."""
    macros_url = urljoin(KUMASCRIPT_BASE_URL, 'macros/')
    payload = {'can_list_macros': False, 'loader': 'HTTPLoader', 'macros': []}
    mock_requests.get(macros_url, json=payload)
    assert kumascript.macro_sources() == {}
def perform_ping(started, server=DEFAULT_SERVER_URL):
    """
    POST an anonymous telemetry ping to *server* and return the decoded
    JSON reply.

    :param started: datetime the process started; used to report uptime
        in whole minutes.
    :param server: base URL of the pingback server.
    :raises requests.HTTPError: when the server answers with an error status.
    """
    url = urljoin(server, "/api/v1/pingback")
    instance, _ = InstanceIDModel.get_or_create_current_instance()
    language = get_device_setting("language_id", "")
    try:
        timezone = get_current_timezone().zone
    except Exception:
        # best effort: a missing/broken timezone must not stop the ping
        timezone = ""
    data = {
        "instance_id": instance.id,
        "version": kolibri.__version__,
        "mode": conf.OPTIONS["Deployment"]["RUN_MODE"],
        "platform": instance.platform,
        "sysversion": instance.sysversion,
        "database_id": instance.database.id,
        "system_id": instance.system_id,
        "node_id": instance.node_id,
        "language": language,
        "timezone": timezone,
        "uptime": int((datetime.datetime.now() - started).total_seconds() / 60),
        "timestamp": localtime(),
        "installer": installation_type(),
    }
    logger.debug("Pingback data: {}".format(data))
    jsondata = dump_zipped_json(data)
    response = requests.post(url, data=jsondata, timeout=60)
    response.raise_for_status()
    # an empty body is treated as an empty JSON object
    return json.loads(response.content.decode() or "{}")
def theme(self): theme = list(self.registered_hooks)[0].theme # some validation and initialization _initFields(theme) _validateMetadata(theme) _validateBrandColors(theme) # set up cache busting bust = "?" + self.cacheKey if _isSet(theme, [SIGN_IN, BACKGROUND]): theme[SIGN_IN][BACKGROUND] += bust if _isSet(theme, [SIGN_IN, TOP_LOGO, IMG_SRC]): theme[SIGN_IN][TOP_LOGO][IMG_SRC] += bust if _isSet(theme, [SIDE_NAV, TOP_LOGO, IMG_SRC]): theme[SIDE_NAV][TOP_LOGO][IMG_SRC] += bust if _isSet(theme, [APP_BAR, TOP_LOGO, IMG_SRC]): theme[APP_BAR][TOP_LOGO][IMG_SRC] += bust # if a background image has been locally set using the `manage background` command, use it bg_img = os.path.join(settings.MEDIA_ROOT, DEFAULT_BG_IMAGE_FILE) if os.path.exists(bg_img): theme[SIGN_IN][BACKGROUND] = parse.urljoin(settings.MEDIA_URL, DEFAULT_BG_IMAGE_FILE) # add cache busting md5_file = os.path.join(settings.MEDIA_ROOT, DEFAULT_BG_MD5_FILE) if os.path.exists(md5_file): with open(md5_file) as f: theme[SIGN_IN][BACKGROUND] += "?{}".format(f.read()) return theme
def get_stylesheet_url(self):
    """Return the MEDIA_URL-based URL of the generated icon-font stylesheet."""
    font_root = os.path.relpath(
        app_settings.CMSPLUGIN_CASCADE['icon_font_root'], settings.MEDIA_ROOT)
    font_name = self.config_data.get('name') or 'fontello'
    path = '/'.join((font_root, self.font_folder, 'css/{}.css'.format(font_name)))
    return urljoin(settings.MEDIA_URL, path)
def get_link(self, path, method, base_url):
    """
    Build a coreapi.Link describing the endpoint at *path* / *method*.

    Any manually declared fields replace generated fields of the same name.
    """
    fields = self.get_path_fields(path, method)
    fields += self.get_serializer_fields(path, method)
    fields += self.get_pagination_fields(path, method)
    fields += self.get_filter_fields(path, method)
    if self._manual_fields is not None:
        # manual fields override generated ones, matched by name
        by_name = {f.name: f for f in fields}
        for f in self._manual_fields:
            by_name[f.name] = f
        fields = list(by_name.values())
    # only form/body fields imply a request body, hence an encoding
    if fields and any([field.location in ('form', 'body') for field in fields]):
        encoding = self.get_encoding(path, method)
    else:
        encoding = None
    description = self.get_description(path, method)
    if base_url and path.startswith('/'):
        # a leading '/' would make urljoin discard base_url's path component
        path = path[1:]
    return coreapi.Link(
        url=urlparse.urljoin(base_url, path),
        action=method.lower(),
        encoding=encoding,
        fields=fields,
        description=description
    )
def _convert_scss(self):
    """
    Return the URL of the CSS compiled from ``self.path``, compiling on the
    fly in DEBUG mode and serving the precompiled file otherwise.
    """
    basename, ext = os.path.splitext(self.path)
    # NOTE(review): str.replace swaps every '.scss' occurrence in the path,
    # not only the extension — verify paths never contain '.scss' elsewhere
    css_filename = self.path.replace(".scss", ".css")
    url = urljoin(self.prefix, css_filename)
    if not settings.DEBUG:
        # outside DEBUG the compiled CSS is assumed to already exist
        return url
    if ext not in self.scss_extensions:
        return static(self.path)
    # Compilation on the fly
    compile_args = {
        "filename": find_file(self.path),
        "include_paths": settings.SASS_INCLUDE_FOLDERS,
    }
    if settings.SASS_PRECISION:
        compile_args["precision"] = settings.SASS_PRECISION
    content = sass.compile(**compile_args)
    content = force_bytes(content)
    if self.storage.exists(css_filename):
        self.storage.delete(css_filename)
    self.storage.save(css_filename, ContentFile(content))
    return url
def get_repository_hook_instructions(self, request, repository):
    """Returns instructions for setting up incoming webhooks."""
    plan = repository.extra_data["repository_plan"]
    # URL of GitHub's "add webhook" page for this repository
    add_webhook_url = urljoin(
        self.account.hosting_url or "https://github.com/",
        "%s/%s/settings/hooks/new" % (
            self._get_repository_owner_raw(plan, repository.extra_data),
            self._get_repository_name_raw(plan, repository.extra_data),
        ),
    )
    # our endpoint that GitHub should deliver the webhook payloads to
    webhook_endpoint_url = build_server_url(
        local_site_reverse(
            "github-hooks-close-submitted",
            local_site=repository.local_site,
            kwargs={"repository_id": repository.pk,
                    "hosting_service_id": repository.hosting_account.service_name},
        )
    )
    return render_to_string(
        "hostingsvcs/github/repo_hook_instructions.html",
        RequestContext(
            request,
            {
                "repository": repository,
                "server_url": get_server_url(),
                "add_webhook_url": add_webhook_url,
                "webhook_endpoint_url": webhook_endpoint_url,
                "hook_uuid": repository.get_or_create_hooks_uuid(),
            },
        ),
    )
def raw_metric_query(query):
    """Runs a query for metric information against Graphite's REST API.

    :param query: A search string, e.g. "nav.devices.some-gw_example_org.*"
    :returns: A list of matching metrics, each represented by a dict.
    :raises errors.GraphiteUnreachableError: when graphite-web cannot be
        reached.
    """
    base = CONFIG.get("graphiteweb", "base")
    url = urljoin(base, "/metrics/find")
    query = urlencode({'query': query})
    url = "%s?%s" % (url, query)
    req = Request(url)
    # Bug fix: the original read `urlopen(req).read()` without ever binding
    # `response`, so the finally-clause's `except NameError: pass` silently
    # skipped closing and the HTTP connection leaked on every call.
    response = None
    try:
        response = urlopen(req)
        return json.loads(response.read().decode('utf-8'))
    except URLError as err:
        raise errors.GraphiteUnreachableError(
            "{0} is unreachable".format(base), err)
    except ValueError:
        # response could not be decoded
        return []
    finally:
        if response is not None:
            response.close()
def _build_project_api_url(self, repository, rest_parts, query=None):
    """Return an API URL for the Gerrit projects API.

    Args:
        repository (reviewboard.scmtools.models.Repository):
            The repository configured to use Gerrit.

        rest_parts (iterable):
            The rest of the URL parts.

        query (dict, optional):
            The query parameters to append to the URL.

    Returns:
        unicode:
        The full URL.
    """
    extra_data = repository.extra_data
    segments = [
        'a',
        'projects',
        quote_plus(extra_data['gerrit_project_name']),
    ]
    segments += list(rest_parts)
    # the URL always ends in a slash, before any query string
    url = '%s/' % urljoin(extra_data['gerrit_url'], '/'.join(segments))
    if query:
        url += '?%s' % urlencode(query)
    return url
def url(self, name):
    """Return the public URL for *name*, joined onto ``base_url``."""
    if self.base_url is None:
        raise ValueError("This file is not accessible via a URL.")
    encoded = filepath_to_uri(name)
    if encoded is not None:
        # strip leading slashes so the join stays below base_url
        encoded = encoded.lstrip("/")
    return urljoin(self.base_url, encoded)
def get_object(self):
    """
    Run the social-auth completion step and return the resulting user.

    An explicit ``redirect_uri`` from the auth data wins; otherwise, with
    DOMAIN_FROM_ORIGIN enabled, the backend's redirect URI is re-hosted on
    the request's Origin header.
    """
    user = self.request.user
    manual_redirect_uri = self.request.auth_data.pop('redirect_uri', None)
    manual_redirect_uri = self.get_redirect_uri(manual_redirect_uri)
    if manual_redirect_uri:
        self.request.backend.redirect_uri = manual_redirect_uri
    elif DOMAIN_FROM_ORIGIN:
        origin = self.request.strategy.request.META.get('HTTP_ORIGIN')
        if origin:
            # keep the configured path, but swap in the Origin's scheme/host
            relative_path = urlparse(
                self.request.backend.redirect_uri).path
            url = urlparse(origin)
            origin_scheme_host = "%s://%s" % (url.scheme, url.netloc)
            location = urljoin(origin_scheme_host, relative_path)
            self.request.backend.redirect_uri = iri_to_uri(location)
    is_authenticated = user_is_authenticated(user)
    user = is_authenticated and user or None
    # skip checking state by setting following params to False
    # it is responsibility of front-end to check state
    # TODO: maybe create an additional resource, where front-end will
    # store the state before making a call to oauth provider
    # so server can save it in session and consequently check it before
    # sending request to acquire access token.
    # In case of token authentication we need a way to store an anonymous
    # session to do it.
    self.request.backend.REDIRECT_STATE = False
    self.request.backend.STATE_PARAMETER = False
    user = self.request.backend.complete(user=user)
    return user
def test_macro_sources(mock_requests):
    """When KumaScript returns macros, the sources are populated."""
    macros_url = urljoin(KUMASCRIPT_BASE_URL, 'macros/')
    response = {
        'can_list_macros': True,
        'loader': 'FileLoader',
        'macros': [
            {
                'filename': 'A11yRoleQuicklinks.ejs',
                'name': 'A11yRoleQuicklinks'
            },
            {
                'filename': 'APIFeatureList.ejs',
                'name': 'APIFeatureList'
            },
            {
                # Normal form D, common on OSX
                'filename': u'traduccio\u0301n.ejs',
                'name': u'traduccio\u0301n'
            }
        ]
    }
    mock_requests.get(macros_url, json=response)
    macros = kumascript.macro_sources()
    expected = {
        'A11yRoleQuicklinks': 'A11yRoleQuicklinks.ejs',
        'APIFeatureList': 'APIFeatureList.ejs',
        # Normal form C, used on GitHub, ElasticSearch
        u'traducci\xf3n': u'traducci\xf3n.ejs',
    }
    assert macros == expected
def url(self, name):
    """Return the absolute URL under ``base_url`` for the stored file *name*."""
    if self.base_url is None:
        raise ValueError("This file is not accessible via a URL.")
    uri = filepath_to_uri(name)
    # drop leading slashes so the join stays below base_url
    return urljoin(self.base_url, uri if uri is None else uri.lstrip('/'))
def save(self, name, content):
    """Upload *content* to the manage service and return the server-assigned name."""
    r = requests.post(urljoin(self.manage_url, 'upload'), files={'file': content})
    r.raise_for_status()
    # the upload page reports the stored name as the second word of its <h1>
    document = html.document_fromstring(r.content)
    heading = document.xpath('//h1')[0]
    return heading.text.split()[1]
def url(self, name):
    """
    Returns an absolute URL where the file's contents can be accessed
    directly by a Web browser.
    """
    public_base = self.aws_s3_public_url
    if not public_base:
        # no public base configured: delegate to the generated URL
        return self._generate_url(name)
    return urljoin(public_base, filepath_to_uri(name))
def _logout_url(request, next_page):
    """Returns a CAS logout URL"""
    logout_url = urljoin(settings.CAS_SERVER_URL, 'logout')
    if not next_page:
        return logout_url
    # pass the post-logout destination along as the `url` parameter
    params = urlencode({'url': _service(request) + next_page})
    return logout_url + '?' + params
def _setup(self):
    """Instantiate the wrapped thumbnail storage backend lazily."""
    prefix = settings.MINIATURE_THUMBNAIL_PATH
    if prefix.endswith('/'):
        # drop a single trailing slash so the join below stays clean
        prefix = prefix[:-1]
    location = os.path.join(settings.MEDIA_ROOT, prefix)
    url = urljoin(settings.MEDIA_URL, '{0}/'.format(prefix))
    self._wrapped = get_storage_class()(location=location, base_url=url)
def __init__(self, *args, **kwargs):
    """Initialize the request handler, pre-computing the admin static prefix."""
    from django.conf import settings
    self.admin_static_prefix = urljoin(settings.STATIC_URL, 'admin/')
    # We set self.path to avoid crashes in log_message() on unsupported
    # requests (like "OPTIONS").
    self.path = ''
    self.style = color_style()
    super(WSGIRequestHandler, self).__init__(*args, **kwargs)
def forwards(self, orm):
    """
    Data migration: point every RepresentativeSet and Election at its
    ScraperWiki scraper page and data API endpoint.
    """
    for obj in itertools.chain(orm.RepresentativeSet.objects.all(), orm.Election.objects.all()):
        obj.data_about_url = u'https://scraperwiki.com/scrapers/%s/' % obj.scraperwiki_name
        # API query returning the scraper's full datastore as JSON
        obj.data_url = urljoin('https://api.scraperwiki.com/api/1.0/', 'datastore/sqlite') + '?' + urlencode({
            'format': 'jsondict',
            'name': obj.scraperwiki_name,
            'query': 'select * from swdata'
        })
        obj.save()
def get_redirect_url(self, *args, **kwargs):
    """Optionally rebase the redirect URL through a configured callable.

    When ``PROCESSOR_REDIRECT_CALLABLE`` is set, it is resolved by dotted
    path and called with the slug URL kwarg; its result becomes the base
    that the default redirect URL is joined onto.
    """
    url = super(StripeProcessorRedirectView, self).get_redirect_url(
        *args, **kwargs)
    if not settings.PROCESSOR_REDIRECT_CALLABLE:
        return url
    from saas.compat import import_string
    endpoint_func = import_string(settings.PROCESSOR_REDIRECT_CALLABLE)
    endpoint = endpoint_func(kwargs.get(self.slug_url_kwarg))
    return urljoin(endpoint, url)
def _check_boundary_validity(boundary_url): """Check that a given boundary URL matches a boundary on the web service.""" if not re.search(r'^/boundaries/[^/\s]+/[^/\s]+/$', boundary_url): return False try: resp = urlopen(urljoin(app_settings.BOUNDARYSERVICE_URL, boundary_url)) return resp.code == 200 except HTTPError: return False
def test_script_tag_attributes(self):
    """Extra attributes on the template tag end up on the <script> tag."""
    rendered = django_engine.from_string(
        """{% load system_tags %}{% systemjs_import 'myapp/main' async foo="bar" %}"""
    ).render(self.context)
    src = urljoin(settings.STATIC_URL, 'SJ/myapp/main.js')
    self.assertHTMLEqual(
        rendered,
        """<script async foo="bar" type="text/javascript" src="{0}"></script>""".format(src)
    )
def normalize(self, content, location):
    """Rewrite URLs in *content* relative to the mirrored location.

    Sets ``base_src`` to the slash-terminated absolute source location,
    then applies every (pattern, replacement) pair via ``self.replace``.
    """
    base = urljoin(self.root_url, location)
    self.base_src = base if base.endswith('/') else base + '/'
    for pattern, repl in self.patterns:
        # Bind repl as a default so each substitution sees its own pair.
        content = pattern.sub(
            lambda match, repl=repl: self.replace(match, repl), content)
    return content
def render(self, context):
    """Resolve the template path and return the URL of its compiled CSS.

    Non-SASS paths are returned as-is (prefixed).  SASS/SCSS paths are
    compiled to ``<basename>.css`` (plus a ``.map`` sourcemap) and stored
    via ``self.storage`` — but only when the SASS processor is enabled
    and the existing sourcemap is out of date.
    """
    path = self._path.resolve(context)
    basename, ext = os.path.splitext(path)
    filename = find_file(path)
    if filename is None:
        msg = "Unable to locate file {0} while rendering template {1}".format(path, self.source[0].name)
        raise TemplateSyntaxError(msg)
    if ext not in self._sass_exts:
        # return the given path, since it ends neither in `.scss` nor in `.sass`
        return urljoin(self.prefix, path)
    # compare timestamp of sourcemap file with all its dependencies, and check if we must recompile
    css_filename = basename + '.css'
    url = urljoin(self.prefix, css_filename)
    # Processor disabled (defaults to DEBUG): serve whatever CSS exists.
    if not getattr(settings, 'SASS_PROCESSOR_ENABLED', settings.DEBUG):
        return url
    sourcemap_filename = css_filename + '.map'
    # Sourcemap newer than all dependencies: compiled output is current.
    if self.is_latest(sourcemap_filename):
        return url
    # add a functions to be used from inside SASS
    custom_functions = {'get-setting': get_setting}
    # otherwise compile the SASS/SCSS file into .css and store it
    sourcemap_url = self.storage.url(sourcemap_filename)
    compile_kwargs = {
        'filename': filename,
        'source_map_filename': sourcemap_url,
        'include_paths': self.include_paths,
        'custom_functions': custom_functions,
    }
    if self.sass_precision:
        compile_kwargs['precision'] = self.sass_precision
    if self.sass_output_style:
        compile_kwargs['output_style'] = self.sass_output_style
    content, sourcemap = sass.compile(**compile_kwargs)
    # Delete-then-save so the storage backend does not mangle the name
    # of an already-existing file.
    if self.storage.exists(css_filename):
        self.storage.delete(css_filename)
    self.storage.save(css_filename, ContentFile(content))
    if self.storage.exists(sourcemap_filename):
        self.storage.delete(sourcemap_filename)
    self.storage.save(sourcemap_filename, ContentFile(sourcemap))
    return url
def absolute_path(self, path, prefix=None):
    """Return *path* joined onto a URL prefix.

    Paths that are already absolute (scheme-qualified or rooted at '/')
    are returned untouched.  Without an explicit *prefix*, STATIC_URL is
    used, falling back to MEDIA_URL when STATIC_URL is None.
    """
    if path.startswith('http://') or path.startswith('https://') \
            or path.startswith('/'):
        return path
    if prefix is None:
        prefix = settings.STATIC_URL
        if prefix is None:  # backwards compatibility
            prefix = settings.MEDIA_URL
    return urljoin(prefix, path)
def _verify(self, ticket, service):
    """
    Verifies CAS 2.0+ XML-based authentication ticket.

    Returns tuple (username, [proxy URLs], {attributes}) on success
    or (None, None, None) on failure.
    """
    params = {'ticket': ticket, 'service': service}
    if settings.CAS_PROXY_CALLBACK:
        params.update({'pgtUrl': settings.CAS_PROXY_CALLBACK})
    if settings.CAS_RENEW:
        params.update({'renew': 'true'})
    page = requests.get(urljoin(settings.CAS_SERVER_URL, 'proxyValidate'),
                        params=params,
                        verify=settings.CAS_SERVER_SSL_VERIFY,
                        cert=settings.CAS_SERVER_SSL_CERT)
    try:
        response = minidom.parseString(page.content)
        if response.getElementsByTagName('cas:authenticationFailure'):
            # logger.warn() is deprecated; warning() is the supported name.
            logger.warning(
                "Authentication failed from CAS server: %s",
                response.getElementsByTagName('cas:authenticationFailure')[0].firstChild.nodeValue)
            return (None, None, None)
        username = response.getElementsByTagName('cas:user')[0].firstChild.nodeValue
        proxies = []
        attributes = {}
        if response.getElementsByTagName('cas:proxyGrantingTicket'):
            proxies = [p.firstChild.nodeValue
                       for p in response.getElementsByTagName('cas:proxies')]
            pgt = response.getElementsByTagName('cas:proxyGrantingTicket')[0].firstChild.nodeValue
            try:
                pgtIou = self._get_pgtiou(pgt)
                tgt = Tgt.objects.get(username=username)
                tgt.tgt = pgtIou.tgt
                tgt.save()
                pgtIou.delete()
            except Tgt.DoesNotExist:
                # NOTE(review): assumes _get_pgtiou() succeeded, i.e. the
                # DoesNotExist came from Tgt.objects.get() — confirm.
                Tgt.objects.create(username=username, tgt=pgtIou.tgt)
                pgtIou.delete()
            except Exception:
                # Was a bare ``except:``, which also swallowed
                # KeyboardInterrupt/SystemExit.
                logger.error("Failed to do proxy authentication.",
                             exc_info=True)
        attrib_tag = response.getElementsByTagName('cas:attributes')
        if attrib_tag:
            for child in attrib_tag[0].childNodes:
                if child.nodeType != Node.ELEMENT_NODE:  # only parse tags
                    continue
                attributes[child.tagName] = child.firstChild.nodeValue
        logger.debug("Cas proxy authentication succeeded for %s with proxies %s",
                     username, proxies)
        return (username, proxies, attributes)
    except Exception:
        # BUG FIX: logger.error("...", e) passed the exception object as a
        # %-format argument with no placeholder in the message, breaking
        # the log record.  exception() logs the message plus traceback.
        logger.exception("Failed to verify CAS authentication")
        return (None, None, None)
    finally:
        page.close()
def get_domain_url(url):
    """Join *url* onto the optional ULTIMATETHUMB_DOMAIN setting.

    A configured domain without a scheme (e.g. "cdn.example.com") gets
    '//' prepended, producing a protocol-relative URL instead of having
    the domain treated as a path component.
    """
    domain = getattr(settings, 'ULTIMATETHUMB_DOMAIN', '')
    if domain:
        parsed = urlparse.urlparse(domain)
        if parsed.path and not parsed.netloc:
            domain = '//{0}'.format(domain)
    return urlparse.urljoin(domain, url)
def test_macro_sources_empty_macro_list(mock_requests):
    """When KumaScript can't return macros, the sources are empty."""
    mock_requests.get(
        urljoin(KUMASCRIPT_BASE_URL, 'macros/'),
        json={'can_list_macros': False,
              'loader': 'HTTPLoader',
              'macros': []})
    assert kumascript.macro_sources() == {}
def _login_url(service):
    """Build the CAS login URL for *service*, honoring renew/gateway."""
    params = {'service': service}
    # renew takes precedence over gateway; they are mutually exclusive.
    if settings.CAS_RENEW:
        params['renew'] = 'true'
    elif settings.CAS_GATEWAY:
        params['gateway'] = 'true'
    if settings.CAS_EXTRA_LOGIN_PARAMS:
        params.update(settings.CAS_EXTRA_LOGIN_PARAMS)
    return '%s?%s' % (urljoin(settings.CAS_SERVER_URL, 'login'),
                      urlencode(params))