def pingback(request, context):
    """Advertise the pingback XML-RPC and REST endpoints for the subject.

    Appends two triples to context['graph'] that point at the pingback
    service URLs, resolved absolutely against the current request.
    """
    graph, subject = context['graph'], context['subject']
    # Use the already-extracted resource instead of a second context lookup
    # (the original bound ``subject`` and then never used it).
    subject_uri = subject._identifier
    graph += [
        (subject_uri, PINGBACK.service,
         rdflib.URIRef(request.build_absolute_uri(
             reverse_full('data', 'pingback:xmlrpc')))),
        (subject_uri, PINGBACK.to,
         rdflib.URIRef(request.build_absolute_uri(
             reverse_full('data', 'pingback:rest')))),
    ]
def detail_url(self):
    """Return the reader detail URL for this collection variant.

    Emergency-hosted collections are served by the emergency reader view
    (keyed on the internal ``version``); normal collections use the
    configured detail view with the public version.
    """
    if self.collection.is_emergency_hosted_collection:
        return reverse_full('www', 'reader.views.emergency_reader',
                            view_kwargs={'collection_id': self.identifier,
                                         'version': self.version,
                                         'variant': self.variant})
    return reverse_full('www', self.config.VARIANT_DETAILS_VIEW, view_kwargs={
        'collection_id': self.identifier,
        'version': self.public_version,
        'variant': self.variant})
def ical_feed(request, show_name=None):
    """Serve an iCalendar feed of the next 30 upcoming episodes.

    When ``show_name`` is given, the feed is restricted to that show and
    the show's name is appended to the calendar title.
    """
    cal = vobject.iCalendar()
    cal.add('method').value = 'PUBLISH'  # IE/Outlook needs this
    # Renamed from ``str`` -- the original shadowed the builtin.
    suffix = ""
    if show_name:
        suffix = " for %s" % Show.objects.get(slug=show_name).name
    cal.add('X-WR-CALNAME').value = "Upcoming episodes%s on xsn" % suffix
    cal.add('X-WR-TIMEZONE').value = settings.TIME_ZONE
    ep = Episode.objects.filter(status='UPCOMING')
    if show_name:
        ep = ep.filter(show__slug=show_name)
    # Most recent first, limited to 30 events.
    for episode in ep.annotate(beginfilter=Min('parts__begin')).order_by('-beginfilter')[:30]:
        vevent = cal.add('vevent')
        vevent.add('summary').value = "%s: %s" % (episode.slug, episode.title())
        vevent.add('description').value = episode.description()
        vevent.add('dtstart').value = episode.begin()
        vevent.add('dtend').value = episode.end()
        vevent.add('uid').value = '%s' % episode.pk
        kwargs = {'show_name': episode.show.slug, 'slug': episode.slug}
        url = reverse_full("www", "episode", view_kwargs=kwargs)
        vevent.add('url').value = 'http:%s' % url
    icalstream = cal.serialize()
    response = HttpResponse(icalstream, mimetype='text/calendar')
    #response['Filename'] = 'filename.ics' # IE needs this
    #response['Content-Disposition'] = 'attachment; filename=filename.ics'
    return response
def item_extra_kwargs(self, item):
    """Build the extra per-item dict for the feed (times, streams, channel)."""
    #print type(item), type(item.begin), type(item.end)
    tz = pytz.timezone(settings.TIME_ZONE)
    begin = item.begin()
    begin = tz.localize(begin)
    # NOTE(review): ``begin`` is called but ``end`` is not -- if ``end`` is a
    # method like ``begin`` (cf. the ical feed using episode.end()), this
    # would localize a bound method; confirm ``item.end`` is an attribute here.
    end = item.end
    if end:
        end = tz.localize(end)
    extra_dict = {
        'begin': begin,
        'end': end,
    }
    if item.status == "RUNNING":
        try:
            # Collect the http URLs of all currently-running streams.
            extra_dict['streams'] = []
            for stream in item.channel.stream_set.all():
                if not stream.running:
                    continue
                url = reverse_full("www", "mount", view_kwargs={'stream': stream.mount})
                extra_dict['streams'].append("http:%s" % url)
            extra_dict['listener'] = str(item.channel.listener)
            extra_dict['channel'] = item.channel.cluster
            extra_dict['current_song'] = item.channel.streamCurrentSong
        except Channel.DoesNotExist:
            # Item has no live channel; omit the streaming fields.
            pass
    if item.show.icon:
        extra_dict['icon'] = "http:%s" % item.show.icon.url
    if item.url:
        # NOTE(review): truth-tests ``item.url`` but calls ``item.url()``
        # below -- if ``url`` is a method the test is always true; verify.
        extra_dict['website'] = item.url()
    return extra_dict
def reverse(viewname, urlconf=None, args=None, kwargs=None, prefix=None, current_app=None):
    """Reverse a view name, stripping 'region' for the no-region urlconf.

    Falls back to a full (host-qualified) reverse with the original kwargs
    when the no-region urlconf cannot match.
    """
    # Copy the caller's dict: the original code aliased it and then mutated
    # it in place (``del kwargs['region']``), which both modified the
    # caller's dict and made the "original" fallback kwargs already stripped.
    original_kwargs = dict(kwargs) if kwargs else kwargs
    if urlconf is None:
        urlconf = get_urlconf() or settings.ROOT_URLCONF
    # We behave differently if we're using the no-region urlconf with the
    # no-region django-hosts host.
    if urlconf == 'main.urls_no_region':
        if kwargs and kwargs.get('region'):
            kwargs = dict(kwargs)  # work on a copy, not the caller's dict
            del kwargs['region']
    try:
        return django_reverse(viewname, urlconf=urlconf, args=args,
                              kwargs=kwargs, prefix=prefix,
                              current_app=current_app)
    except NoReverseMatch:
        if urlconf == 'main.urls_no_region':
            # Try the base urlconf and original kwargs
            host = settings.DEFAULT_HOST
            return reverse_full(host, viewname, view_args=args,
                                view_kwargs=original_kwargs)
        # Bare re-raise preserves the traceback (``raise e`` did not on py2).
        raise
def resource_url(self, resource_name):
    """Return the URL of the named resource view for this module."""
    view_name = '%s_%s' % (self.CONFIG.VIEWS_BASE_NAME, resource_name)
    view_kwargs = {
        'collection_id': self.collection.identifier,
        'version': self.collection.version,
        'variant': self.collection.variant,
        'module_id': self.module.identifier,
    }
    return reverse_full('www', view_name, view_kwargs=view_kwargs)
def doc_backward(url, formats=None):
    """
    Determines the URI a doc page is about.

    Returns a tuple of (uri, format, canonical).
    """
    parsed_url = urlparse.urlparse(url)
    query = parse_qs(parsed_url.query)
    doc_view_url = get_doc_view()
    if isinstance(doc_view_url, tuple):
        # get_doc_view may return (host, view) arguments for reverse_full.
        doc_view_url = reverse_full(*doc_view_url)
    if _get_host_path(url) == urlparse.urljoin(_get_host_path(url), doc_view_url):
        # The URL is the doc view itself; subject URI lives in the query string.
        return rdflib.URIRef(query.get('uri', [None])[0]), query.get('format', [None])[0], False
    match = BACKWARD_FORMAT_RE.match(url)
    url, format = match.group('url'), match.group('format')
    if format and formats is not None and format not in formats:
        # Extension is not a known format: treat it as part of the URL.
        url, format = '%s.%s' % (url, format), None
    if with_hosts:
        # NOTE(review): ``with_hosts`` is not defined in this block;
        # presumably a module-level setting -- confirm.
        url_part = url
    else:
        url_part = urlparse.urlparse(url).path
    for id_prefix, doc_prefix, is_local in get_id_mapping():
        doc_prefix = urlparse.urljoin(url, doc_prefix)
        if url_part.startswith(doc_prefix):
            # Map the doc URL prefix back onto the identifier prefix.
            url_part = id_prefix + url_part[len(doc_prefix):]
            return rdflib.URIRef(urllib.unquote(url_part)), format, is_local
    else:
        # No identifier mapping matched this URL.
        return None, None, None
def html_url(self):
    """Return the URL of the volatile HTML view for this module occurrence."""
    view_kwargs = {
        'collection_id': self.collection.md_content_id,
        'version': self.collection.md_version,
        'variant': self.collection.variant,
        'module_id': self.module.md_content_id,
    }
    return reverse_full('www', self.CONFIG.VOLATILE_VIEW_NAME,
                        view_kwargs=view_kwargs)
def doc_forwards(uri, graph=None, described=None):
    """
    Determines all doc URLs for a URI.

    graph is an rdflib.ConjunctiveGraph that can be checked for a description
    of uri. described is a ternary boolean.
    """
    if isinstance(uri, unicode):
        encoded_uri = uri.encode('utf-8')
    else:
        # NOTE(review): non-unicode input is percent-unquoted rather than
        # used verbatim -- confirm this asymmetry is intended.
        encoded_uri = urllib.unquote(uri)
    for id_prefix, doc_prefix, _ in get_id_mapping():
        if uri.startswith(id_prefix):
            base = doc_prefix + urllib.quote(encoded_uri[len(id_prefix):])
            # '%' is doubled so the pattern survives later %-formatting.
            pattern = base.replace('%', '%%') + '.%(format)s'
            return DocURLs(base, pattern)
    if graph and not described and any(graph.triples((uri, None, None))):
        described = True
    if described == False:
        # Known-undescribed URI: the URI itself is the only "doc" URL.
        return DocURLs(encoded_uri, encoded_uri.replace('%', '%%'))
    view_name = get_doc_view() if described else get_desc_view()
    base = 'http:%s?%s' % (reverse_full(*view_name), urllib.urlencode((('uri', encoded_uri),)))
    return DocURLs(base, '%s&format=%%(format)s' % base.replace('%', '%%'))
def done(self, form_list, **kwargs):
    """Wizard completion: create user, show, channel and stream, grant
    object permissions, and mail the generated credentials."""
    user_pw = User.objects.make_random_password()
    user = form_list[0].save(commit=False)
    user.set_password(user_pw)
    user.username = ("%s%s" % (user.first_name, user.last_name)).lower()
    user.save()
    show = form_list[1].save()
    channel = form_list[2].save(commit=False)
    channel.mapping_method = '["find-from-title","make-from-title","find-or-make-live"]'
    channel.save()
    channel.show.add(show)
    stream = form_list[3].save(commit=False)
    # NOTE(review): placeholder/redacted value -- confirm the real stream user.
    stream.user = "******"
    stream_pw = User.objects.make_random_password()
    stream.password = stream_pw
    stream.channel = channel
    stream.save()
    assign('change_channel', user, channel)
    # NOTE(review): 'change_stream' is granted on the channel, not the stream
    # object -- verify this is intended.
    assign('change_stream', user, channel)
    assign('change_episodes', user, show)
    assign('change_show', user, show)
    mail_data = {'username': user.username, 'password': user_pw}
    mail_text = _("USERCREATEDMAIL with %(username)s %(password)s") % mail_data
    mail_subject = _("[xenim] Neuer Nutzer erstellt")
    send_mail(mail_subject, mail_text, "*****@*****.**", [user.email,])
    return HttpResponseRedirect(reverse_full('dashboard', 'dashboard'))
def render(self, context):
    """Resolve a slash-separated search-result path to a reader URL.

    Paths with three components address a variant; four or more address a
    module, with any trailing component used as a fragment anchor.
    Returns '#' on malformed input.
    """
    dirs = self.get_path(context).split('/')
    if len(dirs) < 3:
        error('invalid length of search result: ', str(dirs))
        return '#'
    if len(dirs) == 3:
        return reverse_full('www', 'reader_variant_details', view_args=dirs[0:3])
    # Four or more components: module reader; the last extra component
    # selects an anchor inside the module.  (Collapses the two duplicated
    # reverse_full branches of the original and drops dead commented code.)
    url = reverse_full('www', 'reader_module_reader', view_args=dirs[0:4])
    if len(dirs) > 4:
        url += '#' + dirs[-1]
    return url
def fetch_initial(self):
    """Fetch the application's detail record from the review API.

    Returns the decoded JSON dict, or None when no id is present in the
    URL kwargs or the API call does not return 200.
    """
    app_id = self.kwargs.get('id', None)  # renamed: don't shadow builtin ``id``
    if not app_id:
        return
    kwargs = {'api_name': 'v1', 'resource_name': 'application', 'pk': app_id}
    url = "http:%s" % reverse_full('review', 'api_dispatch_detail', view_kwargs=kwargs)
    header = {'Authorization': 'ApiKey %s:%s' % (self.request.user.username,
                                                 self.request.user.api_key.key)}
    # SECURITY: verify=False disables TLS certificate checking while sending
    # the API key -- enable verification if the deployment allows it.
    r = requests.get(url, params={'format': 'json'}, headers=header, verify=False)
    if r.status_code != 200:
        return
    return r.json()
def read_url(self):
    """Return the URL for reading this collection, or None when it is empty.

    Emergency-hosted collections go straight to the emergency reader view;
    otherwise the URL of the first module occurrence is returned.
    """
    if self.collection.is_emergency_hosted_collection:
        return reverse_full('www', 'reader.views.emergency_reader',
                            view_kwargs={'collection_id': self.identifier,
                                         'version': self.version,
                                         'variant': self.variant})
    try:
        first_module = next(self.collection.get_all_module_occurrences())  # TODO: optimize here
        return self.bind_module_or_404(first_module.module.md_content_id).url
    except StopIteration:
        # The collection has no module occurrences at all.
        return None
def render(self, context):
    """Resolve all argument expressions and emit (or store) the full URL."""
    def _vals(exprs):
        # Resolve a sequence of template expressions against the context.
        return [item.resolve(context) for item in exprs]

    def _pairs(mapping):
        # Resolve a mapping of template expressions, coercing keys to ascii.
        return dict((smart_str(name, 'ascii'), expr.resolve(context))
                    for name, expr in six.iteritems(mapping))

    url = reverse_full(self.host, self.view,
                       _vals(self.host_args), _pairs(self.host_kwargs),
                       _vals(self.view_args), _pairs(self.view_kwargs))
    if self.asvar:
        context[self.asvar] = url
        return ''
    return url
def render(self, context):
    """Render the fully-qualified URL for the tag, or store it via asvar."""
    def _resolved_list(exprs):
        # Resolve a sequence of template expressions against the context.
        return [entry.resolve(context) for entry in exprs]

    def _resolved_dict(mapping):
        # Resolve a mapping of template expressions, coercing keys to ascii.
        return dict((smart_str(name, 'ascii'), expr.resolve(context))
                    for name, expr in mapping.iteritems())

    url = reverse_full(self.host, self.view,
                       _resolved_list(self.host_args),
                       _resolved_dict(self.host_kwargs),
                       _resolved_list(self.view_args),
                       _resolved_dict(self.view_kwargs))
    if self.asvar:
        context[self.asvar] = url
        return ''
    return url
def get_elephantblog_url(entry):
    """Build the host-qualified detail URL for a blog entry."""
    # Permalinks are derived from a naive UTC date when USE_TZ is active.
    published = entry.published_on
    if getattr(settings, 'USE_TZ', False):
        published = timezone.make_naive(published, timezone.utc)
    target_host = 'blog' if entry.entry_type == 'blog' else 'main_site'
    return reverse_full(
        host=target_host,
        view='blog_entry_detail',
        view_kwargs={
            'year': published.strftime('%Y'),
            'month': published.strftime('%m'),
            'day': published.strftime('%d'),
            'slug': entry.slug,
        }
    )
def doc_forwards(uri, graph=None, described=None):
    """
    Determines all doc URLs for a URI.

    graph is an rdflib.ConjunctiveGraph that can be checked for a
    description of uri. described is a ternary boolean (None for 'unknown').
    """
    if isinstance(uri, unicode):
        encoded_uri = uri.encode('utf-8')
    else:
        encoded_uri = urllib.unquote(uri)
    for id_prefix, doc_prefix, _ in get_id_mapping():
        if uri.startswith(id_prefix):
            base = doc_prefix + urllib.quote(encoded_uri[len(id_prefix):])
            # '%' is doubled so the pattern survives later %-formatting.
            pattern = base.replace('%', '%%') + '.%(format)s'
            return DocURLs(base, pattern)
    if graph is not None and not described and any(graph.triples((uri, None, None))):
        described = True
    if described is False:  # explicitly undescribed (None means 'unknown')
        return DocURLs(encoded_uri, encoded_uri.replace('%', '%%'))
    url = get_doc_view() if described else get_desc_view()
    if isinstance(url, tuple):
        # This used to return a tuple, now it returns the URL directly
        url = reverse_full(*url)
    params = [('uri', encoded_uri)]
    if not described:
        from humfrey.desc.views import DescView
        params.append(('token', DescView.get_uri_token(encoded_uri)))
    base = '%s?%s' % (url, urllib.urlencode(params))
    # (Removed a stray ``print base`` debug statement left in the original.)
    return DocURLs(base, '%s&format=%%(format)s' % base.replace('%', '%%'))
def done(self, form_list, **kwargs):
    """Wizard completion: create user, show, channel and stream, grant
    permissions, mail credentials, and optionally notify the RT ticket."""
    user_pw = User.objects.make_random_password()
    user = form_list[0].save(commit=False)
    user.set_password(user_pw)
    user.username = ("%s%s" % (user.first_name, user.last_name)).lower()
    user.save()
    show = form_list[1].save()
    channel = form_list[2].save(commit=False)
    channel.mapping_method = '["find-from-title","make-from-title","find-or-make-live"]'
    channel.save()
    channel.show.add(show)
    stream = form_list[3].save(commit=False)
    # NOTE(review): placeholder/redacted value -- confirm the real stream user.
    stream.user = "******"
    stream_pw = User.objects.make_random_password()
    stream.password = stream_pw
    stream.channel = channel
    stream.save()
    assign('change_channel', user, channel)
    # NOTE(review): 'change_stream' is granted on the channel, not the stream
    # object -- verify this is intended.
    assign('change_stream', user, channel)
    assign('change_episodes', user, show)
    assign('change_show', user, show)
    mail_data = {'username': user.username, 'password': user_pw}
    mail_text = _("USERCREATEDMAIL with %(username)s %(password)s") % mail_data
    mail_subject = _("[xenim] Neuer Nutzer erstellt")
    send_mail(mail_subject, mail_text, "*****@*****.**", [user.email,])
    if 'rt_id' in self.storage.extra_data:
        # Notify the RT ticket that triggered this wizard run.
        mail_data_rt = {'username': user.username, 'showname': show.name, 'channel': channel.cluster, 'streamname': stream.mount}
        mail_text_rt = _("The following objects have been created:\n\n\tUser:\t%(username)s\n\tShow:\t%(showname)s\n\tChannel:\t%(channel)s\n\tStream:\t%(streamname)s\n") % mail_data_rt
        mail_subject_rt = _("[xsn #%i] Neuer Nutzer") % self.storage.extra_data['rt_id']
        send_mail(mail_subject_rt, mail_text_rt, "*****@*****.**", ["*****@*****.**",])
    return HttpResponseRedirect(reverse_full('dashboard', 'dashboard'))
def doc_backward(url, formats=None):
    """Determine the URI a doc page is about.

    Returns a tuple of (uri, format, is_local); (None, None, None) when the
    URL maps to no known identifier prefix.
    """
    parsed_url = urlparse.urlparse(url)
    query = parse_qs(parsed_url.query)
    # Scheme-relative form of the URL, matching what reverse_full returns.
    host_path = '//{0}{1}'.format(parsed_url.netloc, parsed_url.path)
    if host_path == reverse_full(*get_desc_view()):
        # The URL is the description view; subject URI is in the query string.
        return rdflib.URIRef(query.get('uri', [None])[0]), query.get('format', [None])[0], False
    match = BACKWARD_FORMAT_RE.match(url)
    url, format = match.group('url'), match.group('format')
    if format and formats is not None and format not in formats:
        # Extension is not a known format: keep it as part of the URL.
        url, format = '%s.%s' % (url, format), None
    if with_hosts:
        # NOTE(review): ``with_hosts`` is not defined in this block;
        # presumably a module-level setting -- confirm.
        url_part = url
    else:
        url_part = urlparse.urlparse(url).path
    for id_prefix, doc_prefix, is_local in get_id_mapping():
        doc_prefix = urlparse.urljoin(url, doc_prefix)
        if url_part.startswith(doc_prefix):
            # Map the doc URL prefix back onto the identifier prefix.
            url_part = id_prefix + url_part[len(doc_prefix):]
            return rdflib.URIRef(urllib.unquote(url_part)), format, is_local
    else:
        # No identifier mapping matched this URL.
        return None, None, None
def esi_url(context, host, view, *args, **kwargs):
    """Template helper: reverse host/view and wrap the result as an ESI link."""
    resolved = reverse_full(host, view, view_args=args, view_kwargs=kwargs)
    return esi_link(context, resolved)
def host_url(host, view, *args, **kwargs):
    """Reverse *view* on *host*, forwarding positional and keyword arguments."""
    url = reverse_full(host, view, view_args=args, view_kwargs=kwargs)
    return url
def redirect(link):
    """Return the URL of the redirect view for the encoded *link*."""
    view_args = (encode_link(link),)
    return reverse_full('redirect', 'redirect', (), {}, view_args)
def static_get_absolute_url(cls, md_content_id, md_version, variant):
    """Build the variant-details URL without requiring a bound instance."""
    view_kwargs = {
        'collection_id': md_content_id,
        'version': int(md_version),
        'variant': variant,
    }
    return reverse_full('www', cls.CONFIG.VARIANT_DETAILS_VIEW,
                        view_kwargs=view_kwargs)
def test_reverse_custom_scheme(self):
    # A host configured with an explicit scheme yields a fully-absolute URL.
    self.assertEqual(reverse_full('scheme', 'simple-direct'), 'https://scheme.spam.eggs/simple/')
def test_reverse_full_without_www(self):
    # A host without a subdomain yields a scheme-relative URL on the bare domain.
    self.assertEqual(reverse_full('without_www', 'simple-direct'), '//example.com/simple/')
def test_reverse_full(self):
    # Default behavior: scheme-relative URL on the host's subdomain.
    self.assertEqual(reverse_full('static', 'simple-direct'), '//static.spam.eggs/simple/')
def dependencies_url(self):
    """Return the URL of the module-dependencies view for this module version."""
    view_name = self.CONFIG.view_name('module_dependencies')
    return reverse_full('www', view_name,
                        view_kwargs={'module_id': self.module.identifier,
                                     'version': self.module.version})
def get_mobile_descriptor_url(self):
    """Return the URL of the mobile-descriptor preview for this content version."""
    args = [self.md_content_id, self.md_version]
    return reverse_full('www', 'preview.views.mobile_descriptor', view_args=args)
def upload_dataset_metadata(sender, store, graphs, when, **kwargs):
    """Signal handler: sync dataset metadata for updated graphs to CKAN.

    Queries the SPARQL endpoint for the (single) void:Dataset covering the
    updated graphs, builds a CKAN package entity from its properties, and
    PUTs it only when the record actually changed.
    """
    if store.slug != DEFAULT_STORE_SLUG:
        return
    if not graphs:
        logger.debug("No graphs updated for %r; aborting", store.slug)
        return
    if not getattr(settings, 'CKAN_API_KEY', None):
        logger.debug("No CKAN_API_KEY setting, not doing anything.")
        return
    client = ckanclient.CkanClient(api_key=settings.CKAN_API_KEY)
    endpoint = Endpoint(settings.ENDPOINT_QUERY)
    # Interpolate the updated graph URIs into the dataset query.
    query = _dataset_query % ' \n'.join('(%s)' % rdflib.URIRef(g).n3() for g in graphs)
    graph = endpoint.query(query)
    datasets = list(graph.subjects(NS.rdf.type, NS.void.Dataset))
    if len(datasets) != 1:
        logger.debug("Expected one dataset; got %d", len(datasets))
        return
    dataset = Resource(datasets[0], graph, endpoint)
    # ``find`` looks up property paths on the dataset resource.
    find = functools.partial(_find, graph, dataset._identifier)
    patterns = settings.CKAN_PATTERNS
    package_name = find('skos:notation', HUMFREY.theDataHubDatasetName)
    if not package_name:
        return
    #package_name = patterns.get('name', '%s') % slug
    package_title = patterns.get('title', '%s') % dataset.label
    author = find('dcterms:publisher/foaf:name|rdfs:label|dc:title|skos:prefLabel|dcterms:title')
    if author:
        author = patterns.get('author', '%s') % author
    description = find('rdfs:comment|dcterms:description',
                       (NS.xtypes['Fragment-Markdown'], NS.xtypes['Fragment-PlainText'], None))
    maintainer = find('oo:contact/foaf:name|rdfs:label|dc:title|skos:prefLabel|dcterms:title')
    if maintainer:
        maintainer = patterns.get('maintainer', '%s') % maintainer
    # NOTE(review): ``maintainer_email`` is computed here but the update dict
    # below derives the email from dataset.oo_contact directly -- confirm
    # which source is intended.
    maintainer_email = find('oo:contact/foaf:mbox|v:email')
    if maintainer_email:
        maintainer_email = maintainer_email.replace('mailto:', '')
    license = find('dcterms:license|cc:license')
    if license:
        license = _licenses.get(unicode(license))
    sparql_endpoint = find('void:sparqlEndpoint')
    if sparql_endpoint:
        sparql_endpoint = unicode(sparql_endpoint)
    else:
        sparql_endpoint = 'http:' + reverse_full('data', 'sparql:endpoint')
    tags = find('humfrey:theDataHubDatasetTag', all=True)
    groups = find('humfrey:theDataHubDatasetGroup',
                  all=True)
    url = doc_forward(dataset.uri)
    logger.debug("Fetching existing record for %r", package_name)
    try:
        package_entity = client.package_entity_get(package_name)
        logger.debug("Record successfully retrieved")
    except ckanclient.CkanApiNotFoundError:
        # Register an empty record so the PUT below has something to update.
        package_entity = {'name': package_name}
        client.package_register_post(package_entity)
        logger.debug("No record found; starting from empty")
    # Deep-copy so we can detect whether anything actually changed.
    original = copy.deepcopy(package_entity)
    package_entity.update({'name': package_name,
                           'title': package_title,
                           'url': url,
                           'notes': description,
                           'license_id': license,
                           'author': author,
                           'maintainer': maintainer,
                           'maintainer_email': dataset.oo_contact.get_one_of('foaf:mbox', 'v:email').replace('mailto:', '', 1)})
    # Merge configured groups/tags with whatever the record already carries.
    package_entity['groups'] = list(settings.CKAN_GROUPS | set(package_entity.get('groups', ())) | groups)
    package_entity['tags'] = list(settings.CKAN_TAGS | set(package_entity.get('tags', ())) | tags)
    resources = collections.defaultdict(dict, ((r.get('name'), r) for r in package_entity.get('resources', ())))
    resources['SPARQL endpoint'].update({'name': 'SPARQL endpoint', 'format': 'api/sparql', 'url': sparql_endpoint})
    package_entity['resources'] = resources.values()
    logger.debug("Updated CKAN record")
    if original != package_entity:
        logger.info("Updating %r at thedatahub.org", package_name)
        client.package_entity_put(package_entity)
def pingback(request, graph, doc_uri, subject_uri, subject, endpoint, renderers):
    """Attach pingback service/target triples for the subject to the graph."""
    xmlrpc_url = request.build_absolute_uri(reverse_full('data', 'pingback:xmlrpc'))
    rest_url = request.build_absolute_uri(reverse_full('data', 'pingback:rest'))
    graph += [
        (subject_uri, PINGBACK.service, rdflib.URIRef(xmlrpc_url)),
        (subject_uri, PINGBACK.to, rdflib.URIRef(rest_url)),
    ]
def url(self):
    """Return the http URL of the module view for this presentation."""
    presentation = self.collection_presentation
    path = reverse_full('www', self.config.MODULE_VIEW_NAME,
                        view_kwargs={'collection_id': presentation.identifier,
                                     'version': presentation.public_version,
                                     'variant': presentation.variant,
                                     'module_id': self.module_id})
    return 'http:' + path
def process_response(self, request, response):
    """Middleware hook: advertise the XML-RPC pingback endpoint via header."""
    pingback_url = request.build_absolute_uri(reverse_full('data', 'pingback:xmlrpc'))
    response['X-Pingback'] = pingback_url
    return response