Example #1
# Imports assumed for a standalone run (nose-style assert_raises and
# Werkzeug's URL helpers); the original test module provides these.
from nose.tools import assert_raises
from werkzeug.urls import uri_to_iri, iri_to_uri


def test_iri_support():
    """The IRI support"""
    assert_raises(UnicodeError, uri_to_iri, u'http://föö.com/')
    assert_raises(UnicodeError, iri_to_uri, 'http://föö.com/')
    assert uri_to_iri('http://xn--n3h.net/') == u'http://\u2603.net/'
    assert uri_to_iri('http://%C3%BCser:p%C3%[email protected]/p%C3%A5th') == \
        u'http://\xfcser:p\xe4ssword@\u2603.net/p\xe5th'
    assert iri_to_uri(u'http://☃.net/') == 'http://xn--n3h.net/'
    assert iri_to_uri(u'http://üser:pässword@☃.net/påth') == \
        'http://%C3%BCser:p%C3%[email protected]/p%C3%A5th'

    assert uri_to_iri('http://test.com/%3Fmeh?foo=%26%2F') == \
        u'http://test.com/%3Fmeh?foo=%26%2F'
Example #2
# Imports assumed for standalone use; _get_api() is a project helper (not shown).
import flask
import werkzeug


def _ensure_api():
    access_token = flask.session.get("ig.access_token", None)
    if access_token:
        return
    api = _get_api()
    # The request URL may be an IRI; make it an ASCII-safe URI for the OAuth state.
    url = flask.request.url
    url = werkzeug.iri_to_uri(url)
    return api.oauth_authorize(state=url)
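For reference, a minimal sketch of the same conversion against a current Werkzeug release, where the helper lives in werkzeug.urls rather than as a top-level werkzeug attribute; the example.com URL is purely illustrative:

from werkzeug.urls import iri_to_uri

# A request URL captured as an IRI; non-ASCII path and query text is allowed.
state = "https://example.com/café?next=søk"

# iri_to_uri percent-encodes the non-ASCII parts, so the result is an
# ASCII-only URI that is safe to pass around, e.g. as an OAuth state value.
print(iri_to_uri(state))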
Example #4
# Imports assumed for a standalone run, as in Example #1.
from nose.tools import assert_raises
from werkzeug.urls import uri_to_iri, iri_to_uri


def test_iri_support():
    """The IRI support"""
    assert_raises(UnicodeError, uri_to_iri, u'http://föö.com/')
    assert_raises(UnicodeError, iri_to_uri, 'http://föö.com/')
    assert uri_to_iri('http://xn--n3h.net/') == u'http://\u2603.net/'
    assert uri_to_iri('http://%C3%BCser:p%C3%[email protected]/p%C3%A5th') == \
        u'http://\xfcser:p\xe4ssword@\u2603.net/p\xe5th'
    assert iri_to_uri(u'http://☃.net/') == 'http://xn--n3h.net/'
    assert iri_to_uri(u'http://üser:pässword@☃.net/påth') == \
        'http://%C3%BCser:p%C3%[email protected]/p%C3%A5th'

    assert uri_to_iri('http://test.com/%3Fmeh?foo=%26%2F') == \
        u'http://test.com/%3Fmeh?foo=%26%2F'

    # this should work as well, might break on 2.4 because of a broken
    # idna codec
    assert uri_to_iri('/foo') == u'/foo'
    assert iri_to_uri(u'/foo') == '/foo'
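A small sketch of the reverse direction, uri_to_iri, which the test above exercises; exact handling of internationalized hosts has varied between Werkzeug releases, so the printed values should be treated as version-dependent rather than as the fixed strings asserted above:

from werkzeug.urls import uri_to_iri

# Percent-encoded UTF-8 in the userinfo and path is decoded back to text,
# while already-encoded reserved characters (such as %3F) are left alone.
print(uri_to_iri("http://%C3%BCser:p%C3%[email protected]/p%C3%A5th"))
print(uri_to_iri("http://example.com/%3Fmeh?foo=%26%2F"))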
Example #5
    def suggest(self):
        url = 'http://%s:%s/%s/' % (self.host, self.port, self.application)
        if self.core != '':
            url += '%s/' % self.core
        # urllib.urlencode is the Python 2 spelling; the surrounding code
        # targets Python 3 (it uses urllib.parse elsewhere), so use
        # urllib.parse.urlencode here.
        params = '%s?spellcheck.q=%s&wt=%s&json.nl=%s&omitHeader=%s' % (
            self.handler, urllib.parse.urlencode(self.suggest_query),
            self.writer, self.json_nl, self.omitHeader)
        self.request_url = '%s%s' % (url, params)
        # self.response = eval(urllib.request.urlopen(iri_to_uri(self.request_url)).read())
        # eval() assumes Solr's python response writer (wt=python).
        self.response = eval(requests.get(iri_to_uri(self.request_url)).text)
        self.suggestions = self.response.get('spellcheck').get('suggestions')
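The method above targets a Solr spellcheck/suggest handler. As a hedged alternative, the sketch below builds the same kind of request with urllib.parse.urlencode and parses the response as JSON instead of eval(); the host, core and handler arguments are placeholders, and it assumes the handler is asked for wt=json:

import requests
from urllib.parse import urlencode


def solr_suggest(host, port, application, core, handler, query):
    """Query a Solr spellcheck/suggest handler and return its suggestions."""
    base = 'http://%s:%s/%s/%s/%s' % (host, port, application, core, handler)
    # urlencode percent-escapes the query, so no separate IRI-to-URI step is
    # needed for the parameters built here.
    params = urlencode({
        'spellcheck.q': query,
        'wt': 'json',          # JSON response writer, parsed via .json()
        'json.nl': 'map',
        'omitHeader': 'true',
    })
    response = requests.get('%s?%s' % (base, params), timeout=10)
    return response.json().get('spellcheck', {}).get('suggestions')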
Example #7
    def request(self):
        params = ''
        url = 'http://%s:%s/%s/' % (self.host, self.port, self.application)
        if self.core != '':
            url += '%s/' % self.core
        fuzzy_tilde = ''
        if self.fuzzy == 'true':
            fuzzy_tilde = '~'
        #logging.info(self.facet)
        #logging.info(self.facet_fields)
        if self.facet == 'true':  # Old-style faceting...
            facets = '&facet.field='.join(self.facet_fields)
            params = '%s?q=%s%s&wt=%s&start=%s&rows=%s&facet.limit=%s&facet.mincount=%s&facet.offset=%s&facet.field=%s&json.nl=%s&facet=%s&facet.sort=%s&omitHeader=%s&defType=%s&facet.threads=-1' % (
                self.handler, self.query, fuzzy_tilde, self.writer, self.start,
                self.rows, self.facet_limit, self.facet_mincount,
                self.facet_offset, facets, self.json_nl, self.facet,
                self.facet_sort, self.omitHeader, self.defType)
            if self.boost_most_recent == 'true':
                params += '&boost=recip(ms(NOW/YEAR,year_boost),3.16e-11,1,1)'
            # params = '%s/%s?q=%s%s&wt=%s&start=%s&rows=%s&facet.limit=%s&facet.mincount=%s&facet.offset=%s&facet.field=%s&json.nl=%s&facet=%s&facet.sort=%s&omitHeader=%s&defType=%s' % (
            #     self.application, self.handler, self.query, fuzzy_tilde, self.writer, self.start, self.rows, self.facet_limit,
            #     self.facet_mincount, self.facet_offset, facets, self.json_nl, self.facet, self.facet_sort,
            #     self.omitHeader, self.defType)
            if len(self.sort_facet_by_index) > 0:
                for sortfield in self.sort_facet_by_index:
                    params += '&f.%s.facet.sort=homepage&f.%s.facet.limit=-1' % (
                        sortfield, sortfield
                    )  # Stupid hack until SOLR-1672 gets fixed
            # Pivot needs a mincount of 0 for empty categories. Build mincounts of 1 for normal facets...
            for myfacet in self.facet_fields:
                params += '&f.%s.facet.mincount=1' % myfacet
            if len(self.facet_tree) > 0:
                params += '&facet.pivot='
                for field in self.facet_tree:
                    # params += '&facet.pivot=%s,%s' % (self.facet_tree[0], self.facet_tree[1])
                    params += '%s,' % field
                params = params[:-1]
        else:
            params = '%s?q=%s%s&wt=%s&start=%s&rows=%s&json.nl=%s&omitHeader=%s&defType=%s' % (
                self.handler, self.query, fuzzy_tilde, self.writer, self.start,
                self.rows, self.json_nl, self.omitHeader, self.defType)
            if self.boost_most_recent == 'true':
                params += '&boost=recip(ms(NOW/YEAR,year_boost),3.16e-11,1,1)'
            if self.writer == 'csv':
                params += '&csv.separator=%s' % self.csv_separator
            # logging.info(self.json_facet)
            if self.json_facet:
                params += '&json.facet=%s' % (json.dumps(self.json_facet))
        if len(self.fquery) > 0:
            for fq in (self.fquery):
                try:
                    # params += '&fq=%s' % urllib.parse.unquote(fq.encode('utf8'))
                    val = urllib.parse.unquote(fq).replace('#', r'\%23')
                    # logging.info('%s >> %s' % (fq,val))
                    params += '&fq=%s' % val
                except UnicodeDecodeError:
                    params += '&fq=%s' % urllib.parse.unquote(fq)
        if self.sort:
            # if self.cursor:
            # params += '&sort=katkey+asc&cursorMark=%s' % self.cursor
            # elif self.sort != 'score desc':
            if self.sort != 'score desc':
                params += '&sort=%s' % self.sort
        if len(self.fields) > 0:
            if self.application == 'elevate':
                self.fields.append('[elevated]')
            params += '&fl=%s' % '+'.join(self.fields)
        if self.mlt is True:
            self.facet = 'false'
            mparams = '%s?q=%s&mlt=true&mlt.fl=%s&mlt.count=10&fl=%s&wt=%s&defType=%s' % (
                self.handler, self.query, '+'.join(self.mlt_fields), '+'.join(
                    self.fields), self.writer, self.defType)
            # if self.boost_most_recent == 'true':
            #     params += '&boost=recip(ms(NOW/YEAR,year_boost),3.16e-11,1,1)'
            # self.response = eval(urllib.request.urlopen('%s%s' % (url, mparams)).read())
            # logging.info(url)
            # logging.info(mparams)
            self.response = eval(requests.get('%s%s' % (url, mparams)).text)
            for mlt in self.response.get('moreLikeThis'):
                self.mlt_results = self.response.get('moreLikeThis').get(
                    mlt).get('docs')
        if self.spellcheck == 'true':
            params += '&spellcheck=true&spellcheck.collate=%s&spellcheck.count=%s' % (
                self.spellcheck_collate, self.spellcheck_count)
        if self.group[0]:
            params += '&group=true&group.field=%s&group.limit=%s&group.sort=%s&group.ngroups=%s' % (
                self.group_field[0], self.group_limit[0], self.group_sort[0],
                self.group_ngroups[0])
        if self.coordinates != '0,0':
            try:
                params += '&pt=%s&sfield=geolocation&fl=*+dist_:geodist()' % self.coordinates
            except UnicodeDecodeError:
                params += '&pt=%s&sfield=geolocation&fl=*+dist_:geodist()' % self.coordinates.decode(
                    'utf8')
        if self.queryField:
            params += '&qf=%s' % self.queryField
        if self.stats == 'true':
            params += '&stats=true&stats.field=' + '&stats.field='.join(
                self.stats_fl)
        if self.handler != 'query':
            params += '&q.op=AND'

        self.request_url = '%s%s' % (url, params)
        # logging.fatal(iri_to_uri(self.request_url))
        # logging.info('REQUEST: %s' % self.request_url)
        if self.compress:
            # Python 3 equivalents of the original urllib2/StringIO imports.
            import urllib.request
            import io
            import gzip

            request = urllib.request.Request(iri_to_uri(self.request_url))
            request.add_header('Accept-encoding', 'gzip')
            opener = urllib.request.build_opener()
            compresseddata = opener.open(request).read()
            compressedstream = io.BytesIO(compresseddata)
            gzipper = gzip.GzipFile(fileobj=compressedstream)

            self.response = eval(gzipper.read())
        else:
            # logging.error(self.request_url)
            try:
                # self.response = eval(urllib.request.urlopen(iri_to_uri(self.request_url)).read())
                self.response = eval(
                    requests.get(iri_to_uri(self.request_url)).text)
            except NameError:
                # self.response = urllib.request.urlopen(iri_to_uri(self.request_url)).read()
                self.response = requests.get(iri_to_uri(self.request_url)).text
            except SyntaxError:
                # self.response = urllib.request.urlopen(iri_to_uri(self.request_url)).read()
                self.response = requests.get(iri_to_uri(self.request_url)).text
            # self.response = eval(urllib.request.urlopen(self.request_url).read())
        # logging.error(self.response)
        try:
            self.results = self.response.get('response').get('docs')
        except AttributeError:  # Grouped results...
            # logging.fatal(e)
            # logging.error(self.response)
            try:
                if self.response.get('grouped'):
                    self.results = self.response.get('grouped').get(
                        self.group_field[0]).get('groups')
            except AttributeError:
                pass
        if self.facet == 'true':
            self.facets = self.response.get('facet_counts').get('facet_fields')
            # logging.info(self.facets)
        if len(self.facet_tree) > 0:
            self.tree = self.response.get('facet_counts').get('facet_pivot')
        if self.json_facet:
            # logging.info(self.response.get('facets'))
            self.facets = self.response.get('facets')
        if self.spellcheck == 'true' or self.handler.endswith('suggest'):
            try:
                self.suggestions = self.response.get('spellcheck').get(
                    'suggestions')
            except AttributeError:
                pass
        if self.omitHeader != 'true':
            self.qtime = float(
                self.response.get('responseHeader').get('QTime')) / 1000
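For comparison, a hedged sketch of the core of the request above using a parameter list and requests' built-in gzip handling; the field and handler names are placeholders, and it assumes wt=json so the body can be parsed without eval():

import requests


def solr_select(host, port, application, core, handler, query, rows=10,
                facet_fields=None, filter_queries=None):
    """Minimal Solr select call returning (docs, facet field counts)."""
    url = 'http://%s:%s/%s/%s/%s' % (host, port, application, core, handler)
    params = [
        ('q', query),
        ('wt', 'json'),        # JSON response writer, parsed via .json()
        ('rows', rows),
        ('omitHeader', 'true'),
    ]
    if facet_fields:
        params.append(('facet', 'true'))
        for field in facet_fields:
            params.append(('facet.field', field))
    for fq in (filter_queries or []):
        params.append(('fq', fq))
    # requests sends Accept-Encoding: gzip by default and decompresses the
    # body transparently, so no explicit urllib2/gzip handling is required.
    response = requests.get(url, params=params, timeout=30)
    body = response.json()
    docs = body.get('response', {}).get('docs', [])
    facets = body.get('facet_counts', {}).get('facet_fields', {})
    return docs, facets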
Example #8
# Imports assumed for standalone use; `store` is provided elsewhere.
from flask import redirect, jsonify
from werkzeug.urls import iri_to_uri


def bounce(key):
    try:
        uri = store[key]
        return redirect(iri_to_uri(uri))
    except KeyError:
        # The status goes outside jsonify(); jsonify(body, 400) would serialize 400 into the payload.
        return jsonify({"error": "url not found"}), 400
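A minimal, assumed wiring for the handler above; the Flask app, route and in-memory store are placeholders for whatever the project actually uses:

from flask import Flask, redirect, jsonify
from werkzeug.urls import iri_to_uri

app = Flask(__name__)
store = {"w": "https://example.com/café"}  # key -> stored IRI


@app.route("/<key>")
def bounce(key):
    try:
        return redirect(iri_to_uri(store[key]))
    except KeyError:
        return jsonify({"error": "url not found"}), 400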