import json


def google_search(search_url, suggest_url, title, description, jsonpath=None):
    """Generate an engine definition for a Google search.

    Args:
        search_url (str): Search URL template
        suggest_url (str): Suggest URL template
        title (unicode): Engine title
        description (unicode): Engine description
        jsonpath (unicode, optional): JSONPath for results

    """
    kwargs = {}
    if jsonpath:
        kwargs['jsonpath'] = jsonpath

    data = mkdata(title, description, **kwargs)

    for l in langs():
        s = mkvariant(l.id.lower(), l.name,
                      u'{} ({})'.format(title, l.name),
                      search_url.format(hl=l.id),
                      suggest_url=suggest_url.format(hl=l.id),
                      # lang=l.id.lower(),
                      )
        data['variants'].append(s)

    print(json.dumps(data, sort_keys=True, indent=2))
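# The generators in this section all lean on two helpers, `mkdata` and
# `mkvariant`, that are defined elsewhere in the repo. A minimal sketch of
# what they plausibly look like, inferred purely from the call sites here;
# the real implementations may carry extra keys or validation:

def mkdata(title, description, **kwargs):
    """Return a skeleton engine definition with an empty variant list."""
    data = {
        'title': title,
        'description': description,
        'variants': [],  # filled in by the per-language/per-store loops
    }
    data.update(kwargs)  # e.g. jsonpath='$[*].phrase' or pcencode=True
    return data


def mkvariant(uid, name, title, search_url, suggest_url=None):
    """Return a single engine variant as a `dict`."""
    return {
        'uid': uid,
        'name': name,
        'title': title,
        'search_url': search_url,
        'suggest_url': suggest_url,
    }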
import json

from bs4 import BeautifulSoup as BS


def main():
    """Print Wiktionary engine JSON to STDOUT."""
    data = mkdata('Wiktionary', 'Collaborative dictionary', pcencode=True)

    soup = BS(html(), 'html.parser')

    # Use the 'langlist-large' CSS class for Wiktionaries with
    # 10K+ entries.
    # Class 'langlist' would get Wiktionaries with 100+ entries.
    for div in soup.find_all('div', class_='langlist-large'):
        for link in div.select('ul li a'):
            # log('link=%r', link)
            lang = id_ = link['lang']
            url = 'https:' + link['href']
            name = link.get_text()
            latin = link.get('title')
            if latin:
                name = u'{} / {}'.format(name, latin)

            w = Wiki(id_, url, lang, name)
            # log('%r', w)

            url = SEARCH_URL.format(w=w)
            d = mkvariant(w.lang, w.name,
                          u'Wiktionary ({w.name})'.format(w=w),
                          url,
                          SUGGEST_URL.format(w=w),
                          )
            data['variants'].append(d)

    print(json.dumps(data, sort_keys=True, indent=2))
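# `Wiki`, `SEARCH_URL` and `SUGGEST_URL` are module-level definitions not
# shown here. From the constructor call and attribute access above
# (`Wiki(id_, url, lang, name)`, `w.lang`, `w.name`, `SEARCH_URL.format(w=w)`),
# a plausible sketch -- the URL templates below are illustrative
# placeholders, not the real ones:

from collections import namedtuple

Wiki = namedtuple('Wiki', ['id', 'url', 'lang', 'name'])

# Hypothetical templates; `{{query}}` survives the first `.format()` pass
# as a literal `{query}` placeholder for the workflow to fill in later.
SEARCH_URL = '{w.url}/w/index.php?search={{query}}'
SUGGEST_URL = '{w.url}/w/api.php?action=opensearch&search={{query}}'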
def stores():
    """Amazon variants for different stores.

    Yields:
        dict: Engine variant configuration.

    """
    data = [
        {
            'name': u'United States',
            'tld': 'com',
            'ctld': 'com',
            'market': 1,
        },
        {
            'name': u'United Kingdom',
            'tld': 'co.uk',
            'ctld': 'co.uk',
            'market': 3,
        },
        {
            'name': u'Canada',
            'tld': 'ca',
            'ctld': 'com',
            'market': 7,
        },
        {
            'name': u'Deutschland',
            'tld': 'de',
            'ctld': 'co.uk',
            'market': 4,
        },
        {
            'name': u'France',
            'tld': 'fr',
            'ctld': 'co.uk',
            'market': 5,
        },
        {
            'name': u'España',
            'tld': 'es',
            'ctld': 'co.uk',
            'market': 44551,
        },
        {
            'name': u'Brasil',
            'tld': 'com.br',
            'ctld': 'com',
            'market': 526970,
        },
    ]

    for d in data:
        # log('d=%r', d)
        s = mkvariant(d['tld'], d['name'],
                      u'Amazon {}'.format(d['name']),
                      SEARCH_URL.format(**d),
                      SUGGEST_URL.format(**d),
                      )
        yield s
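# The other generators pair an iterator like `stores()` with a `main()` that
# prints the engine JSON. A minimal usage sketch of how this one is
# presumably driven; the real `main()` (and the description string) may
# differ. The URL templates consume the 'tld', 'ctld' and 'market' keys
# via `.format(**d)`.

import json


def main():
    """Print Amazon engine JSON to STDOUT."""
    data = mkdata(u'Amazon', u'Online shopping search')  # description is a guess
    data['variants'].extend(stores())
    print(json.dumps(data, sort_keys=True, indent=2))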
def lang2search(l):
    """Convert `Lang` to search `dict`."""
    desc = u'Bing ({})'.format(l.name)
    return mkvariant(l.code.lower(), l.name, desc,
                     SEARCH_URL.format(l=l),
                     SUGGEST_URL.format(l=l),
                     )
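# Bing's `Lang` is assumed to be a simple record with `code` and `name`
# fields (note it uses `l.code` where the Google generator uses `l.id`).
# A hypothetical sketch:

from collections import namedtuple

Lang = namedtuple('Lang', ['code', 'name'])

# lang2search(Lang('en-GB', 'English (United Kingdom)')) would yield a
# variant with uid 'en-gb' and title 'Bing (English (United Kingdom))'.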
def yt2search(y):
    """Convert `YT` to search `dict`."""
    desc = u'YouTube ({})'.format(y.name)
    return mkvariant(y.country.lower(), y.name, desc,
                     SEARCH_URL.format(y=y),
                     SUGGEST_URL.format(y=y),
                     )
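# `YT` is assumed to carry at least a `country` code and a display `name`.
# A hypothetical sketch:

from collections import namedtuple

YT = namedtuple('YT', ['country', 'name'])

# yt2search(YT('GB', 'United Kingdom')) -> variant with uid 'gb' and
# title 'YouTube (United Kingdom)'.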
def wiki2search(w):
    """Convert `Wiki` to search `dict`."""
    desc = u'{} (Wikia)'.format(w.name)
    return mkvariant(w.subdomain.lower(), w.name, desc,
                     SEARCH_URL.format(w=w),
                     SUGGEST_URL.format(w=w),
                     )
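# The Wikia `Wiki` record is a different shape from the Wiktionary one
# above: here only a `subdomain` (used as the variant uid) and a `name`
# are needed. A hypothetical sketch:

from collections import namedtuple

Wiki = namedtuple('Wiki', ['subdomain', 'name'])

# wiki2search(Wiki('starwars', u'Wookieepedia')) -> variant with uid
# 'starwars' and title 'Wookieepedia (Wikia)'.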
import json


def main():
    """Print eBay engine JSON to STDOUT."""
    data = mkdata(u'eBay', u'Online auction search')

    for v in variants():
        s = mkvariant(v.uid.lower(), v.name,
                      u'eBay {}'.format(v.name),
                      SEARCH_URL.format(tld=v.tld),
                      SUGGEST_URL.format(site=v.site),
                      )
        data['variants'].append(s)

    print(json.dumps(data, sort_keys=True, indent=2))
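# `variants()` is assumed to yield records with `uid`, `name`, `tld` and
# `site` fields, where `site` is eBay's numeric site ID. A hypothetical
# sketch with illustrative values (0 and 3 follow eBay's published IDs
# for the US and UK, but the real list lives elsewhere in the repo):

from collections import namedtuple

Variant = namedtuple('Variant', ['uid', 'name', 'tld', 'site'])


def variants():
    yield Variant('us', u'United States', 'com', 0)
    yield Variant('uk', u'United Kingdom', 'co.uk', 3)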
import json


def main():
    """Print DDG engine JSON to STDOUT."""
    data = mkdata(u'Duck Duck Go', u'Alternative search engine',
                  jsonpath='$[*].phrase')

    for v in variants():
        s = mkvariant(v.id.lower(), v.name,
                      u'Duck Duck Go {}'.format(v.name),
                      SEARCH_URL.format(kl=v.id),
                      SUGGEST_URL.format(kl=v.id),
                      )
        data['variants'].append(s)

    print(json.dumps(data, sort_keys=True, indent=2))
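# The DDG suggest endpoint returns a JSON array of objects like
# [{"phrase": "python"}, {"phrase": "python tutorial"}], which is what the
# engine-level jsonpath '$[*].phrase' selects. For illustration, the same
# extraction without a JSONPath library (assuming that response shape):

def extract_phrases(response):
    """Pull suggestion strings out of a decoded DDG suggest response."""
    return [d['phrase'] for d in response]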