def __init__(self):
    """Set up default plugin state and a fresh urllib opener."""
    super().__init__()
    self._kind = self.DEFAULT_KIND
    self._no_nag = False
    self._newest_version = kp.version()
    self._newest_release = None
    self._url_opener = kpn.build_urllib_opener()
def generate_cache(self):
    """Refresh the local gitmoji cache file when it is stale.

    Returns False when the cache is still fresh or the download failed;
    otherwise writes a new cache file (implicit None on success, matching
    the original return contract).
    """
    cache_path = self.get_cache_path()

    should_generate = False
    try:
        last_modified = datetime.fromtimestamp(
            os.path.getmtime(cache_path)).date()
        # FIX: age must be today - mtime (positive days); the original
        # subtracted in the other order, which is always negative, so the
        # cache was never considered stale once written.
        if (datetime.today().date() - last_modified).days > self.DAYS_KEEP_CACHE:
            should_generate = True
    except Exception:
        # cache file missing or mtime unreadable: (re)generate it
        should_generate = True

    if not should_generate:
        return False

    try:
        opener = kpnet.build_urllib_opener()
        with opener.open(self.GITMOJI_URL) as request:
            response = request.read()
    except Exception as exc:
        self.err(
            "Could not reach the gitmoji repository file to generate the cache: ",
            exc)
        # FIX: bail out; the original fell through and crashed with a
        # NameError on the undefined 'response'.
        return False

    data = json.loads(response)
    with open(cache_path, "w") as index_file:
        json.dump(data, index_file, indent=2)
def query(self, plugin, search_terms):
    """Run the configured web API query for *search_terms* and return the
    parsed list of suggestions (str)."""
    # build the connection opener, with custom headers when configured
    url_opener = kpnet.build_urllib_opener()
    if self.api_headers:
        # copy so the opener never shares the configured list
        url_opener.addheaders = self.api_headers[:]

    # values substituted into the api_base/api_args templates
    subst = {
        'terms': search_terms,
        'time': str(int(time.time()))}
    url = self._fill_placeholders(self.api_base, urllib.parse.quote, **subst)

    post_data = None
    if self.api_args:
        args_str = self._cook_args(self.api_args, **subst)
        if self.api_method.lower() == "post":
            post_data = args_str.encode("utf-8")
        else:
            url += "?" + args_str

    # perform the request
    with url_opener.open(url, data=post_data) as conn:
        raw = conn.read()

    # hand the raw response to the configured parser
    return self.api_parser(plugin, self, raw)
def on_events(self, flags):
    """Handle plugin events: reload the config (and re-catalog) on
    PACKCONFIG, rebuild the urllib opener on NETOPTIONS."""
    if flags & kp.Events.PACKCONFIG:
        self._read_config()
        self.on_catalog()
    if flags & kp.Events.NETOPTIONS:
        self._url_opener = kpn.build_urllib_opener()
def forgeRequest(self, url, typ, dataRaw):
    """Open *url* with the instance's auth headers attached.

    When *typ* is 'POST', *dataRaw* is urlencoded and sent as the request
    body; otherwise a plain GET is issued. Returns the open response.
    """
    opener = kpnet.build_urllib_opener()
    opener.addheaders = [
        ("X-Auth-Token", str(self.AUTH)),
        ("X-User-Id", str(self.USER_ID)),
    ]
    if typ != 'POST':
        return opener.open(url)
    payload = urllib.parse.urlencode(dataRaw).encode()
    return opener.open(urllib.request.Request(url, data=payload))
def on_suggest(self, user_input, items_chain):
    """Query the Youdao translate API for *user_input* and push suggestions.

    Bails out when there is no input or item chain, while the user is still
    typing (debounce), or when the initial item is not a keyword. On API
    failure an error item is shown instead of results.
    """
    if not user_input:
        return
    if not items_chain:
        return
    initial_item = items_chain[0]
    # debounce: avoid a network round-trip on every keystroke
    if self.should_terminate(0.25):
        return
    if initial_item.category() != kp.ItemCategory.KEYWORD:
        return

    suggestions = []
    origin_word = user_input.strip()
    word = urllib.parse.quote_plus(origin_word)
    results = []
    try:
        # get translated version of terms
        opener = kpnet.build_urllib_opener()
        opener.addheaders = [("User-agent", self.API_USER_AGENT)]
        # API signature: md5(key + word + random + keyfrom)
        rnum = str(random.randint(0, 10000))
        sign = self.get_md5(self._key + origin_word + rnum + self._keyfrom)
        url = self.URL_YOUDAO.format(word, rnum, sign)
        # FIX: removed leftover debug print(url)
        with opener.open(url) as conn:
            response = conn.read()
        if self.should_terminate():
            return
        results = self._parse_api_response(response)
    except urllib.error.HTTPError as exc:
        # FIX: no early return here — the original returned before
        # set_suggestions, so the error item was created but never shown
        suggestions.append(
            self.create_error_item(label=user_input, short_desc=str(exc)))
    except Exception as exc:
        suggestions.append(
            self.create_error_item(label=user_input,
                                   short_desc="Error: " + str(exc)))
        traceback.print_exc()

    for idx, res in enumerate(results):
        suggestions.append(
            self.create_item(category=self.ITEMCAT_RESULT,
                             label=str(res['translation']),
                             short_desc=str(res['description']),
                             target=str(idx) + str(res['translation']),
                             args_hint=kp.ItemArgsHint.REQUIRED,
                             hit_hint=kp.ItemHitHint.IGNORE,
                             icon_handle=self._icon,
                             data_bag=kpu.kwargs_encode(
                                 word=word,
                                 translation=res['translation'])))

    if suggestions:
        self.set_suggestions(suggestions, kp.Match.ANY, kp.Sort.NONE)
def _parse_ui5_doc(self):
    """Download the UI5 api-index JSON and parse each top-level symbol."""
    url = "https://sapui5.hana.ondemand.com/docs/api/api-index.json"
    opener = kpnet.build_urllib_opener()
    with opener.open(url) as response_file:
        payload = response_file.read()
    api_index = json.loads(payload.decode('utf-8'))
    for symbol in api_index['symbols']:
        self._parse_ui5_doc_node(symbol)
def on_suggest(self, user_input, items_chain):
    """Translate the current query through the web API and push the
    resulting suggestions (plus an error item on failure)."""
    if not items_chain:
        return
    leaf_item = items_chain[-1]
    if leaf_item.category() != self.ITEMCAT_TRANSLATE:
        return

    suggestions = []

    # merge the item's stored query args with the live user input
    query = self._parse_and_merge_input(leaf_item, user_input)

    if query['lang_in'] and query['lang_out'] and len(query['terms']):
        # debounce: skip the network round-trip while the user is typing
        if self.should_terminate(0.25):
            return
        api_results = []
        try:
            # get translated version of terms
            opener = kpnet.build_urllib_opener()
            opener.addheaders = [("User-agent", self.API_USER_AGENT)]
            url = self._build_api_url(query['lang_in'], query['lang_out'],
                                      query['terms'])
            with opener.open(url) as conn:
                raw = conn.read()
            if self.should_terminate():
                return
            # parse response from the api
            api_results = self._parse_api_response(raw, query['lang_in'])
        except urllib.error.HTTPError as exc:
            suggestions.append(
                self.create_error_item(label=user_input, short_desc=str(exc)))
        except Exception as exc:
            suggestions.append(
                self.create_error_item(label=user_input,
                                       short_desc="Error: " + str(exc)))
            traceback.print_exc()

        # one suggestion per translation returned by the api
        for res in api_results:
            suggestions.append(
                self._create_result_item(lang_in=res['lang_in'],
                                         lang_out=query['lang_out'],
                                         search_terms=query['terms'],
                                         search_result=res['result']))

    if suggestions:
        self.set_suggestions(suggestions, kp.Match.ANY, kp.Sort.NONE)
def _build_urlopener(self):
    """Creates an urllib opener with 2 custom handlers and returns it"""
    self.dbg("Building urlopener")
    # identify as "<app>/<version> python-urllib/<major>.<minor>.<micro>"
    user_agent = "{}/{} python-{}/{}.{}.{}".format(
        kp.name(), kp.version_string(), urllib.__name__,
        *sys.version_info[:3])
    opener = kpn.build_urllib_opener(extra_handlers=[RedirectorHandler()])
    opener.addheaders = [("Accept-Encoding", "gzip"),
                         ("User-Agent", user_agent)]
    return opener
def _on_suggest_translate(self, user_input, items_chain, current_item):
    """Query the translation API for the current search text and build
    result/candidate suggestions.

    Debounces while the user is typing and shows an error item when the
    request fails.
    """
    suggestions = []
    self._query = self._extract_search_query(current_item, user_input)
    self.dbg(self._query)
    if len(self._query.text):
        # debounce: avoid a network round-trip on every keystroke
        if self.should_terminate(self.DEFAULT_IDLE_TIME):
            return
        self._result = self.Result(False, '', '')
        self._words = []
        try:
            opener = kpnet.build_urllib_opener()
            req = self._build_api_request(self._query)
            with opener.open(req) as conn:
                response = conn.read()
            if self.should_terminate():
                return
            self._result, self._words = self._parse_api_response(response)
            self.dbg(self._result, self._words)
        except urllib.error.HTTPError as exc:
            suggestions.append(
                self.create_error_item(label=user_input,
                                       short_desc=str(exc)))
        except Exception as exc:
            suggestions.append(
                self.create_error_item(label=user_input,
                                       short_desc="Error: " + str(exc)))
            traceback.print_exc()
        suggestions.append(
            self._create_result_item(self._query, self._result))
        word = self._get_current_word(items_chain)
        # FIX: also require a non-empty word list — after a failed request
        # self._words is [] and self._words[-1] raised IndexError.
        if word and self._words:
            is_last = (word == self._words[-1])
            suggestions.extend(
                self._create_candidate_items(self._query,
                                             self._result.successful,
                                             word, "", is_last))
    if suggestions:
        self.set_suggestions(suggestions, kp.Match.ANY, kp.Sort.NONE)
def _fetch_text(self, type, ammount, start):
    """Fetch generated lorem-ipsum text from lipsum.com.

    *type*, *ammount* and *start* are passed through as query string values
    (parameter names kept for interface compatibility, including the
    'ammount' spelling and the builtin-shadowing 'type').
    Returns the generated text, or a fallback error message string on any
    failure (best-effort contract preserved).
    """
    try:
        opener = kpnet.build_urllib_opener()
        opener.addheaders = [
            ('User-Agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)'),
        ]
        url = (self.LOREM_IPSUM_API + "?amount=" + ammount +
               "&start=" + start + "&what=" + type)
        with opener.open(url) as request:
            response = request.read().decode(encoding="utf-8",
                                             errors="strict")
        data = json.loads(response)
        return data['feed']['lipsum']
    except Exception:
        # FIX: narrowed from a bare 'except:', which also swallowed
        # SystemExit/KeyboardInterrupt
        return "lipsum.com website could not be reached!"
def load_from_url(self):
    """Fetch rates from the cache server and return (currencies, timestamp)."""
    self.plugin.info("loading from cache server...")
    opener = kpnet.build_urllib_opener()
    opener.addheaders = [("User-agent", "Mozilla/5.0")]
    with opener.open(self.build_request()) as conn:
        raw = conn.read()
    payload = json.loads(raw)
    # one {'name', 'price'} record per currency symbol
    currencies = {
        symbol: {'name': symbol, 'price': price}
        for symbol, price in payload['rates'].items()
    }
    return currencies, payload['timestamp']
def generate_cache(self):
    """Regenerate the per-page GitLab project cache files when stale.

    Pages through /api/v4/projects (100 per page) and writes one JSON cache
    file per page. Returns True when the cache is still fresh or was
    rebuilt, False when a request failed.
    """
    self.dbg("generate_cache user", self.TOKEN, self.DOMAIN)
    # NOTE(review): unused in this method; kept in case get_cache_path has
    # side effects — confirm and drop if not
    cache_path_c = self.get_cache_path(self.PREFIX)
    should_generate = False
    cache_path = self.get_package_cache_path(True)
    self.dbg(cache_path)

    # locate an existing cache file for this plugin instance
    file = None
    for i in os.listdir(cache_path):
        self.dbg('Find', i)
        if (os.path.isfile(os.path.join(cache_path, i))
                and (self.PREFIX + self.TITLE.lower()) in i):
            file = os.path.join(cache_path, i)
            self.dbg('file', file)
            break

    try:
        last_modified = datetime.fromtimestamp(os.path.getmtime(file)).date()
        # FIX: age is today - mtime; the original subtracted in the other
        # order (always negative), so the cache was never refreshed.
        if (datetime.today().date() - last_modified).days > self.DAYS_KEEP_CACHE:
            should_generate = True
    except Exception:
        # no cache file found (file is None) or mtime unreadable
        should_generate = True

    if not should_generate:
        return True

    opener = kpnet.build_urllib_opener()
    urlChannels = urljoin(
        self.DOMAIN,
        '/api/v4/projects?owned=true&per_page=100&private_token=' + self.TOKEN)
    offset = 1
    total = self.LIMIT
    while offset < total:
        try:
            with opener.open(urlChannels + '&page=' + str(offset)) as request:
                response = request.read()
            data = json.loads(response)
            if len(data) < 10:
                # short page means the listing is exhausted; this is the
                # last page to write
                total = offset
            self.dbg(offset, total)
            with open(self.get_cache_path(str(offset) + self.PREFIX),
                      "w") as index_file:
                json.dump(data, index_file, indent=2)
            offset = offset + 1
        except Exception as exc:
            self.err("Could not reach the entries to generate the cache: ",
                     exc)
            # FIX: bail out — the original neither advanced offset nor
            # exited, retrying the same page forever on persistent errors
            return False
    return offset > 0
def on_start(self):
    """Initialize plugin resources: logo, cache locations, the shared
    opener, actions for result items, and the initial popular suggestions."""
    self.logo = 'res://%s/%s' % (self.package_full_name(), 'reddit.png')
    self.CACHE = "cache://" + self.package_full_name()
    self.PREVIEW_PATH = self.get_package_cache_path(create=True)
    self.opener = kpnet.build_urllib_opener()

    open_action = self.create_action(
        name=self.ACTION_OPEN_URL,
        label="Read",
        short_desc="Opens the subreddit in a browser")
    copy_action = self.create_action(
        name=self.ACTION_COPY_URL,
        label="Copy",
        short_desc="Copy the URL of subreddit into clipboard")
    self.set_actions(self.ITEMCAT_RESULT, [open_action, copy_action])

    self._popular_suggestions()
def get_jobs(self, urlChannels):
    """Download the Jenkins job listing at *urlChannels*, cache it to disk,
    and recurse into every folder-type job."""
    try:
        opener = kpnet.build_urllib_opener()
        self.dbg(urlChannels)
        with opener.open(urlChannels) as request:
            raw = request.read()
        payload = json.loads(raw)
        # one cache file per request, keyed by the current timestamp
        cache_name = str(int(round(time.time()))) + self.PREFIX
        with open(self.get_cache_path(cache_name), "w") as index_file:
            json.dump(payload, index_file, indent=2)
        for job in payload['jobs']:
            if "folder" in job['_class']:
                self.get_jobs(self.get_url_channel(job['url']))
    except Exception as exc:
        self.err("Could not reach the entries to generate the cache: ", exc)
def load_from_url(self):
    """Fetch the latest rates from the API and return (currencies, timestamp)."""
    self.plugin.info("loading from API...")
    opener = kpnet.build_urllib_opener()
    params = {'app_id': self.APPID, 'show_alternative': True}
    request_url = self.build_request(params)
    with opener.open(request_url) as conn:
        raw = conn.read()
    payload = json.loads(raw)
    # one {'name', 'price'} record per currency symbol
    currencies = {
        symbol: {'name': symbol, 'price': price}
        for symbol, price in payload['rates'].items()
    }
    return currencies, payload['timestamp']
def load_from_url(self):
    """Fetch the currency feed and rebuild the symbol -> rate table, then
    refresh last_update and the secondary data."""
    print("loading from url...")
    opener = kpnet.build_urllib_opener()
    opener.addheaders = [("User-agent", "Mozilla/5.0")]
    with opener.open(self.url) as conn:
        raw = conn.read()
    resources = json.loads(raw)['list']['resources']
    self._currencies = {}
    for entry in resources:
        fields = entry['resource']['fields']
        # symbol is the first three characters, e.g. "EUR" from "EUR=X";
        # names are reported as "USD/<name>"
        self._currencies[fields['symbol'][0:3]] = {
            'name': fields['name'].replace('USD/', ''),
            'price': float(fields['price']),
        }
    self.last_update = datetime.now()
    self._load_secondary_data()
def on_events(self, flags):
    """Rebuild the urllib opener whenever the network options change."""
    if not (flags & kp.Events.NETOPTIONS):
        return
    self._urlopener = kpnet.build_urllib_opener()
def _make_request(self, query, lang):
    """GET the API resource for *query* in *lang* and return the raw bytes."""
    endpoint = self.API_URL + lang + '/' + urllib.parse.quote(query)
    with kp_net.build_urllib_opener().open(endpoint) as conn:
        return conn.read()
def __init__(self):
    """Initialize the plugin and create its urllib opener."""
    super().__init__()
    self._urlopener = kpnet.build_urllib_opener()
def on_start(self):
    """Create the shared opener and ask servers for gzip-compressed bodies."""
    opener = kpnet.build_urllib_opener()
    opener.addheaders = [('Accept-Encoding', 'gzip')]
    self.opener = opener