def postPrams(option, url, username, password):
    # Interactively build a POST request via zenity dialogs. Recursion acts as
    # the "loop": each 'Add parameter' choice re-enters this function; chosen
    # name/value pairs accumulate in the module-global `parameters` dict.
    global parameters
    # NOTE(review): echo-in-backticks through a shell is fragile; only the
    # last output line is treated as the user's menu selection.
    requestMenu = subprocess.getoutput("echo `zenity --list --title='PTTP' --text='Build your request:' --column='Option' 'Add parameter' 'Send request'`")
    requestMenu = requestMenu.split("\n")
    requestMenu = requestMenu[len(requestMenu) - 1]
    if requestMenu == "Add parameter":
        parameter = addParameter()
        if parameter != "":
            # addParameter() is expected to return "name|value"
            parameter = parameter.split("|")
            name = parameter[0]
            value = parameter[1]
            parameters[name] = value
            postPrams(option, url, username, password)
        else:
            # empty/cancelled dialog: just re-prompt
            postPrams(option, url, username, password)
    elif requestMenu == "Send request":
        request("POST", url, username, password, parameters)
        # Plain-text auth modes display the raw response; others render HTML.
        if option == "No authentication (plain text)" or option == "Basic authentication (plain text)":
            os.system("zenity --text-info --title='PTTP' --filename='response.txt'")
        else:
            os.system("zenity --text-info --html --title='PTTP' --filename='response.txt'")
        os.remove("response.txt")
        parameters = {}
        menu()
    else:
        # dialog cancelled: return to the main menu
        menu()
def register(self, identity: Identity, public_key: PKey):
    """Upload *public_key* for *identity* to the key list endpoint.

    Raises DuplicatePublicKeyError when the server responds 422 with a JSON
    error whose ``field`` is ``key`` and whose message is
    "key is already in use"; any other HTTP error is re-raised unchanged.
    """
    title = get_key_fingerprint(public_key)
    data = json.dumps({
        'title': title,
        'key': format_openssh_pubkey(public_key)
    })
    try:
        request(identity, self.LIST_URL, 'POST', data=data.encode())
    except urllib.request.HTTPError as e:
        # Only 422 Unprocessable Entity is inspected further.
        if e.code != 422:
            raise
        content_type = e.headers.get('Content-Type')
        mimetype, options = parse_options_header(content_type)
        # Non-JSON 422 bodies cannot be interpreted; re-raise as-is.
        if mimetype != 'application/json':
            raise
        charset = options.get('charset', 'utf-8')
        response = json.loads(e.read().decode(charset))
        for error in response.get('errors', []):
            if not isinstance(error, dict):
                continue
            elif error.get('field') != 'key':
                continue
            message = error.get('message', '').strip().lower()
            if message != 'key is already in use':
                continue
            # Duplicate key: surface as a dedicated exception type.
            raise DuplicatePublicKeyError(message)
        # 422 but not a recognised duplicate-key error: re-raise the original.
        raise
def _ensure_ddoc_exists(self):
    # Create or refresh the 'hypothesis' design document, at most once per
    # instance (guarded by the _ensured_ddoc_exists flag).
    if self._ensured_ddoc_exists:
        return
    try:
        remote_ddoc = request('GET', url(self.url, '_design', 'hypothesis'),
                              headers=self.headers)
    except urllib.request.HTTPError as err:
        # Anything other than "not found" is a real failure.
        if err.code != 404:
            raise
        # 404: design document missing -> create it from the local definition.
        request('PUT', url(self.url, '_design', 'hypothesis'),
                data=json.dumps(self.ddoc()).encode(),
                headers=self.headers)
    else:
        # Design doc exists: push our by_key view only if it has drifted.
        local_ddoc = self.ddoc()
        expected_view = local_ddoc['views']['by_key']
        stored_view = remote_ddoc.get('views', {}).get('by_key', {})
        if expected_view != stored_view:
            remote_ddoc.setdefault('views', {})
            remote_ddoc['views']['by_key'] = expected_view
            request('PUT', url(self.url, '_design', 'hypothesis'),
                    data=json.dumps(remote_ddoc).encode(),
                    headers=self.headers)
    self._ensured_ddoc_exists = True
def create_account(strategy, backend, user, response, details, is_new=False, *args, **kwargs):
    # python-social-auth pipeline step: on first login, create an Account for
    # the user and fetch a profile picture from the auth provider.
    if is_new:
        acc = Account.objects.create(user=user)
        acc.verified = True
        acc.save()
        if backend.name == "facebook":
            print(str(response))
            url = "http://graph.facebook.com/{0}/picture".format(response["id"])
            resp = request("GET", url, params={"type": "large"})
            acc.profile_pic.save(str(acc.user.id) + ".jpg", ContentFile(resp.content))
            acc.save()
        elif backend.name == "twitter":
            print(str(response))
            # "_normal" marks the thumbnail variant; strip it for full size.
            url = response["profile_image_url"]
            url = url.replace("_normal", "")
            resp = request("GET", url, params={})
            acc.profile_pic.save(str(acc.user.id) + ".jpg", ContentFile(resp.content))
            acc.save()
        elif backend.name == "google-oauth2":
            # request a 250px avatar instead of the default 50px
            url = response["image"]["url"]
            url = url.replace("?sz=50", "?sz=250")
            resp = request("GET", url, params={})
            acc.profile_pic.save(str(acc.user.id) + ".jpg", ContentFile(resp.content))
            acc.save()
def _callbackConnect(self, params, error=False, server=None, **kwargs):
    """
    Callback after /version response. Continue execution of query

    :param method: HTTP method
    :param path: Remote path
    :param body: params to send (dictionary or pathlib.Path)
    :param original_context: Original context
    :param callback: callback method to call when the server replies
    """
    if error is not False:
        # Transport error: retry a few times before failing waiting queries.
        if self._retry < self.getMaxRetryConnection():
            self._retryConnection(server=server)
            return
        for request, callback in self._query_waiting_connections:
            if callback is not None:
                self._connectionError(callback)
        return
    if "version" not in params or "local" not in params:
        # Payload doesn't look like a GNS3 /version response.
        if self._retry < self.getMaxRetryConnection():
            self._retryConnection(server=server)
            return
        msg = "The remote server {} is not a GNS3 server".format(self.url())
        log.error(msg)
        for request, callback in self._query_waiting_connections:
            if callback is not None:
                callback({"message": msg}, error=True, server=server)
        self._query_waiting_connections = []
        return
    if params["version"].split("-")[0] != __version__.split("-")[0]:
        msg = "Client version {} is not the same as server (controller) version {}".format(__version__, params["version"])
        # Stable release
        if __version_info__[3] == 0:
            log.error(msg)
            for request, callback in self._query_waiting_connections:
                if callback is not None:
                    callback({"message": msg}, error=True, server=server)
            return
        # We don't allow different major version to interact even with dev build
        elif parse_version(__version__)[:2] != parse_version(params["version"])[:2]:
            log.error(msg)
            for request, callback in self._query_waiting_connections:
                if callback is not None:
                    callback({"message": msg}, error=True, server=server)
            return
        # NOTE(review): this warning string was wrapped across source lines in
        # the original; reconstructed here — confirm against upstream.
        log.warning("{}\nPlease upgrade or use at your own risk.".format(msg))
    self._connected = True
    self._retry = 0
    self.connection_connected_signal.emit()
    # Flush queries that were queued while waiting for the connection.
    for request, callback in self._query_waiting_connections:
        if request:
            request()
    self._query_waiting_connections = []
def _callbackConnect(self, params, error=False, server=None, **kwargs):
    """
    Callback after /version response. Continue execution of query

    :param method: HTTP method
    :param path: Remote path
    :param body: params to send (dictionary or pathlib.Path)
    :param original_context: Original context
    :param callback: callback method to call when the server replies
    """
    if error is not False:
        # Transport error: retry a few times before failing waiting queries.
        if self._retry < self.MAX_RETRY_CONNECTION:
            self._retryConnection(server=server)
            return
        for request, callback in self._query_waiting_connections:
            if callback is not None:
                self._connectionError(callback)
        return
    if "version" not in params or "local" not in params:
        # Payload doesn't look like a GNS3 /version response.
        if self._retry < self.MAX_RETRY_CONNECTION:
            self._retryConnection(server=server)
            return
        msg = "The remote server {} is not a GNS3 server".format(self.url())
        log.error(msg)
        for request, callback in self._query_waiting_connections:
            if callback is not None:
                callback({"message": msg}, error=True, server=server)
        self._query_waiting_connections = []
        return
    if params["version"].split("-")[0] != __version__.split("-")[0]:
        msg = "Client version {} differs with server version {}".format(__version__, params["version"])
        log.error(msg)
        # Stable release
        if __version_info__[3] == 0:
            for request, callback in self._query_waiting_connections:
                if callback is not None:
                    callback({"message": msg}, error=True, server=server)
            return
        # We don't allow different major version to interact even with dev build
        elif parse_version(__version__)[:2] != parse_version(params["version"])[:2]:
            for request, callback in self._query_waiting_connections:
                if callback is not None:
                    callback({"message": msg}, error=True, server=server)
            return
        log.warning("Use a different client and server version can create bugs. Use it at your own risk.")
    self._connected = True
    self._retry = 0
    self.connection_connected_signal.emit()
    # Flush queries that were queued while waiting for the connection.
    for request, callback in self._query_waiting_connections:
        if request:
            request()
    self._query_waiting_connections = []
def save(self, key: str, value: list):
    """Store *value* under *key* as a fresh 'example' document (random id)."""
    self._ensure_setup()
    document = {'key': format_key(key), 'value': value, 'type': 'example'}
    request(
        method='PUT',
        url=url(self.url, str(uuid.uuid4())),
        data=json.dumps(document).encode(),
        headers=self.headers)
def _ensure_db_exists(self):
    """Lazily create the database: a 404 on GET triggers a PUT, once."""
    if self._ensured_db_exists:
        return
    try:
        request('GET', url(self.url), headers=self.headers)
    except urllib.request.HTTPError as err:
        if err.code == 404:
            request('PUT', url(self.url), headers=self.headers)
        else:
            raise
    self._ensured_db_exists = True
def licht_toggle(q, wildcards):
    """Toggle every lamp whose title matches wildcards[0] (case-insensitive).

    Fetches the receiver list as JSON, switches each matching receiver, and
    prints a German status message (unknown lamp / lamp toggled).
    """
    receivers = json.loads(request("http://zimmer:2525/remote/list"))  # JSON request
    found = False
    # FIX: the original used `enumerate(receivers)` but only ever used the
    # dict key (bound to `val`) and shadowed the builtin `id` with the unused
    # index. Iterate the keys directly.
    for receiver_id in receivers:
        if receivers[receiver_id]['title'].lower() == wildcards[0]:
            found = True
            request("http://zimmer:2525/remote/switch?id=" + str(receiver_id))
    if not found:
        print("Diese Lampe kenne ich nicht")
    else:
        print("Ich habe die gewuenschte Lampe umgeschaltet")
def verify(name, type, content, package=package, project=project, parent=parent, endpoint=endpoint):
    """Stream verification results for a 'verify2' job from the API.

    Yields {'id': ..., 'result': code} while the server reports status
    'processing'. Result codes: 0=Proved, 1=Timeout, 2=Skipped, 3=other.
    Raises ResolveCompilerError/ResolveAPIError when the final status is not
    'complete'.
    """
    # send job verify2 to api
    sock = request('verify2', name, type, content, package, project, parent, endpoint)
    # decode result into sane json
    resp = decode(sock.recv())
    while resp['status'] == 'processing':
        # parse results into a number indication prove result
        if 'Proved' in resp['result']['result']:
            result = 0
        elif 'Timeout' in resp['result']['result']:
            result = 1
        elif 'Skipped' in resp['result']['result']:
            result = 2
        else:
            result = 3
        # yield verification with id and result
        yield {'id': resp['result']['id'], 'result': result}
        # decode result into sane json
        resp = decode(sock.recv())
    # make sure job completed
    if resp['status'] != 'complete':
        try:
            raise ResolveCompilerError(resp['errors'][0]['errors'])
        except KeyError:
            # no 'errors' key present -> report API-side bugs instead
            raise ResolveAPIError(resp['bugs'][0]['bugs'])
def login(key, resp):
    # Log in to target `key` using its config-driven URL, encoding and query
    # template; returns True on (assumed) success and persists cookies.
    success = False
    resp = request(
        config["target"][key]["url_login"],
        config["target"][key]["encoding"],
        # The query template may reference any of: plain/quoted credentials,
        # MD5 of the password in the target encoding or UTF-8, and per-forum
        # anti-CSRF hashes.
        config["target"][key]["query_login"].format(
            username=urllib.parse.quote_plus(config["target"][key]["username"]),
            password=urllib.parse.quote_plus(config["target"][key]["password"]),
            pwdmd5=hashlib.md5(config["target"][key]["password"].encode(config["target"][key]["encoding"])).hexdigest(),
            pwdmd5_utf=hashlib.md5(config["target"][key]["password"].encode("utf-8")).hexdigest(),
            formhash=formhash(key, resp),
            smfhash=smfhash(key, resp),
        ),
    )
    if not resp:
        # no response body at all
        prtmsg("err_login_fail")
        return False
    if config["target"][key].get("regex_login_success"):
        if re.search(config["target"][key]["regex_login_success"], resp):
            prtmsg("msg_login_success", key)
            success = True
        elif config["target"][key].get("regex_login_fail"):
            # try to extract a human-readable failure reason
            match = re.search(config["target"][key]["regex_login_fail"], resp)
            if match:
                prtmsg("err_login_fail_reason", key, reason=msg_cleanup(groupsel(match, key, "login_fail")))
            else:
                prtmsg("err_login_fail", key)
        else:
            prtmsg("err_login_fail", key)
    else:
        # no success regex configured: optimistically assume it worked
        prtmsg("msg_login_unknown", key)
        success = True
    save_cookies()
    return success
def apirequest(params):
    """POST *params* to the wiki's api.php and return decoded JSON (or None)."""
    host = re.sub(r'wiki$', '', PREFIX)
    url = 'https://' + host + '.wikipedia.org/w/api.php'
    params['format'] = 'json'
    encoded = urllib.parse.urlencode(params).encode('utf-8')
    data = request(url, encoded)
    if data is None:
        return None
    return json.loads(data.decode('utf-8'))
def save(links, dirName, img_type, image_links=False):
    # Download each page's image into dirName, numbered 001..N.
    # image_links=True means `links` holds direct image URLs; otherwise each
    # link is an HTML page whose image URL is scraped with a site-specific regex.
    for i in range(len(links)):
        img_name = '{}{:03}.{}'.format(dirName, i+1, img_type)
        # skip files already downloaded under either extension
        if not os.path.exists(img_name.replace('.jpg', '.png')) and not os.path.exists(img_name.replace('.png', '.jpg')):
            print('\r Downloading {0} of {1}'.format(*(i+1, len(links))), end="")
            if image_links:
                img_url = links[i]
            elif 'bato.to' in links[i]:
                img_url = re.search('<div.*?>\\s*<img[^<]*?src=\"([^\"]*?)\"[^>]*?/>\\s*</div>', get_html(links[i]), re.DOTALL|re.MULTILINE).group(1)
            elif 'goodmanga.net' in links[i]:
                img_url = re.search('</div>\\s*<a.*?>\\s*<img[^<]*?src=\"(.*?)\".*?>\\s*</a>', get_html(links[i]), re.DOTALL|re.MULTILINE).group(1)
            else:
                img_url = re.search('<a.*?>\\s*<img[^<]*?src=\"(.*?)\".*?>\\s*</a>', get_html(links[i]), re.DOTALL|re.MULTILINE).group(1)
            # Retry: two passes of up to 7 attempts each. On bato.to the file
            # extension is flipped jpg<->png before the second pass; the last
            # attempt of the second pass re-raises.
            for j in range(2):
                for k in range(7):
                    try:
                        data = request(img_url)
                        break
                    except:
                        if j == 0 and k == 6 and 'bato.to' in img_url:
                            if img_url.endswith('png'):
                                img_url = re.sub('png$', 'jpg', img_url)
                                img_name = '{}{:03}.{}'.format(dirName, i+1, 'jpg')
                            else:
                                img_url = re.sub('jpg$', 'png', img_url)
                                img_name = '{}{:03}.{}'.format(dirName, i+1, 'png')
                        if j == 1 and k == 6:
                            raise
                        pass
                    # NOTE(review): exact nesting of this delay (per attempt vs
                    # per pass) reconstructed from a collapsed source line —
                    # confirm against the original file.
                    time.sleep(1.7)
            with open(img_name, 'wb') as f:
                f.write(data)
    print()
def get_page_by_id(page_id):
    """Fetch a page's latest revision; return (title, page_id, content)."""
    raw = request({
        'action': 'query',
        'pageids': page_id,
        'prop': 'revisions',
        'rvprop': 'content',
        'rvlimit': 1,
        'format': 'json',
        'grnnamespace': 0,
    })
    data = json.loads(raw.decode('utf-8'))
    page_id, page_data = data['query']['pages'].popitem()
    title = page_data['title']
    content = page_data['revisions'][0]['*']
    return (title, page_id, content)
def addChina():
    """CNR (China National Radio): scrape stream URLs and names, add links."""
    page = request('http://bfq.cnr.cn/zhibo/')
    stream_urls = re.compile('http://.*?/playlist\.m3u8').findall(page)
    names = re.compile('(?<!-)<td><ahref="javascript:onclick=changeTab1\(\d*?\);">(.*?)</a></td>').findall(page)
    for index, stream in enumerate(stream_urls):
        addLink('%d.%s' % (index + 1, names[index]), stream)
def getRankSong(url):
    """Parse the song-list page; return [(title_bytes, MODE_SONG, href), ...]."""
    tree = request(url)
    song_list = tree.find('div', {'class': 'songList'})
    return [
        (anchor.text.encode('utf-8'), MODE_SONG, anchor['href'])
        for ul in song_list.find_all('ul')
        for anchor in ul.find_all('a')
    ]
def request_and_parse(url, query=None, method=None):
    """Issue the request and return the JSON-decoded body."""
    # skip cert verification for PR (because who knows why)
    if getattr(ssl, '_create_unverified_context', None):
        ssl._create_default_https_context = ssl._create_unverified_context
    return json.loads(request(url, query, method=method))
def get_request(key, secret, path):
    """Issue a signed GET request.

    The signature is HMAC-SHA512 over "<path>\\n<millis>\\n" (empty body),
    base64-encoded — mirroring post_request(), which signs
    "<path>\\n<millis>\\n<postData>".
    """
    nowInMilisecond = str(int(time.time() * 1000))
    # BUG FIX: the payload was the *literal string*
    # 'path + "\n" + nowInMilisecond + "\n"' instead of the evaluated
    # expression, so every GET was signed with the same constant bytes.
    stringToSign = (path + "\n" + nowInMilisecond + "\n").encode()
    signature = base64.b64encode(
        hmac.new(secret, stringToSign, digestmod=hashlib.sha512).digest())
    return request('get', key, signature, nowInMilisecond, path, None)
def do_request(url, method):
    """Open *url* with the given HTTP method; return the response, or None
    after printing a colored error line on any failure."""
    try:
        opener = request()
        req = urllib.request.Request(url, method=method)
        return opener.open(req, timeout=10)
    except Exception as error:
        print("\033[1;31;40m\t\t\t+Error: ", error, url, "\033[0m")
def telerstr(self, regionstr):
    """Fetch the wanted-job list for a region from the work.go.kr open API."""
    region_query = "&region=" + str(regionstr)
    xml_text = request('http://openapi.work.go.kr/opi/opi/opia/wantedApi.do?authKey=WNKAHJXAWPT27BR8CVH0M2VR1HK&callTp=L&returnType=XML&startPage=1&display=10' + region_query)
    return extractXmlData(xml_text)
def opensearch(self):
    """Search results: fetch open-employment postings and refill the list box."""
    xml_text = request('http://openapi.work.go.kr/opi/opi/opia/dhsOpenEmpInfoAPI.do?authKey=WNKAHJXAWPT27BR8CVH0M2VR1HK&callTp=L&returnType=XML&startPage=1&display=15')
    self.opens = extractXmlOpenData(xml_text)
    self.openbox.delete(0, END)
    for entry in self.opens:
        self.openbox.insert(END, entry.pirntstrOpen())
def supportsearch(self):
    """Search results: fetch employment-support programs and refill the list box."""
    xml_text = request('http://openapi.work.go.kr/opi/opi/opia/jynEmpSptListAPI.do?authKey=WNKAHJXAWPT27BR8CVH0M2VR1HK&returnType=xml&busiTpcd=PLCYTP01&chargerClcd=G&startPage=1+&display=20')
    self.supports = extractXmlSupportData(xml_text)
    self.supportbox.delete(0, END)
    for entry in self.supports:
        self.supportbox.insert(END, entry.pirntstrSupport())
def getMovie(movieid):
    """Return the 'data' payload for *movieid*, or None after printing an error."""
    movie = request(url + url_details + '?movie_id=' + movieid)
    if movie is False:
        print("Error getting movie " + movieid)
        return None
    if movie['status'] == 'error':
        print("Error getting movie " + movieid + ": " + movie['status_message'])
        return None
    return movie['data']
def send_bot(msg=""): """useful function to send a message to your bot in cli""" debug("send_bot called") try : request(msg) except Exception as e: try : logging.warning(e) request("request failed, please watch out logging file") except Exception as e : logging.warning(e) ; logging.warning("error send_bot, bad request")
def upload_file(self, file_path):
    """Upload the file's MD5 hash + metadata; on URLError delete the local file."""
    with open(file_path, 'rb') as source:
        payload = source.read()
    file_name = file_path.split("\\")[-1]
    parameters = {"Hash": md5hash(payload), "Metadata": {'Processed': True}}
    try:
        request(self.__json_data, 'edit', self.__service_url, self.__client_name, parameters)
        logging.info("Uploaded file " + file_name + " to " + self.__service_url)
    except URLError:
        os.remove(file_path)
        logging.error("Server error.")
        logging.info("Removed " + file_name + " from " + self.__download_dir_path)
def getFilter(id): unit = "retrieveFilters.getFilter" url = "%s/rest/api/2/filter/%s" % (siteUrl, id) log(unit, "Sending getFilter(id = %s)" % id) return request(url)
def search(filterId): unit = "retrieveFilters.search" url = "%s&fields=key,created,priority,status" % getSearchUrlByFilter(filterId) log(unit, "Sending search(filterId = %s)" % filterId) return request(url)
def main():
    """Continuously render ASCII price charts for the coins given on argv,
    refreshing every 10 minutes; each chart is also written to <name>.chart."""
    loop = 1
    while loop:
        os.system('cls' if os.name == 'nt' else 'clear')
        ''' Update '''
        for c in range(len(sys.argv)-1):
            coin = "" if (len(sys.argv) == 1) else sys.argv[c+1]
            res = request("https://api.coinmarketcap.com/v1/ticker/"+coin)
            if res == "":
                continue
            # vertical span of the chart scales with log of the price
            PRICE_RANGE = 2 * (10 * math.log(float(res[0]["price_usd"])))
            ''' Load array '''
            # shift history left; newest sample lands at the 3/4 mark
            for i in range(0,HSIZE-1):
                price[coin][i] = price[coin][i+1]
            price[coin][int(HSIZE*(3/4))] = float(res[0]["price_usd"])
            ''' Get frame information '''
            # -1 entries are "no data" sentinels, excluded from the minimum
            min_price = int(min(list(filter((-1).__ne__, price[coin]))) - (PRICE_RANGE/2))
            ''' Print output -- Also a file output '''
            f = open(res[0]["name"].lower()+".chart","w")
            s = "" + res[0]["name"] + " at " + res[0]["price_usd"] + " USD";
            print(s)
            f.write(s+"\n")
            for i in range(0,VSIZE):
                curprice = min_price + PRICE_RANGE * ((VSIZE-i)/VSIZE)
                lowerprice = curprice - PRICE_RANGE/10
                # NOTE(review): both branches are identical — looks like a
                # leftover from an intended formatting difference; confirm.
                if max(price[coin]) < 30:
                    num = int(curprice)
                else:
                    num = int(curprice)
                # axis labels only on the top, middle and bottom rows
                if i != 0 and i != VSIZE - 1 and i != int(VSIZE/2):
                    string = " |"
                else:
                    string = '{0:<5}'.format(str(num)) + " |"
                for j in range(0,HSIZE):
                    if (price[coin][j] <= curprice and price[coin][j] >= lowerprice):
                        string = string + "+"
                    else:
                        string = string + " "
                print(string)
                f.write(string + "\n")
            print()
            f.close()
        ''' Sleep til the next cycle '''
        time.sleep(60*10)
    return 0
def install_package(query, dependencies=False, verbose=True,
                    repos="http://cran.univ-lyon1.fr/"):
    """Install a R package

    :param str query: It can be a valid URL to a R package (tar ball),
        a CRAN package, a path to a R package (tar ball), or simply the
        directory containing a R package source.
    :param bool dependencies:
    :param repos: if provided, install_packages automatically select the
        provided repositories otherwise a popup window will ask you to
        select a repo

    ::

        >>> rtools.install_package("path_to_a_valid_Rpackage.tar.gz")
        >>> rtools.install_package("http://URL_to_a_valid_Rpackage.tar.gz")
        >>> rtools.install_package("hash") # a CRAN package
        >>> rtools.install_package("path to a valid R package directory")

    .. seealso:: :class:`biokit.rtools.RPackageManager`
    """
    session = RSession(verbose=verbose)

    # Is it a local file?
    if os.path.exists(query):
        repos = 'NULL'
    else:
        # we want the " to be part of the string later on
        repos = '"{0}"'.format(repos)

    try:
        # PART for fetching a file on the web, download and install locally
        if verbose:
            print("Trying from the web ?")
        # BUG FIX: urllib.request is a module, not a callable; fetch with
        # urlopen instead.
        data = urllib.request.urlopen(query)
        fh = TempFile(suffix=".tar.gz")
        # BUG FIX: the temp-file handle was shadowed by the open() target and
        # the tarball (bytes) was written in text mode; write binary and keep
        # the names distinct.
        with open(fh.name, 'wb') as out:
            for chunk in data.readlines():
                out.write(chunk)
        code = """install.packages("%s", dependencies=%s """ % \
            (fh.name, bool2R(dependencies))
        code += """ , repos=NULL) """
        session.run(code)
    except Exception as err:
        if verbose:
            # BUG FIX: BaseException.message no longer exists in Python 3.
            print(err)
            print("trying local or from repos")
            print(
                "RTOOLS warning: URL provided does not seem to exist %s. Trying from CRAN" % query)
        code = """install.packages("%s", dependencies=%s """ % \
            (query, bool2R(dependencies))
        code += """ , repos=%s) """ % repos
        session.run(code)
    return
def main(): url = input("Enter the url: ") paresed_data = request(url) #print(paresed_data) #print(getarticle(paresed_data)) articles = getarticle(paresed_data) print("The Article \n\n") for article in articles: print("{} \n".format(article.text))
def web_traffic(url):
    """Look up the site's Alexa reach rank.

    Returns 1 when rank < 100000, 0 otherwise, and -1 when no rank is found.
    """
    try:
        # BUG FIX: urllib.request is a module and is not callable — the old
        # call raised TypeError immediately, so this function always returned
        # -1. Fetch the XML with urlopen instead.
        xml = urllib.request.urlopen(
            "http://data.alexa.com/data?cli=10&dat=s&url=" + url).read()
        rank = bs4.BeautifulSoup(xml, "xml").find("REACH")['RANK']
    except TypeError:
        # find() returned None (no REACH element): site has no rank.
        return -1
    rank = int(rank)
    return 1 if rank < 100000 else 0
def post(url, params=None, data=None, headers=None, cookies=None, files=None,
         auth=None, timeout=60, allow_redirects=False, stream=False):
    """Initiate a POST request. Arguments as for :func:`request`.

    :returns: :class:`Response` instance
    """
    # Thin forwarder: every argument is passed positionally to request().
    return request('POST', url, params, data, headers, cookies, files, auth,
                   timeout, allow_redirects, stream)
def _list_keys(self, identity: Identity) -> typing.Iterable[PKey]:
    """Yield (parsed-PKey, raw-entry) pairs from the key list endpoint,
    logging and skipping entries that fail to parse."""
    logger = self.logger.getChild('list_keys')
    entries = request(identity, self.list_url)
    for entry in entries:
        try:
            yield parse_openssh_pubkey(entry['key']), entry
        except Exception as exc:
            logger.exception(str(exc))
            continue
def crawler():
    """Collect unique article links from all pages of the economics category."""
    main_link = 'http://www.evening-kazan.ru/categories/ekonomika.html'
    page_links = next_and_last(request(main_link))
    page_links.insert(0, main_link)
    collected = []
    for page_link in page_links:
        page_html = request(page_link)
        for article_path in re.findall('/articles/.*?\.html', page_html):
            article_link = create_a_link(article_path)
            if article_link not in collected:
                collected.append(article_link)
    return collected
def get_categories(catalog):
    """Return the category list for *catalog* via the CategoriesList endpoint."""
    payload = dict(payload_default)
    payload['ClassificationID'] = catalog
    response = request(get_url('CategoriesList', payload))
    return response.json()['categorieList']
def get_all_pages(pages):
    """Fetch every results page (50 items each) and extend the global `data`,
    pausing 2 s between requests."""
    global data
    for page in range(1, pages + 1):
        print('Getting results page ' + str(page) + ' of ' + str(pages) + ' ...')
        response, content = request({'page': str(page), 'per_page': '50'})
        data.extend(json.loads(content))
        time.sleep(2)
def update_definition(self):
    """ Get definition file from web and write it to the local path. """
    # TODO: Requires error catching
    # BUG FIX: urllib.request is a module (not callable) and urllib.urlopen
    # does not exist in Python 3 — use urllib.request.Request/urlopen.
    req = urllib.request.Request(self.url)
    response = urllib.request.urlopen(req, timeout=60)
    content = response.readlines()
    with open(str(self.local_ls_ietf_definition), "w") as fhandle:
        for line in content:
            # BUG FIX: urlopen yields bytes; decode before writing to the
            # text-mode file (the old code raised TypeError here).
            fhandle.write(line.decode())
def process():
    # Send text to the remote pipeline tool and write results to output_path,
    # either as one batch request or one request per sentence (args.separate).
    with open(output_path, 'w', encoding=args.encoding) as output_file:
        if args.separate == 0:
            conditional_info("[INFO] Processing type: Batch")
            # whole document in a single request
            params = urllib.parse.urlencode({'tool': args.tool, 'input': full_text, 'token': token}).encode(pipeline_encoding)
            output_file.write("{0}\n".format(request(params)))
            print("[DONE] It took {0} seconds to process {1} sentences".format(str(time.time()-start_time).split('.')[0], sentence_count))
        else:
            conditional_info("[INFO] Processing type: Sentence-by-sentence")
            for sentence in sentences:
                params = urllib.parse.urlencode({'tool': args.tool, 'input': sentence, 'token': token}).encode(pipeline_encoding)
                output_file.write("{0}\n".format(request(params)))
                conditional_info("[INFO] Processing {0}".format(sentence))
            print("[DONE] It took {0} seconds to process all {1} sentences.".format(str(time.time()-start_time).split('.')[0], sentence_count))
def get_business(business_id):
    """Query the Business API by a business ID.

    Args:
        business_id (str): The ID of the business to query.

    Returns:
        dict: The JSON response from the request.
    """
    return request(API_HOST, BUSINESS_PATH + business_id)
def telekstr(self, kstr):
    """Fetch the wanted-job list matching keyword *kstr* from work.go.kr."""
    keyword_query = '&keyword=' + urllib.parse.quote(kstr)
    xml_text = request('http://openapi.work.go.kr/opi/opi/opia/wantedApi.do?authKey=WNKAHJXAWPT27BR8CVH0M2VR1HK&callTp=L&returnType=XML&startPage=1&display=10' + keyword_query)
    print(xml_text)
    return extractXmlData(xml_text)
def get_html(url, set_head=False):
    """Fetch *url* and return its body with HTML entities and common escape
    sequences normalised."""
    body = html.unescape(request(url, set_head=set_head).text)
    for old, new in (('&', '&'), ('"', '\"'), ('<', '<'), ('>', '>'),
                     ('\\n', '\n'), ('\\t', '\t'), ('\\r', '')):
        body = body.replace(old, new)
    return body
def get_stream_from_follwoed(tok):
    """Return the channel name of the first live followed stream, else False."""
    blob = request(api_url_auth.format(rest="/streams/followed", token=tok))
    if not blob or not blob["streams"]:
        return False
    return blob["streams"][0]["channel"]["name"]
def get_xueqiu_cube_list(category, count, orderby):
    """Fetch the Xueqiu cube list and rebuild the local TinyDB 'Cube' table."""
    query_url = (cube_list_url + "?category=" + category + "&count=" + count
                 + "&market=cn&profit=" + orderby)
    payload = json.loads(request(query_url, cookie).read())
    db = TinyDB('data/db_cube.json')
    table = db.table("Cube")
    db.purge_table("Cube")
    for cube in payload["list"]:
        table.insert(cube)
def post(url, parms, headers):
    """POST *parms* to *url*.

    Returns (decoded-json, status-code) on success, (None, error-code) on
    failure after printing the exception.
    """
    # BUG FIX: urllib.urlencode is Python 2; in Python 3 it lives in
    # urllib.parse, and Request bodies must be bytes.
    try:
        data = urllib.parse.urlencode(parms).encode()
    except:
        # caller supplied a pre-encoded body
        data = parms
    # BUG FIX: urllib.request is a module, not callable — build a Request.
    if headers == '':
        req = urllib.request.Request(url, data)
    else:
        req = urllib.request.Request(url, data, headers)
    try:
        # BUG FIX: urllib2 does not exist in Python 3.
        response = urllib.request.urlopen(req)
        code = response.code
        responsedata = response.read()
        responsedata = json.loads(responsedata)
        return responsedata, code
    except Exception as e:
        print(e)
        # NOTE(review): only HTTPError carries .code; other exceptions will
        # raise AttributeError here, preserved from the original — confirm.
        return None, e.code
def post_request(key, secret, path, postData):
    """Issue a signed POST: HMAC-SHA512 over "<path>\\n<millis>\\n<postData>",
    base64-encoded."""
    timestamp = str(int(time.time() * 1000))
    payload = path + "\n" + timestamp + "\n" + postData
    digest = hmac.new(secret, payload, digestmod=hashlib.sha512).digest()
    signature = base64.b64encode(digest)
    return request('post', key, signature, timestamp, path, postData)
def is_online(channel): """Check if given channel is currently online.""" blob = request(api_url.format(rest="/streams/" + channel)) if not blob: return False if blob["stream"] is None: return False else: return True
def main():
    """Fetch and save NASA's Astronomy Picture of the Day for a chosen date."""
    # NASA developer key (You can hardcode yours for higher request rate limits!)
    # I had my own API :B https://api.nasa.gov/planetary/apod?api_key=qqCz1Z2BTVYqS961si4EWIwd6iyDHnev3VGWTMnY
    # API_KEY = "cZX0zRDveiz7AfGfOW23typMH3NCnS3uvQJc0ZNS"
    API_KEY = "qqCz1Z2BTVYqS961si4EWIwd6iyDHnev3VGWTMnY"
    # parse command line arguments
    args = parse_command_line()
    # update API_KEY if passed on the command line
    print(args.api_key)
    if args.api_key != '':
        API_KEY = args.api_key
    # create a request date
    d = create_date(args.date, args.surprise)
    # ascertain a valid date was created, otherwise exit program
    if d is None:
        print("No valid date selected!")
        exit()
    # verbose mode
    if args.verbose:
        print("Image date: {}".format(d.strftime("%b %d, %Y")))
    # generate query url
    url = query_url(d, API_KEY)
    # verbose mode
    if args.verbose:
        print("Query URL: {}".format(url))
    # download the image metadata as a Python dictionary
    metadata = request(url)
    # verbose mode
    if args.verbose:
        # display image title, other metadata can be shown here
        print("Image title: {}".format(metadata['title']))
    # get the url of the image data from the dictionary
    image_url = metadata['url']
    # verbose mode
    if args.verbose:
        print("Downloading image from:", image_url)
    # download the image itself (the returned info is binary)
    image = download_image(image_url)
    # save the downloaded image into disk in (year/month)
    # the year and month directories correspond to the date of the image (d)
    # the file name is the date (d) + '.jpg'
    save_image(d, image)
    print("Image saved")
def test_flask(install, venv, app): install(extra="flask") with background_task("vue-cli", "deploy", "flask", env={"PATH": f"{venv / 'bin'}"}, cwd=str(app)): assert (request("http://localhost:5000", retries=5, retry_delay=0.5).status == 200)
def request(self, url, data=None):
    """Resolve *url* against self.url_base, issue the request, and return the
    response content; raise RequestError on any response error."""
    log.info(f'Request: {self.url_base}, {url}, {data}')
    # note: this deliberately calls the module-level request(), not self.request
    response = request(urllib.parse.urljoin(self.url_base, url), data)
    if not response.error:
        return response.content
    if response.content:
        raise RequestError(response.content)
    raise RequestError(f'{response.error} {response.msg}')
def addBeijing():
    """RBC (Beijing radio): scrape stream ids and station names, add mms links."""
    page = request('http://listen.rbc.cn/baidu/')
    addr_match = re.compile('varaddrs=newArray\("","(.+?)"\);').findall(page)
    ids = addr_match[0].split('","')
    station_match = re.compile('varstation=newArray\("","(.+?)"\);').findall(page)
    nms = station_match[0].split('","')
    for index, stream_id in enumerate(ids):
        addLink('%d.%s' % (index + 1, nms[index]), 'mms://alive.rbc.cn/' + stream_id)
def authorize(self, identity: Identity) -> bool:
    """Return True iff *identity* is a member of this team's GitHub org.

    False when the identity's team type doesn't match, the org-list request
    fails, or the API answers with an error mapping.
    """
    if not issubclass(identity.team_type, type(self)):
        return False
    try:
        response = request(identity, self.ORGS_LIST_URL)
    except IOError:
        return False
    # BUG FIX: collections.Mapping was removed in Python 3.10; the ABC lives
    # in collections.abc.
    if isinstance(response, collections.abc.Mapping) and 'error' in response:
        return False
    return any(o['login'] == self.org_login for o in response)
def get(url, params=None, headers=None, cookies=None, auth=None,
        timeout=60, allow_redirects=True, stream=False):
    """Initiate a GET request. Arguments as for :func:`request`.

    :returns: :class:`Response` instance
    """
    # Thin forwarder: method and url positional, everything else by keyword.
    return request('GET', url, params, headers=headers, cookies=cookies,
                   auth=auth, timeout=timeout, allow_redirects=allow_redirects,
                   stream=stream)
def get_html(url, set_head=False):
    """Fetch *url*; return the body with entities and escapes normalised."""
    response = request(url, set_head=set_head)
    text = html.unescape(response.text)
    text = text.replace('&', '&')
    text = text.replace('"', '\"')
    text = text.replace('<', '<')
    text = text.replace('>', '>')
    text = text.replace('\\n', '\n')
    text = text.replace('\\t', '\t')
    text = text.replace('\\r', '')
    return text
def delete(self, key: str, value: list):
    """Delete every stored row under *key* whose value equals *value*."""
    self._ensure_setup()
    view_url = url(self.url, '_design', 'hypothesis', '_view', 'by_key',
                   reduce='false', include_docs='true',
                   key=json.dumps(format_key(key)))
    result = request(method='GET', url=view_url, headers=self.headers)
    for row in result['rows']:
        if row['value'] == value:
            request(
                method='DELETE',
                url=url(self.url, row['id'], rev=row['doc']['_rev']),
                headers=self.headers)
def getPage(self):
    """Fetch the qiushibaike hot page; return its HTML text or None on failure."""
    try:
        url = "http://www.qiushibaike.com/hot/"
        # BUG FIX: urllib.request is a module and is not callable (the old
        # code also shadowed the name `request`); build a Request carrying
        # the headers and open it.
        req = urllib.request.Request(url, headers=self.headers)
        response = urllib.request.urlopen(req)
        pageCode = response.read().decode("utf-8")
        return pageCode
    except urllib.request.URLError as e:
        if hasattr(e, "reason"):
            print("连接糗事百科失败,错误原因", e.reason)
        return None